Merge branch 'master' into eugenia/black_cherry_stable
Conflicts:
asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
diff --git a/.gitignore b/.gitignore
index 3707fd7..fd22e3b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,10 +3,17 @@
.settings
.project
ClusterControllerService
-asterix-app/rttest
+rttest
+mdtest
+ittest
+asterix_logs
+build
+bin
+asterix-app/rttest/
asterix-app/mdtest/
asterix-app/opttest/
asterix-installer/ittest/
build
asterix_logs
bin/
+*-coredump
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index 074d693..f4c6150 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-algebra</artifactId>
@@ -91,25 +91,31 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-external-data</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
index bfa6045..68f231e 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
@@ -19,4 +19,4 @@
public static final String PUSHED_FIELD_ACCESS = "PUSHED_FIELD_ACCESS";
public static final String PUSHED_RUNNABLE_FIELD_ACCESS = "PUSHED_RUNNABLE_FIELD_ACCESS";
public static final String FIELD_TYPE = "FIELD_TYPE";
-}
+}
\ No newline at end of file
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
index e8f48a6..3b50edc 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
@@ -40,7 +40,8 @@
private final Map<LogicalVariable, LogicalVariable> inVarMapping;
private final Map<LogicalVariable, LogicalVariable> outVarMapping;
- public LogicalExpressionDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping, Map<LogicalVariable, LogicalVariable> variableMapping) {
+ public LogicalExpressionDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping,
+ Map<LogicalVariable, LogicalVariable> variableMapping) {
this.counter = counter;
this.inVarMapping = inVarMapping;
this.outVarMapping = variableMapping;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
index 66ae216..7d95978 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
@@ -68,7 +68,7 @@
}
return keyIndexes;
}
-
+
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent) {
if (requiresBroadcast) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
index 6a48a17..10848e8 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
@@ -18,10 +18,13 @@
import org.apache.commons.lang3.mutable.Mutable;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.functions.FunctionDescriptorTag;
+import edu.uci.ics.asterix.external.library.ExternalFunctionDescriptorProvider;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.comparisons.ComparisonEvalFactory;
import edu.uci.ics.asterix.runtime.formats.FormatUtils;
@@ -138,9 +141,18 @@
}
IFunctionDescriptor fd = null;
- AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
- IDataFormat format = FormatUtils.getDefaultFormat();
- fd = format.resolveFunction(expr, env);
+ if (!(expr.getFunctionInfo() instanceof IExternalFunctionInfo)) {
+ AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+ IDataFormat format = FormatUtils.getDefaultFormat();
+ fd = format.resolveFunction(expr, env);
+ } else {
+ try {
+ fd = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor((IExternalFunctionInfo) expr
+ .getFunctionInfo());
+ } catch (AsterixException ae) {
+ throw new AlgebricksException(ae);
+ }
+ }
return fd.createEvaluatorFactory(args);
}
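
The new branch in AqlLogicalExpressionJobGen dispatches on the function-info type: built-in functions still resolve through the default data format, while functions whose info implements IExternalFunctionInfo go through ExternalFunctionDescriptorProvider, with AsterixException rewrapped as AlgebricksException. A minimal self-contained sketch of that dispatch pattern follows; FunctionInfo, ExternalFunctionInfo, and Descriptor are illustrative stand-ins, not the real AsterixDB interfaces.

    // Illustrative stand-ins, not the real AsterixDB interfaces.
    interface FunctionInfo {}

    interface ExternalFunctionInfo extends FunctionInfo {
        String libraryName();
    }

    interface Descriptor {
        String evaluate(String arg);
    }

    final class DescriptorResolverSketch {
        static Descriptor resolve(FunctionInfo info) {
            if (info instanceof ExternalFunctionInfo) {
                // Real code: ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor(...),
                // with AsterixException rewrapped as AlgebricksException.
                ExternalFunctionInfo ext = (ExternalFunctionInfo) info;
                return arg -> "external[" + ext.libraryName() + "](" + arg + ")";
            }
            // Default path: the data format resolves built-in functions.
            return arg -> "builtin(" + arg + ")";
        }

        public static void main(String[] args) {
            ExternalFunctionInfo udf = () -> "mylib";
            System.out.println(resolve(udf).evaluate("x")); // external[mylib](x)
        }
    }
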
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
index 0a1b429..01fe103 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
@@ -31,6 +31,7 @@
import edu.uci.ics.asterix.optimizer.rules.FuzzyEqRule;
import edu.uci.ics.asterix.optimizer.rules.IfElseToSwitchCaseFunctionRule;
import edu.uci.ics.asterix.optimizer.rules.InlineUnnestFunctionRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceAutogenerateIDRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceInstantLockSearchCallbackRule;
@@ -49,11 +50,13 @@
import edu.uci.ics.asterix.optimizer.rules.PushProperJoinThroughProduct;
import edu.uci.ics.asterix.optimizer.rules.PushSimilarityFunctionsBelowJoin;
import edu.uci.ics.asterix.optimizer.rules.RemoveRedundantListifyRule;
+import edu.uci.ics.asterix.optimizer.rules.RemoveSortInFeedIngestionRule;
import edu.uci.ics.asterix.optimizer.rules.RemoveUnusedOneToOneEquiJoinRule;
import edu.uci.ics.asterix.optimizer.rules.ReplaceSinkOpWithCommitOpRule;
import edu.uci.ics.asterix.optimizer.rules.SetAsterixPhysicalOperatorsRule;
import edu.uci.ics.asterix.optimizer.rules.SetClosedRecordConstructorsRule;
import edu.uci.ics.asterix.optimizer.rules.SimilarityCheckRule;
+import edu.uci.ics.asterix.optimizer.rules.SweepIllegalNonfunctionalFunctions;
import edu.uci.ics.asterix.optimizer.rules.UnnestToDataScanRule;
import edu.uci.ics.asterix.optimizer.rules.am.IntroduceJoinAccessMethodRule;
import edu.uci.ics.asterix.optimizer.rules.am.IntroduceSelectAccessMethodRule;
@@ -64,6 +67,7 @@
import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.CopyLimitDownRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateGroupByEmptyKeyRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceOrderByAfterSubplan;
@@ -88,7 +92,6 @@
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignDownThroughProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushLimitDownRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushNestedOrderByUnderPreSortedGroupByRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectDownRule;
@@ -113,6 +116,12 @@
return typeInfer;
}
+ public final static List<IAlgebraicRewriteRule> buildAutogenerateIDRuleCollection() {
+ List<IAlgebraicRewriteRule> autogen = new LinkedList<>();
+ autogen.add(new IntroduceAutogenerateIDRule());
+ return autogen;
+ }
+
public final static List<IAlgebraicRewriteRule> buildNormalizationRuleCollection() {
List<IAlgebraicRewriteRule> normalization = new LinkedList<IAlgebraicRewriteRule>();
normalization.add(new IntroduceUnnestForCollectionToSequenceRule());
@@ -227,6 +236,8 @@
planCleanupRules.add(new RemoveRedundantVariablesRule());
planCleanupRules.add(new PushProjectDownRule());
planCleanupRules.add(new PushSelectDownRule());
+ planCleanupRules.add(new SetClosedRecordConstructorsRule());
+ planCleanupRules.add(new IntroduceDynamicTypeCastRule());
planCleanupRules.add(new RemoveUnusedAssignAndAggregateRule());
return planCleanupRules;
}
@@ -247,8 +258,8 @@
physicalRewritesAllLevels.add(new SetAsterixPhysicalOperatorsRule());
physicalRewritesAllLevels.add(new IntroduceInstantLockSearchCallbackRule());
physicalRewritesAllLevels.add(new EnforceStructuralPropertiesRule());
+ physicalRewritesAllLevels.add(new RemoveSortInFeedIngestionRule());
physicalRewritesAllLevels.add(new IntroHashPartitionMergeExchange());
- physicalRewritesAllLevels.add(new SetClosedRecordConstructorsRule());
physicalRewritesAllLevels.add(new PushProjectDownRule());
physicalRewritesAllLevels.add(new InsertProjectBeforeUnionRule());
physicalRewritesAllLevels.add(new IntroduceMaterializationForInsertWithSelfScanRule());
@@ -263,7 +274,7 @@
public final static List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection() {
List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<IAlgebraicRewriteRule>();
physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
- physicalRewritesTopLevel.add(new PushLimitDownRule());
+ physicalRewritesTopLevel.add(new CopyLimitDownRule());
physicalRewritesTopLevel.add(new IntroduceProjectsRule());
physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
physicalRewritesTopLevel.add(new IntroduceRapidFrameFlushProjectAssignRule());
@@ -280,6 +291,7 @@
// propagated.
prepareForJobGenRewrites.add(new ReinferAllTypesRule());
prepareForJobGenRewrites.add(new SetExecutionModeRule());
+ prepareForJobGenRewrites.add(new SweepIllegalNonfunctionalFunctions());
return prepareForJobGenRewrites;
}
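
RuleCollections registers rewrite rules in ordered lists, and this commit both adds a new collection (buildAutogenerateIDRuleCollection) and reorders existing ones, e.g. moving SetClosedRecordConstructorsRule from the physical rewrites into plan cleanup and swapping PushLimitDownRule for CopyLimitDownRule. Below is a minimal sketch of the collection-plus-fixpoint pattern, under the assumption that each collection is applied until no rule fires (as with Algebricks' sequential fixpoint rule controller); all names are illustrative.

    import java.util.LinkedList;
    import java.util.List;

    // Illustrative rule abstraction; returns true when it changed the plan.
    interface RewriteRule {
        boolean rewrite(StringBuilder plan);
    }

    final class RuleCollectionsSketch {
        // Like buildPlanCleanupRuleCollection(): an ordered list, where later
        // rules see the output of earlier ones. That ordering is why moving
        // SetClosedRecordConstructorsRule between collections is significant.
        static List<RewriteRule> buildCleanupCollection() {
            List<RewriteRule> rules = new LinkedList<>();
            rules.add(plan -> {
                int i = plan.indexOf("noop-");
                if (i < 0) {
                    return false;
                }
                plan.delete(i, i + 5); // drop one redundant operator
                return true;
            });
            return rules;
        }

        // Assumption: each collection is run to a fixpoint.
        static void runToFixpoint(List<RewriteRule> rules, StringBuilder plan) {
            boolean changed;
            do {
                changed = false;
                for (RewriteRule r : rules) {
                    changed |= r.rewrite(plan);
                }
            } while (changed);
        }

        public static void main(String[] args) {
            StringBuilder plan = new StringBuilder("scan-noop-noop-project");
            runToFixpoint(buildCleanupCollection(), plan);
            System.out.println(plan); // scan-project
        }
    }
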
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
index 4506654..405f966 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
@@ -15,17 +15,16 @@
package edu.uci.ics.asterix.optimizer.handle;
/**
- *
* A handle is a way of accessing an ADM instance or a collection of ADM
* instances nested within another ADM instance.
*
* @author Nicola
- *
*/
public interface IHandle {
public enum HandleType {
- FIELD_INDEX_AND_TYPE, FIELD_NAME
+ FIELD_INDEX_AND_TYPE,
+ FIELD_NAME
}
public HandleType getHandleType();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
index bf18cdb..05051de 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
@@ -18,7 +18,7 @@
import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineVariablesRule;
public class AsterixInlineVariablesRule extends InlineVariablesRule {
-
+
public AsterixInlineVariablesRule() {
// Do not inline field accesses because doing so would interfere with our access method rewrites.
// TODO: For now we must also exclude record constructor functions to avoid breaking our type casting rules
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
index 575490b..69b23f0 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
@@ -75,7 +75,7 @@
AssignOperator assignVar = new AssignOperator(varArray, exprArray);
x = new VariableReferenceExpression(var1);
assignVar.getInputs().add(opUnder);
- opUnder = new MutableObject<ILogicalOperator>(assignVar);
+ opUnder = new MutableObject<ILogicalOperator>(assignVar);
}
// let $t := type-of(x)
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
index 5dd5d27..6e5cb02 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
@@ -271,7 +271,7 @@
raggOp.setPhysicalOperator(raggPOp);
raggOp.getInputs().add(nestedPlanRoot.getInputs().get(0));
gby.getNestedPlans().get(0).getRoots().set(0, new MutableObject<ILogicalOperator>(raggOp));
-
+
opRef.setValue(nestedAssign);
context.computeAndSetTypeEnvironmentForOperator(nestedAssign);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
index ae0a842..a42507c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -165,7 +165,7 @@
public Pair<Boolean, ILogicalExpression> visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr,
Void arg) throws AlgebricksException {
boolean changed = changeRec(expr, arg);
- if (!checkArgs(expr)) {
+ if (!checkArgs(expr) || !expr.isFunctional()) {
return new Pair<Boolean, ILogicalExpression>(changed, expr);
}
//Current ARecord SerDe assumes a closed record, so we do not constant fold open record constructors
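
The added !expr.isFunctional() guard keeps ConstantFoldingRule from pre-computing non-functional (non-deterministic or side-effecting) calls. This matters for functions like create-uuid(), introduced elsewhere in this change: folding one would freeze a single "unique" value into the plan. A small sketch of the guard, with an illustrative stand-in type:

    import java.util.UUID;
    import java.util.function.Supplier;

    // Illustrative stand-in for a scalar function call expression.
    final class Call {
        final boolean functional; // deterministic and side-effect free
        final Supplier<String> eval;

        Call(boolean functional, Supplier<String> eval) {
            this.functional = functional;
            this.eval = eval;
        }

        // Mirrors the new guard: only functional expressions may be folded
        // at compile time; create-uuid() must stay in the plan and be
        // evaluated at runtime, once per record.
        static String tryFold(Call c) {
            return c.functional ? c.eval.get() : null;
        }

        public static void main(String[] args) {
            Call lower = new Call(true, () -> "ABC".toLowerCase());
            Call uuid = new Call(false, () -> UUID.randomUUID().toString());
            System.out.println(tryFold(lower)); // abc
            System.out.println(tryFold(uuid));  // null, i.e. not folded
        }
    }
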
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
index 60eb974..4148aca 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.optimizer.rules;
-
import org.apache.commons.lang3.mutable.Mutable;
import edu.uci.ics.asterix.om.base.AInt64;
@@ -43,7 +42,8 @@
// It is only for a group-by having just one aggregate which is a count.
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
index f2a8a27..cef3d4c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.DISTINCT) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
index 962d48c..dfac43c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
@@ -14,8 +14,6 @@
*/
package edu.uci.ics.asterix.optimizer.rules;
-
-
import org.apache.commons.lang3.mutable.Mutable;
import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
@@ -33,7 +31,7 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
-public class ExtractOrderExpressionsRule extends AbstractExtractExprRule {
+public class ExtractOrderExpressionsRule extends AbstractExtractExprRule {
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
@@ -41,7 +39,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.ORDER) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
index 3940ed3..89e5050 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
@@ -14,21 +14,29 @@
*/
package edu.uci.ics.asterix.optimizer.rules;
+import java.util.ArrayList;
+import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import edu.uci.ics.asterix.aql.util.FunctionUtils;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
public class FeedScanCollectionToUnnest implements IAlgebraicRewriteRule {
@@ -39,7 +47,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (context.checkIfInDontApplySet(this, op)) {
return false;
@@ -50,7 +59,7 @@
}
UnnestOperator unnest = (UnnestOperator) op;
ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
- if (needsScanCollection(unnestExpr)) {
+ if (needsScanCollection(unnestExpr, op)) {
ILogicalExpression newExpr = new UnnestingFunctionCallExpression(
FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
new MutableObject<ILogicalExpression>(unnestExpr));
@@ -62,14 +71,53 @@
return false;
}
- private boolean needsScanCollection(ILogicalExpression unnestExpr) {
+ private ILogicalExpression findVarOriginExpression(LogicalVariable v, ILogicalOperator op)
+ throws AlgebricksException {
+ boolean searchInputs = false;
+ if (!(op instanceof AbstractAssignOperator)) {
+ searchInputs = true;
+ } else {
+ AbstractAssignOperator aao = (AbstractAssignOperator) op;
+ List<LogicalVariable> producedVars = new ArrayList<>();
+ VariableUtilities.getProducedVariables(op, producedVars);
+ int exprIndex = producedVars.indexOf(v);
+ if (exprIndex == -1) {
+ searchInputs = true;
+ } else {
+ ILogicalExpression originalCandidate = aao.getExpressions().get(exprIndex).getValue();
+ if (originalCandidate.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ searchInputs = true;
+ } else {
+ return originalCandidate;
+ }
+ }
+ }
+
+ if (searchInputs) {
+ for (Mutable<ILogicalOperator> childOp : op.getInputs()) {
+ ILogicalExpression ret = findVarOriginExpression(v, childOp.getValue());
+ if (ret != null) {
+ return ret;
+ }
+ }
+ }
+
+ throw new IllegalStateException("Unable to find the original expression that produced variable " + v);
+ }
+
+ private boolean needsScanCollection(ILogicalExpression unnestExpr, ILogicalOperator op) throws AlgebricksException {
switch (unnestExpr.getExpressionTag()) {
case VARIABLE: {
- return true;
+ LogicalVariable v = ((VariableReferenceExpression) unnestExpr).getVariableReference();
+ ILogicalExpression originalExpr = findVarOriginExpression(v, op);
+ if (originalExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+ return false;
+ } else {
+ return !isUnnestingFunction(originalExpr);
+ }
}
case FUNCTION_CALL: {
- AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) unnestExpr;
- return fce.getKind() != FunctionKind.UNNEST;
+ return !isUnnestingFunction(unnestExpr);
}
default: {
return false;
@@ -77,4 +125,11 @@
}
}
+ private boolean isUnnestingFunction(ILogicalExpression expr) {
+ if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+ return fce.getKind() == FunctionKind.UNNEST;
+ }
+ return false;
+ }
}
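
The new findVarOriginExpression walks from the unnest operator down through its inputs to the assign that produced the variable, so needsScanCollection can decide based on the defining expression (constant, unnesting function, or other) rather than on the variable reference itself. A simplified sketch of the traversal follows; the types are stand-ins, and the real method additionally skips definitions that are bare variable references and throws IllegalStateException instead of returning null.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    // Stand-in operator: maps the variables it assigns to their expressions.
    final class OpSketch {
        final Map<String, String> assigned;
        final List<OpSketch> inputs = new ArrayList<>();

        OpSketch(Map<String, String> assigned) {
            this.assigned = assigned;
        }

        // Prefer a local definition, otherwise recurse into the inputs.
        static String findOrigin(String var, OpSketch op) {
            String expr = op.assigned.get(var);
            if (expr != null) {
                return expr;
            }
            for (OpSketch child : op.inputs) {
                String ret = findOrigin(var, child);
                if (ret != null) {
                    return ret;
                }
            }
            return null;
        }

        public static void main(String[] args) {
            OpSketch assign = new OpSketch(Map.of("$x", "scan-collection(ds)"));
            OpSketch unnestInput = new OpSketch(Map.of());
            unnestInput.inputs.add(assign);
            // An unnest of $x needs no extra scan-collection: its origin
            // already is an unnesting function.
            System.out.println(findOrigin("$x", unnestInput));
        }
    }
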
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
new file mode 100644
index 0000000..59f8d22
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
@@ -0,0 +1,130 @@
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceAutogenerateIDRule implements IAlgebraicRewriteRule {
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+
+ // match: [insert to internal dataset with autogenerated id] - assign - project
+ // produce: insert - assign - assign* - project
+ AbstractLogicalOperator currentOp = (AbstractLogicalOperator) opRef.getValue();
+ if (currentOp.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ return false;
+ }
+
+ InsertDeleteOperator insertOp = (InsertDeleteOperator) currentOp;
+ if (insertOp.getOperation() != Kind.INSERT) {
+ return false;
+ }
+
+ if (((AqlDataSource) insertOp.getDataSource()).getDatasourceType() != AqlDataSourceType.INTERNAL_DATASET) {
+ return false;
+ }
+
+ AbstractLogicalOperator parentOp = (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue();
+ if (parentOp.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+ return false;
+ }
+ AssignOperator assignOp = (AssignOperator) parentOp;
+
+ AbstractLogicalOperator grandparentOp = (AbstractLogicalOperator) parentOp.getInputs().get(0).getValue();
+ if (grandparentOp.getOperatorTag() != LogicalOperatorTag.PROJECT) {
+ return false;
+ }
+ ProjectOperator projectOp = (ProjectOperator) grandparentOp;
+
+ DatasetDataSource dds = (DatasetDataSource) insertOp.getDataSource();
+ boolean autogenerated = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).isAutogenerated();
+ if (!autogenerated) {
+ return false;
+ }
+
+ String pkFieldName = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(0);
+
+ LogicalVariable inputRecord = projectOp.getVariables().get(0);
+ ILogicalExpression rec0 = new VariableReferenceExpression(inputRecord);
+ ILogicalExpression rec1 = createPrimaryKeyRecordExpression(pkFieldName);
+ ILogicalExpression mergedRec = createRecordMergeFunction(rec0, rec1);
+ ILogicalExpression nonNullMergedRec = createNotNullFunction(mergedRec);
+
+ LogicalVariable v = context.newVar();
+ AssignOperator newAssign = new AssignOperator(v, new MutableObject<ILogicalExpression>(nonNullMergedRec));
+ newAssign.getInputs().add(new MutableObject<ILogicalOperator>(projectOp));
+ assignOp.getInputs().set(0, new MutableObject<ILogicalOperator>(newAssign));
+ VariableUtilities.substituteVariables(assignOp, inputRecord, v, context);
+ VariableUtilities.substituteVariables(insertOp, inputRecord, v, context);
+ context.computeAndSetTypeEnvironmentForOperator(newAssign);
+ context.computeAndSetTypeEnvironmentForOperator(assignOp);
+ context.computeAndSetTypeEnvironmentForOperator(insertOp);
+ return true;
+ }
+
+ private ILogicalExpression createNotNullFunction(ILogicalExpression mergedRec) {
+ List<Mutable<ILogicalExpression>> args = new ArrayList<>();
+ args.add(new MutableObject<ILogicalExpression>(mergedRec));
+ AbstractFunctionCallExpression notNullFn = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL), args);
+ return notNullFn;
+ }
+
+ private AbstractFunctionCallExpression createPrimaryKeyRecordExpression(String pkFieldName) {
+ AbstractFunctionCallExpression uuidFn = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_UUID));
+ List<Mutable<ILogicalExpression>> openRecordConsArgs = new ArrayList<>();
+ Mutable<ILogicalExpression> pkFieldNameExpression = new MutableObject<ILogicalExpression>(
+ new ConstantExpression(new AsterixConstantValue(new AString(pkFieldName))));
+ openRecordConsArgs.add(pkFieldNameExpression);
+ Mutable<ILogicalExpression> pkFieldValueExpression = new MutableObject<ILogicalExpression>(uuidFn);
+ openRecordConsArgs.add(pkFieldValueExpression);
+ AbstractFunctionCallExpression openRecFn = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), openRecordConsArgs);
+ return openRecFn;
+ }
+
+ private AbstractFunctionCallExpression createRecordMergeFunction(ILogicalExpression rec0, ILogicalExpression rec1) {
+ List<Mutable<ILogicalExpression>> recordMergeFnArgs = new ArrayList<>();
+ recordMergeFnArgs.add(new MutableObject<>(rec0));
+ recordMergeFnArgs.add(new MutableObject<>(rec1));
+ AbstractFunctionCallExpression recordMergeFn = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.RECORD_MERGE), recordMergeFnArgs);
+ return recordMergeFn;
+ }
+}
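
The core of IntroduceAutogenerateIDRule is the expression nest it builds around the incoming record: not-null(record-merge($record, {pk: create-uuid()})), assigned to a fresh variable that then replaces the record variable in the downstream assign and insert operators. A toy expression-tree sketch of that composition; Expr is an illustrative stand-in for ILogicalExpression, and the field name "id" is hypothetical.

    import java.util.Arrays;
    import java.util.List;

    // Expr is an illustrative stand-in for ILogicalExpression.
    final class Expr {
        final String label;
        final List<Expr> args;

        Expr(String label, Expr... args) {
            this.label = label;
            this.args = Arrays.asList(args);
        }

        @Override
        public String toString() {
            if (args.isEmpty()) {
                return label;
            }
            StringBuilder sb = new StringBuilder(label).append('(');
            for (int i = 0; i < args.size(); i++) {
                sb.append(i == 0 ? "" : ", ").append(args.get(i));
            }
            return sb.append(')').toString();
        }

        public static void main(String[] args) {
            Expr record = new Expr("$record");
            // {"id": create-uuid()}: open record constructor with the PK field
            Expr pkRec = new Expr("open-record-constructor",
                    new Expr("\"id\""), new Expr("create-uuid()"));
            Expr merged = new Expr("record-merge", record, pkRec);
            Expr guarded = new Expr("not-null", merged);
            // The rule assigns this to a fresh variable and substitutes it
            // for $record in the assign and insert operators.
            System.out.println(guarded);
            // not-null(record-merge($record, open-record-constructor("id", create-uuid())))
        }
    }
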
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index 2cab21d..1314405 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -201,10 +201,14 @@
if (inputType.getTypeTag() != ATypeTag.RECORD) {
throw new AlgebricksException("The input type " + inputType + " is not a valid record type!");
}
+ ARecordType inputRecType = (ARecordType) inputType;
+ if (reqType.isOpen() != inputRecType.isOpen()) {
+ return false;
+ }
IAType[] reqTypes = reqType.getFieldTypes();
String[] reqFieldNames = reqType.getFieldNames();
- IAType[] inputTypes = ((ARecordType) inputType).getFieldTypes();
+ IAType[] inputTypes = inputRecType.getFieldTypes();
String[] inputFieldNames = ((ARecordType) inputType).getFieldNames();
if (reqTypes.length != inputTypes.length) {
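
The added openness check makes the record-type compatibility test in IntroduceDynamicTypeCastRule fail fast when the required and input record types disagree on open/closed, before any field-by-field comparison. A hedged sketch of that guard; RecordTypeSketch is a stand-in for ARecordType, and the surrounding rule decides whether a dynamic cast is introduced based on this result.

    import java.util.Arrays;

    // RecordTypeSketch is an illustrative stand-in for ARecordType.
    final class RecordTypeSketch {
        final boolean open;
        final String[] fieldNames;

        RecordTypeSketch(boolean open, String... fieldNames) {
            this.open = open;
            this.fieldNames = fieldNames;
        }

        // Mirrors the added guard: types that disagree on openness are never
        // considered equivalent, regardless of their fields.
        static boolean compatible(RecordTypeSketch required, RecordTypeSketch input) {
            if (required.open != input.open) {
                return false;
            }
            return Arrays.equals(required.fieldNames, input.fieldNames);
        }

        public static void main(String[] args) {
            RecordTypeSketch req = new RecordTypeSketch(false, "id", "name");
            RecordTypeSketch in = new RecordTypeSketch(true, "id", "name");
            System.out.println(compatible(req, in)); // false
        }
    }
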
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
index 62cca6c..a2b8ef5 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
@@ -25,6 +25,7 @@
import edu.uci.ics.asterix.algebra.operators.physical.BTreeSearchPOperator;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataImplConfig;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -81,11 +82,11 @@
}
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
- String datasetName = ((AqlDataSource) dataSourceScanOp.getDataSource()).getDataset().getDatasetName();
- if (dataSourcesMap.containsKey(datasetName)) {
- ++(dataSourcesMap.get(datasetName).first);
+ String datasourceName = ((AqlDataSource) dataSourceScanOp.getDataSource()).getDatasourceName();
+ if (dataSourcesMap.containsKey(datasourceName)) {
+ ++(dataSourcesMap.get(datasourceName).first);
} else {
- dataSourcesMap.put(datasetName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
+ dataSourcesMap.put(datasourceName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
LogicalOperatorTag.DATASOURCESCAN, dataSourceScanOp.getPhysicalOperator()));
}
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
index c15b0f0..9f0fc3c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
@@ -21,6 +21,7 @@
import edu.uci.ics.asterix.algebra.operators.physical.MaterializePOperator;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -54,7 +55,7 @@
}
InsertDeleteOperator insertOp = (InsertDeleteOperator) op;
- boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((AqlDataSource) insertOp.getDataSource())
+ boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((DatasetDataSource) insertOp.getDataSource())
.getDataset().getDatasetName());
if (sameDataset) {
@@ -104,9 +105,8 @@
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
- if (ds.getDatasourceType() != AqlDataSourceType.FEED
- && ds.getDatasourceType() != AqlDataSourceType.EXTERNAL_FEED) {
- if (ds.getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
+ if (ds.getDatasourceType() != AqlDataSourceType.FEED) {
+ if (((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
return true;
}
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index dc8d741..f2ca7d3 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -172,7 +172,8 @@
project.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
context.computeAndSetTypeEnvironmentForOperator(project);
context.computeAndSetTypeEnvironmentForOperator(assign);
- if (index.getIndexType() == IndexType.BTREE || index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
+ if (index.getIndexType() == IndexType.BTREE
+ || index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
|| index.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
|| index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
|| index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
index df6ed18..41eb883 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
@@ -62,7 +62,7 @@
//set the input of the new assign-operator to the input of the select-operator.
assignOperator.getInputs().add(childOfSelect);
-
+
//set the result value of the assign-operator to the condition of the select-operator
selectOperator.getCondition().setValue(new VariableReferenceExpression(v));//scalarFunctionCallExpression);
selectOperator.getInputs().set(0, new MutableObject<ILogicalOperator>(assignOperator));
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
index e44112e..ac6a690 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
@@ -218,7 +218,6 @@
}
/**
- *
* Pushes one field-access assignment above toPushThroughChildRef
*
* @param toPush
@@ -234,17 +233,11 @@
}
/**
- *
* Rewrite
- *
* assign $x := field-access($y, "field")
- *
* assign $y := record-constructor { "field": Expr, ... }
- *
* into
- *
* assign $x := Expr
- *
* assign $y := record-constructor { "field": Expr, ... }
*
* @param toPush
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
index 4203891..ac2e682 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
@@ -48,7 +48,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
index 004e221..d4abefe 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
index d33d674..4b7fd44 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
@@ -25,6 +25,8 @@
import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
import edu.uci.ics.asterix.metadata.entities.Dataset;
@@ -87,18 +89,7 @@
} else {
return false;
}
- AbstractLogicalOperator op2 = (AbstractLogicalOperator) access.getInputs().get(0).getValue();
- // If it's not an indexed field, it is pushed so that scan can be
- // rewritten into index search.
- if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(op, op2)
- && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
- return false;
- }
- boolean changed = propagateFieldAccessRec(opRef, context, finalAnnot);
- // if (changed) {
- // OptimizationUtil.typeOpRec(opRef, context);
- // }
- return changed;
+ return propagateFieldAccessRec(opRef, context, finalAnnot);
}
@SuppressWarnings("unchecked")
@@ -129,11 +120,12 @@
}
AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
AqlSourceId asid = ((IDataSource<AqlSourceId>) scan.getDataSource()).getId();
+
Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasetName());
if (dataset == null) {
throw new AlgebricksException("Dataset " + asid.getDatasetName() + " not found.");
}
- if (dataset.getDatasetType() != DatasetType.INTERNAL && dataset.getDatasetType() != DatasetType.FEED) {
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
return false;
}
ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
@@ -193,6 +185,12 @@
AssignOperator access = (AssignOperator) opRef.getValue();
Mutable<ILogicalOperator> opRef2 = access.getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+ // If it's not an indexed field, it is pushed so that scan can be
+ // rewritten into index search.
+ if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(access, op2)
+ && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
+ return false;
+ }
if (tryingToPushThroughSelectionWithSameDataSource(access, op2)) {
return false;
}
@@ -302,14 +300,16 @@
ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
if (e1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
IDataSource<AqlSourceId> dataSource = (IDataSource<AqlSourceId>) scan.getDataSource();
+ if (((AqlDataSource) dataSource).getDatasourceType().equals(AqlDataSourceType.FEED)) {
+ return false;
+ }
AqlSourceId asid = dataSource.getId();
AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasetName());
if (dataset == null) {
throw new AlgebricksException("Dataset " + asid.getDatasetName() + " not found.");
}
- if (dataset.getDatasetType() != DatasetType.INTERNAL
- && dataset.getDatasetType() != DatasetType.FEED) {
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
setAsFinal(access, context, finalAnnot);
return false;
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
index 4e3f460..af59445 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
@@ -41,12 +41,12 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-
public class PushGroupByThroughProduct implements IAlgebraicRewriteRule {
private enum PushTestResult {
- FALSE, TRUE, REPEATED_DECORS
+ FALSE,
+ TRUE,
+ REPEATED_DECORS
}
@Override
@@ -55,7 +55,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
index de44eba..2892d89 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
LogicalOperatorTag tag1 = op.getOperatorTag();
if (tag1 != LogicalOperatorTag.INNERJOIN && tag1 != LogicalOperatorTag.LEFTOUTERJOIN) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
index b339103..18ea39e 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
@@ -30,25 +30,22 @@
* a join (which may blow up the cardinality).
* Also, this rule may help to enable other rules such as common subexpression elimination, again to reduce
* the number of calls to expensive similarity functions.
- *
* Example:
- *
* Before plan:
* assign [$$10] <- [funcA(funcB(simFuncX($$3, $$4)))]
- * join (some condition)
- * join_branch_0 where $$3 and $$4 are not live
- * ...
- * join_branch_1 where $$3 and $$4 are live
- * ...
- *
+ * join (some condition)
+ * join_branch_0 where $$3 and $$4 are not live
+ * ...
+ * join_branch_1 where $$3 and $$4 are live
+ * ...
* After plan:
* assign [$$10] <- [funcA(funcB($$11))]
- * join (some condition)
- * join_branch_0 where $$3 and $$4 are not live
- * ...
- * join_branch_1 where $$3 and $$4 are live
- * assign[$$11] <- [simFuncX($$3, $$4)]
- * ...
+ * join (some condition)
+ * join_branch_0 where $$3 and $$4 are not live
+ * ...
+ * join_branch_1 where $$3 and $$4 are live
+ * assign[$$11] <- [simFuncX($$3, $$4)]
+ * ...
*/
public class PushSimilarityFunctionsBelowJoin extends PushFunctionsBelowJoin {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
index 51511aa..f42c983 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
@@ -68,7 +68,7 @@
}
@Override
- public boolean rewritePre( Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
// apply it only at the top of the plan
ILogicalOperator op = opRef.getValue();
if (context.checkIfInDontApplySet(this, op)) {
@@ -86,7 +86,7 @@
if (op.hasNestedPlans()) {
AbstractOperatorWithNestedPlans aonp = (AbstractOperatorWithNestedPlans) op;
for (ILogicalPlan p : aonp.getNestedPlans()) {
- for ( Mutable<ILogicalOperator> r : p.getRoots()) {
+ for (Mutable<ILogicalOperator> r : p.getRoots()) {
if (applyRuleDown(r, varSet, context)) {
changed = true;
}
@@ -94,7 +94,7 @@
}
}
}
- for ( Mutable<ILogicalOperator> i : op.getInputs()) {
+ for (Mutable<ILogicalOperator> i : op.getInputs()) {
if (applyRuleDown(i, varSet, context)) {
changed = true;
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
new file mode 100644
index 0000000..951cacf
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class RemoveSortInFeedIngestionRule implements IAlgebraicRewriteRule {
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+ AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+ if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ return false;
+ }
+
+ AbstractLogicalOperator insertOp = op;
+ AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+ boolean isSourceAFeed = false;
+ while (descendantOp != null) {
+ if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+ AqlDataSource dataSource = (AqlDataSource) ((DataSourceScanOperator) descendantOp).getDataSource();
+ if (dataSource.getDatasourceType().equals(AqlDataSourceType.FEED)) {
+ isSourceAFeed = true;
+ }
+ break;
+ }
+ if (descendantOp.getInputs().isEmpty()) {
+ break;
+ }
+ descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+ }
+
+ if (isSourceAFeed) {
+ AbstractLogicalOperator prevOp = (AbstractLogicalOperator) insertOp.getInputs().get(0).getValue();
+ if (prevOp.getOperatorTag() == LogicalOperatorTag.ORDER) {
+ insertOp.getInputs().set(0, prevOp.getInputs().get(0));
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+}
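
RemoveSortInFeedIngestionRule scans down the single-input chain below an INSERT_DELETE looking for a feed data-source scan; only when the source is a feed and the operator directly below the insert is an ORDER does it splice the sort out of the plan (feed ingestion has no use for a sorted stream). A compact sketch of that shape test and splice; NodeSketch is an illustrative stand-in for the operator tree.

    import java.util.ArrayList;
    import java.util.List;

    // NodeSketch is an illustrative stand-in for AbstractLogicalOperator.
    final class NodeSketch {
        final String tag; // e.g. "INSERT", "ORDER", "ASSIGN", "FEED_SCAN"
        final List<NodeSketch> inputs = new ArrayList<>();

        NodeSketch(String tag) {
            this.tag = tag;
        }

        static boolean removeSortOverFeed(NodeSketch insert) {
            // First pass: is the ultimate source of this insert a feed scan?
            NodeSketch cur = insert.inputs.get(0);
            boolean feed = false;
            while (cur != null) {
                if (cur.tag.equals("FEED_SCAN")) {
                    feed = true;
                    break;
                }
                cur = cur.inputs.isEmpty() ? null : cur.inputs.get(0);
            }
            // Second check: splice out an ORDER sitting directly below the insert.
            NodeSketch below = insert.inputs.get(0);
            if (feed && below.tag.equals("ORDER")) {
                insert.inputs.set(0, below.inputs.get(0));
                return true;
            }
            return false;
        }

        public static void main(String[] args) {
            NodeSketch insert = new NodeSketch("INSERT");
            NodeSketch order = new NodeSketch("ORDER");
            NodeSketch scan = new NodeSketch("FEED_SCAN");
            insert.inputs.add(order);
            order.inputs.add(scan);
            System.out.println(removeSortOverFeed(insert)); // true
            System.out.println(insert.inputs.get(0).tag);   // FEED_SCAN
        }
    }
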
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
index 432ef10..a6416ba 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
@@ -22,7 +22,7 @@
import org.apache.commons.lang3.mutable.Mutable;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -152,8 +152,8 @@
// only used primary key variables of those datascans.
for (int i = 0; i < dataScans.size(); i++) {
if (i > 0) {
- AqlDataSource prevAqlDataSource = (AqlDataSource) dataScans.get(i - 1).getDataSource();
- AqlDataSource currAqlDataSource = (AqlDataSource) dataScans.get(i).getDataSource();
+ DatasetDataSource prevAqlDataSource = (DatasetDataSource) dataScans.get(i - 1).getDataSource();
+ DatasetDataSource currAqlDataSource = (DatasetDataSource) dataScans.get(i).getDataSource();
if (!prevAqlDataSource.getDataset().equals(currAqlDataSource.getDataset())) {
return -1;
}
@@ -189,10 +189,10 @@
private void fillPKVars(DataSourceScanOperator dataScan, List<LogicalVariable> pkVars) {
pkVars.clear();
- AqlDataSource aqlDataSource = (AqlDataSource) dataScan.getDataSource();
+ DatasetDataSource datasetDataSource = (DatasetDataSource) dataScan.getDataSource();
pkVars.clear();
- if (aqlDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
- int numPKs = DatasetUtils.getPartitioningKeys(aqlDataSource.getDataset()).size();
+ if (datasetDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
+ int numPKs = DatasetUtils.getPartitioningKeys(datasetDataSource.getDataset()).size();
for (int i = 0; i < numPKs; i++) {
pkVars.add(dataScan.getVariables().get(i));
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
index 1fe7ee9..2550a34 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
@@ -24,6 +24,7 @@
import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -63,12 +64,13 @@
if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE) {
IndexInsertDeleteOperator indexInsertDeleteOperator = (IndexInsertDeleteOperator) descendantOp;
primaryKeyExprs = indexInsertDeleteOperator.getPrimaryKeyExpressions();
- datasetId = ((AqlDataSource) indexInsertDeleteOperator.getDataSourceIndex().getDataSource()).getDataset().getDatasetId();
+ datasetId = ((DatasetDataSource) indexInsertDeleteOperator.getDataSourceIndex().getDataSource())
+ .getDataset().getDatasetId();
break;
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) descendantOp;
primaryKeyExprs = insertDeleteOperator.getPrimaryKeyExpressions();
- datasetId = ((AqlDataSource) insertDeleteOperator.getDataSource()).getDataset().getDatasetId();
+ datasetId = ((DatasetDataSource) insertDeleteOperator.getDataSource()).getDataset().getDatasetId();
break;
}
descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
@@ -79,7 +81,7 @@
//copy primaryKeyExprs
List<LogicalVariable> primaryKeyLogicalVars = new ArrayList<LogicalVariable>();
for (Mutable<ILogicalExpression> expr : primaryKeyExprs) {
- VariableReferenceExpression varRefExpr = (VariableReferenceExpression)expr.getValue();
+ VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr.getValue();
primaryKeyLogicalVars.add(new LogicalVariable(varRefExpr.getVariableReference().getId()));
}
@@ -89,13 +91,14 @@
//create the logical and physical operator
CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars);
- CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars, mp.isWriteTransaction());
+ CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars,
+ mp.isWriteTransaction());
commitOperator.setPhysicalOperator(commitPOperator);
//create ExtensionOperator and put the commitOperator in it.
ExtensionOperator extensionOperator = new ExtensionOperator(commitOperator);
extensionOperator.setPhysicalOperator(commitPOperator);
-
+
//update plan link
extensionOperator.getInputs().add(sinkOperator.getInputs().get(0));
context.computeAndSetTypeEnvironmentForOperator(extensionOperator);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index f6b44f0..6744471 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -199,6 +199,7 @@
case DATETIME:
case DATE:
case TIME:
+ case UUID:
case DURATION:
case YEARMONTHDURATION:
case DAYTIMEDURATION:
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
index 32b2f59..5c42555 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
@@ -244,52 +244,56 @@
AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
// Remember args from original similarity function to add them to the similarity-check function later.
ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
- ScalarFunctionCallExpression simCheckFuncExpr = null;
+ ScalarFunctionCallExpression simCheckFuncExpr = null;
// Look for jaccard function call, and GE or GT.
if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD) {
IAObject jaccThresh;
- if (normFuncIdent == AlgebricksBuiltinFunctions.GE) {
+ if (normFuncIdent == AlgebricksBuiltinFunctions.GE) {
if (constVal.getObject() instanceof AFloat) {
- jaccThresh = constVal.getObject();
+ jaccThresh = constVal.getObject();
} else {
- jaccThresh = new AFloat((float)((ADouble) constVal.getObject()).getDoubleValue());
+ jaccThresh = new AFloat((float) ((ADouble) constVal.getObject()).getDoubleValue());
}
} else if (normFuncIdent == AlgebricksBuiltinFunctions.GT) {
- float threshVal = 0.0f;
+ float threshVal = 0.0f;
if (constVal.getObject() instanceof AFloat) {
threshVal = ((AFloat) constVal.getObject()).getFloatValue();
} else {
- threshVal = (float)((ADouble) constVal.getObject()).getDoubleValue();
+ threshVal = (float) ((ADouble) constVal.getObject()).getDoubleValue();
}
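+ // Convert the strict > into a >= check by nudging the threshold up by the
+ // smallest representable amount, capped at 1.0.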
float f = threshVal + Float.MIN_VALUE;
- if (f > 1.0f) f = 1.0f;
+ if (f > 1.0f)
+ f = 1.0f;
jaccThresh = new AFloat(f);
} else {
return null;
}
similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
similarityArgs.addAll(funcExpr.getArguments());
- similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(jaccThresh))));
+ similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+ jaccThresh))));
simCheckFuncExpr = new ScalarFunctionCallExpression(
FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK), similarityArgs);
}
// Look for edit-distance function call, and LE or LT.
- if(funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE) {
+ if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE) {
AInt32 aInt = (AInt32) constVal.getObject();
AInt32 edThresh;
if (normFuncIdent == AlgebricksBuiltinFunctions.LE) {
edThresh = aInt;
} else if (normFuncIdent == AlgebricksBuiltinFunctions.LT) {
int ed = aInt.getIntegerValue() - 1;
- if (ed < 0) ed = 0;
+ if (ed < 0)
+ ed = 0;
edThresh = new AInt32(ed);
} else {
return null;
}
similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
similarityArgs.addAll(funcExpr.getArguments());
- similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(edThresh))));
+ similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+ edThresh))));
simCheckFuncExpr = new ScalarFunctionCallExpression(
FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK), similarityArgs);
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
new file mode 100644
index 0000000..96e5c4b
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
@@ -0,0 +1,264 @@
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
+
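+/**
+ * Rejects plans that use non-functional functions in positions where they are
+ * not legal: aggregate, running-aggregate, group-by, decor, order-by, distinct,
+ * join-condition, and partitioning-split expressions are all swept, and an
+ * AlgebricksException is thrown as soon as an offending call is found.
+ */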
+public class SweepIllegalNonfunctionalFunctions extends AbstractExtractExprRule implements IAlgebraicRewriteRule {
+
+ private final IllegalNonfunctionalFunctionSweeperOperatorVisitor visitor;
+
+ public SweepIllegalNonfunctionalFunctions() {
+ visitor = new IllegalNonfunctionalFunctionSweeperOperatorVisitor();
+ }
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+ ILogicalOperator op = opRef.getValue();
+ if (context.checkIfInDontApplySet(this, op)) {
+ return false;
+ }
+
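+ // The visitor throws on any illegal call and otherwise leaves the plan
+ // untouched, so this rule never reports having fired.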
+ op.accept(visitor, null);
+ context.computeAndSetTypeEnvironmentForOperator(op);
+ context.addToDontApplySet(this, op);
+ return false;
+ }
+
+ private class IllegalNonfunctionalFunctionSweeperOperatorVisitor implements ILogicalOperatorVisitor<Void, Void> {
+
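+ // Only function-call expressions can be non-functional; everything else
+ // passes through unchecked.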
+ private void sweepExpression(ILogicalExpression expr, ILogicalOperator op) throws AlgebricksException {
+ if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ if (!expr.isFunctional()) {
+ AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+ throw new AlgebricksException("Found non-functional function " + fce.getFunctionIdentifier()
+ + " in op " + op);
+ }
+ }
+ }
+
+ @Override
+ public Void visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
+ for (Mutable<ILogicalExpression> me : op.getExpressions()) {
+ sweepExpression(me.getValue(), op);
+ }
+ List<Mutable<ILogicalExpression>> mergeExprs = op.getMergeExpressions();
+ if (mergeExprs != null) {
+ for (Mutable<ILogicalExpression> me : mergeExprs) {
+ sweepExpression(me.getValue(), op);
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitRunningAggregateOperator(RunningAggregateOperator op, Void arg) throws AlgebricksException {
+ for (Mutable<ILogicalExpression> me : op.getExpressions()) {
+ sweepExpression(me.getValue(), op);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
+ for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getGroupByList()) {
+ sweepExpression(p.second.getValue(), op);
+ }
+ for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getDecorList()) {
+ sweepExpression(p.second.getValue(), op);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
+ sweepExpression(op.getCondition().getValue(), op);
+ return null;
+ }
+
+ @Override
+ public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg) throws AlgebricksException {
+ sweepExpression(op.getCondition().getValue(), op);
+ return null;
+ }
+
+ @Override
+ public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
+ for (Pair<IOrder, Mutable<ILogicalExpression>> p : op.getOrderExpressions()) {
+ sweepExpression(p.second.getValue(), op);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
+ for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
+ sweepExpression(expr.getValue(), op);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
+ for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
+ sweepExpression(expr.getValue(), op);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitInsertDeleteOperator(InsertDeleteOperator op, Void tag) throws AlgebricksException {
+ return null;
+ }
+
+ @Override
+ public Void visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void tag) throws AlgebricksException {
+ return null;
+ }
+
+ }
+
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
index 7ca6e3c..e644459 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
@@ -20,12 +20,16 @@
import org.apache.commons.lang3.mutable.Mutable;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.declared.ExternalFeedDataSource;
+import edu.uci.ics.asterix.metadata.declared.FeedDataSource;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
@@ -105,14 +109,13 @@
ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
- if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
+ if (dataset.getDatasetType() == DatasetType.INTERNAL) {
int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
for (int i = 0; i < numPrimaryKeys; i++) {
v.add(context.newVar());
}
}
v.add(unnest.getVariable());
-
DataSourceScanOperator scan = new DataSourceScanOperator(v, metadataProvider.findDataSource(asid));
List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
scanInpList.addAll(unnest.getInputs());
@@ -127,40 +130,36 @@
if (unnest.getPositionalVariable() != null) {
throw new AlgebricksException("No positional variables are allowed over datasets.");
}
- ILogicalExpression expr = f.getArguments().get(0).getValue();
- if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
- return false;
- }
- ConstantExpression ce = (ConstantExpression) expr;
- IAlgebricksConstantValue acv = ce.getValue();
- if (!(acv instanceof AsterixConstantValue)) {
- return false;
- }
- AsterixConstantValue acv2 = (AsterixConstantValue) acv;
- if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
- return false;
- }
- String datasetArg = ((AString) acv2.getObject()).getStringValue();
+
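+ // The feed function carries three string constants: the feed name, the
+ // output type, and the dataset the feed is connected to.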
+ String feedArg = getStringArgument(f, 0);
+ String outputType = getStringArgument(f, 1);
+ String targetDataset = getStringArgument(f, 2);
AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
- Pair<String, String> datasetReference = parseDatasetReference(metadataProvider, datasetArg);
- String dataverseName = datasetReference.first;
- String datasetName = datasetReference.second;
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
- if (dataset == null) {
- throw new AlgebricksException("Could not find dataset " + datasetName);
+ Pair<String, String> feedReference = parseDatasetReference(metadataProvider, feedArg);
+ String dataverseName = feedReference.first;
+ String feedName = feedReference.second;
+ Feed feed = metadataProvider.findFeed(dataverseName, feedName);
+ if (feed == null) {
+ throw new AlgebricksException("Could not find feed " + feedName);
}
- if (dataset.getDatasetType() != DatasetType.FEED) {
- throw new IllegalArgumentException("invalid dataset type:" + dataset.getDatasetType());
+ AqlSourceId asid = new AqlSourceId(dataverseName, feedName);
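+ // The policy name comes from the compiler configuration; look it up in the
+ // metadata first and fall back to the built-in policies before giving up.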
+ String policyName = metadataProvider.getConfig().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+ FeedPolicy policy = metadataProvider.findFeedPolicy(dataverseName, policyName);
+ if (policy == null) {
+ policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
+ if (policy == null) {
+ throw new AlgebricksException("Unknown feed policy:" + policyName);
+ }
}
- AqlSourceId asid = new AqlSourceId(dataverseName, datasetName);
ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
v.add(unnest.getVariable());
- DataSourceScanOperator scan = new DataSourceScanOperator(v, createDummyFeedDataSource(asid, dataset,
- metadataProvider));
+ DataSourceScanOperator scan = new DataSourceScanOperator(v, createFeedDataSource(asid,
+ new FeedConnectionId(dataverseName, feedName, targetDataset), metadataProvider, policy,
+ outputType));
List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
scanInpList.addAll(unnest.getInputs());
@@ -170,6 +169,7 @@
return true;
}
+
}
return false;
@@ -184,18 +184,18 @@
context.addPrimaryKey(pk);
}
- private AqlDataSource createDummyFeedDataSource(AqlSourceId aqlId, Dataset dataset,
- AqlMetadataProvider metadataProvider) throws AlgebricksException {
+ private AqlDataSource createFeedDataSource(AqlSourceId aqlId, FeedConnectionId feedId,
+ AqlMetadataProvider metadataProvider, FeedPolicy feedPolicy, String outputType) throws AlgebricksException {
if (!aqlId.getDataverseName().equals(
metadataProvider.getDefaultDataverse() == null ? null : metadataProvider.getDefaultDataverse()
.getDataverseName())) {
return null;
}
- String tName = dataset.getItemTypeName();
- IAType itemType = metadataProvider.findType(dataset.getDataverseName(), tName);
- ExternalFeedDataSource extDataSource = new ExternalFeedDataSource(aqlId, dataset, itemType,
- AqlDataSource.AqlDataSourceType.EXTERNAL_FEED);
- return extDataSource;
+ IAType feedOutputType = metadataProvider.findType(feedId.getDataverse(), outputType);
+ FeedDataSource feedDataSource = new FeedDataSource(aqlId, feedId, feedOutputType,
+ AqlDataSource.AqlDataSourceType.FEED);
+ feedDataSource.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy);
+ return feedDataSource;
}
private Pair<String, String> parseDatasetReference(AqlMetadataProvider metadataProvider, String datasetArg)
@@ -216,4 +216,23 @@
}
return new Pair<String, String>(dataverseName, datasetName);
}
+
+ private String getStringArgument(AbstractFunctionCallExpression f, int index) {
+
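+ // Return the index-th argument only if it is a constant AsterixDB string;
+ // otherwise return null so the caller can reject the expression.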
+ ILogicalExpression expr = f.getArguments().get(index).getValue();
+ if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+ return null;
+ }
+ ConstantExpression ce = (ConstantExpression) expr;
+ IAlgebricksConstantValue acv = ce.getValue();
+ if (!(acv instanceof AsterixConstantValue)) {
+ return null;
+ }
+ AsterixConstantValue acv2 = (AsterixConstantValue) acv;
+ if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
+ return null;
+ }
+ String argument = ((AString) acv2.getObject()).getStringValue();
+ return argument;
+ }
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
index d4c00e4..6b68b7f 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -132,7 +132,7 @@
Iterator<Map.Entry<Index, List<Integer>>> it = analysisCtx.indexExprs.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<Index, List<Integer>> entry = it.next();
- Index index = entry.getKey();
+ Index index = entry.getKey();
boolean allUsed = true;
int lastFieldMatched = -1;
for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
@@ -266,8 +266,8 @@
return true;
}
- protected void fillAllIndexExprs(OptimizableOperatorSubTree subTree,
- AccessMethodAnalysisContext analysisCtx) throws AlgebricksException {
+ protected void fillAllIndexExprs(OptimizableOperatorSubTree subTree, AccessMethodAnalysisContext analysisCtx)
+ throws AlgebricksException {
for (int optFuncExprIndex = 0; optFuncExprIndex < analysisCtx.matchedFuncExprs.size(); optFuncExprIndex++) {
IOptimizableFuncExpr optFuncExpr = analysisCtx.matchedFuncExprs.get(optFuncExprIndex);
// Try to match variables from optFuncExpr to assigns.
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
index ab0fd79..fc0c562 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -149,7 +149,7 @@
switch (index.getIndexType()) {
case BTREE:
case SINGLE_PARTITION_WORD_INVIX:
- case SINGLE_PARTITION_NGRAM_INVIX:
+ case SINGLE_PARTITION_NGRAM_INVIX:
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX: {
return index.getKeyFieldNames().size();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
index ddcf768..7a15c32 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -27,10 +27,11 @@
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
@@ -565,7 +566,7 @@
private ILogicalExpression createSelectCondition(List<Mutable<ILogicalExpression>> predList) {
if (predList.size() > 1) {
- IFunctionInfo finfo = AsterixBuiltinFunctions.getAsterixFunctionInfo(AlgebricksBuiltinFunctions.AND);
+ IFunctionInfo finfo = FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.AND);
return new ScalarFunctionCallExpression(finfo, predList);
}
return predList.get(0).getValue();
@@ -579,6 +580,11 @@
return false;
}
}
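+ // Honor the skip-index hint: do not use a secondary index when the
+ // expression is annotated with SkipSecondaryIndexSearchExpressionAnnotation.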
+ if (!index.isPrimaryIndex()
+ && optFuncExpr.getFuncExpr().getAnnotations()
+ .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+ return false;
+ }
// No additional analysis required for BTrees.
return true;
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
index 507a085..5ec9702 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
@@ -25,16 +25,26 @@
*/
public interface IOptimizableFuncExpr {
public AbstractFunctionCallExpression getFuncExpr();
+
public int getNumLogicalVars();
+
public int getNumConstantVals();
+
public LogicalVariable getLogicalVar(int index);
+
public void setFieldName(int index, String fieldName);
+
public String getFieldName(int index);
+
public void setOptimizableSubTree(int index, OptimizableOperatorSubTree subTree);
+
public OptimizableOperatorSubTree getOperatorSubTree(int index);
+
public IAlgebricksConstantValue getConstantVal(int index);
-
+
public int findLogicalVar(LogicalVariable var);
+
public int findFieldName(String fieldName);
+
public void substituteVar(LogicalVariable original, LogicalVariable substitution);
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
index b1e0a46..8790c7c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
@@ -37,27 +37,22 @@
/**
* This rule optimizes a join with secondary indexes into an indexed nested-loop join.
- *
* Matches the following operator pattern:
* (join) <-- (select)? <-- (assign)+ <-- (datasource scan)
- * <-- (select)? <-- (assign)+ <-- (datasource scan)
- *
- * Replaces the above pattern with the following simplified plan:
+ * <-- (select)? <-- (assign)+ <-- (datasource scan)
+ * Replaces the above pattern with the following simplified plan:
* (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest(index search)) <-- (assign) <-- (datasource scan)
* The sort is optional, and some access methods may choose not to sort.
- *
* Note that for some index-based optimizations we do not remove the triggering
* condition from the join, since the secondary index may only act as a filter, and the
* final verification must still be done with the original join condition.
- *
- * The basic outline of this rule is:
- * 1. Match operator pattern.
- * 2. Analyze join condition to see if there are optimizable functions (delegated to IAccessMethods).
- * 3. Check metadata to see if there are applicable indexes.
+ * The basic outline of this rule is:
+ * 1. Match operator pattern.
+ * 2. Analyze join condition to see if there are optimizable functions (delegated to IAccessMethods).
+ * 3. Check metadata to see if there are applicable indexes.
* 4. Choose an index to apply (for now only a single index will be chosen).
* 5. Rewrite plan using index (delegated to IAccessMethods).
- *
- * TODO (Alex): Currently this rule requires a data scan on both inputs of the join. I should generalize the pattern
+ * TODO (Alex): Currently this rule requires a data scan on both inputs of the join. I should generalize the pattern
* to accept any subtree on one side, as long as the other side has a datasource scan.
*/
public class IntroduceJoinAccessMethodRule extends AbstractIntroduceAccessMethodRule {
@@ -73,7 +68,7 @@
static {
registerAccessMethod(BTreeAccessMethod.INSTANCE, accessMethods);
registerAccessMethod(RTreeAccessMethod.INSTANCE, accessMethods);
- registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
+ registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
}
@Override
@@ -133,7 +128,7 @@
boolean res = chosenIndex.first.applyJoinPlanTransformation(joinRef, leftSubTree, rightSubTree,
chosenIndex.second, analysisCtx, context);
if (res) {
- OperatorPropertiesUtil.typeOpRec(opRef, context);
+ OperatorPropertiesUtil.typeOpRec(opRef, context);
}
context.addToDontApplySet(this, join);
return res;
@@ -170,7 +165,7 @@
public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
return accessMethods;
}
-
+
private void clear() {
joinRef = null;
join = null;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
index d71c5ef..47efeb0 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
@@ -39,7 +39,6 @@
* This rule optimizes simple selections with secondary or primary indexes. The use of an
* index is expressed as an unnest-map over an index-search function which will be
* replaced with the appropriate embodiment during codegen.
- *
* Matches the following operator patterns:
* Standard secondary index pattern:
* There must be at least one assign, but there may be more, e.g., when matching similarity-jaccard-check().
@@ -47,22 +46,18 @@
* Primary index lookup pattern:
* No assign is necessary to get the primary key fields (they are already stored fields in the BTree tuples).
* (select) <-- (datasource scan)
- *
* Replaces the above patterns with this plan:
* (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest-map(index search)) <-- (assign)
* The sort is optional, and some access methods implementations may choose not to sort.
- *
* Note that for some index-based optimizations we do not remove the triggering
* condition from the select, since the index may only act as a filter, and the
* final verification must still be done with the original select condition.
- *
- * The basic outline of this rule is:
- * 1. Match operator pattern.
- * 2. Analyze select condition to see if there are optimizable functions (delegated to IAccessMethods).
- * 3. Check metadata to see if there are applicable indexes.
+ * The basic outline of this rule is:
+ * 1. Match operator pattern.
+ * 2. Analyze select condition to see if there are optimizable functions (delegated to IAccessMethods).
+ * 3. Check metadata to see if there are applicable indexes.
* 4. Choose an index to apply (for now only a single index will be chosen).
* 5. Rewrite plan using index (delegated to IAccessMethods).
- *
*/
public class IntroduceSelectAccessMethodRule extends AbstractIntroduceAccessMethodRule {
@@ -149,10 +144,10 @@
public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
return accessMethods;
}
-
+
private void clear() {
selectRef = null;
select = null;
selectCond = null;
}
-}
+}
\ No newline at end of file
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
index fb8becc..6636d07 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
@@ -25,6 +25,7 @@
import edu.uci.ics.asterix.algebra.base.LogicalOperatorDeepCopyVisitor;
import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryTokenizerFactoryProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
@@ -766,6 +767,10 @@
@Override
public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+ if (optFuncExpr.getFuncExpr().getAnnotations()
+ .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+ return false;
+ }
if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK) {
// Must be for a join query.
if (optFuncExpr.getNumConstantVals() == 1) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
index 3ebde07..f1b93fe 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
@@ -23,63 +23,65 @@
* number of constant args, variable args and field names.
*/
public class OptimizableFuncExpr implements IOptimizableFuncExpr {
- protected final AbstractFunctionCallExpression funcExpr;
+ protected final AbstractFunctionCallExpression funcExpr;
protected final LogicalVariable[] logicalVars;
protected final String[] fieldNames;
protected final OptimizableOperatorSubTree[] subTrees;
protected final IAlgebricksConstantValue[] constantVals;
-
- public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable[] logicalVars, IAlgebricksConstantValue[] constantVals) {
- this.funcExpr = funcExpr;
- this.logicalVars = logicalVars;
- this.constantVals = constantVals;
- this.fieldNames = new String[logicalVars.length];
- this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
+
+ public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable[] logicalVars,
+ IAlgebricksConstantValue[] constantVals) {
+ this.funcExpr = funcExpr;
+ this.logicalVars = logicalVars;
+ this.constantVals = constantVals;
+ this.fieldNames = new String[logicalVars.length];
+ this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
}
-
+
// Special, more convenient c'tor for simple binary functions.
- public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable logicalVar, IAlgebricksConstantValue constantVal) {
- this.funcExpr = funcExpr;
- this.logicalVars = new LogicalVariable[] { logicalVar };
- this.constantVals = new IAlgebricksConstantValue[] { constantVal };
- this.fieldNames = new String[logicalVars.length];
- this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
+ public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable logicalVar,
+ IAlgebricksConstantValue constantVal) {
+ this.funcExpr = funcExpr;
+ this.logicalVars = new LogicalVariable[] { logicalVar };
+ this.constantVals = new IAlgebricksConstantValue[] { constantVal };
+ this.fieldNames = new String[logicalVars.length];
+ this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
}
-
- @Override
- public AbstractFunctionCallExpression getFuncExpr() {
- return funcExpr;
- }
-
- @Override
- public int getNumLogicalVars() {
- return logicalVars.length;
- }
-
- @Override
- public int getNumConstantVals() {
- return constantVals.length;
- }
-
- @Override
- public LogicalVariable getLogicalVar(int index) {
- return logicalVars[index];
- }
-
- @Override
- public void setFieldName(int index, String fieldName) {
- fieldNames[index] = fieldName;
- }
-
- @Override
- public String getFieldName(int index) {
- return fieldNames[index];
- }
-
- @Override
- public IAlgebricksConstantValue getConstantVal(int index) {
- return constantVals[index];
- }
+
+ @Override
+ public AbstractFunctionCallExpression getFuncExpr() {
+ return funcExpr;
+ }
+
+ @Override
+ public int getNumLogicalVars() {
+ return logicalVars.length;
+ }
+
+ @Override
+ public int getNumConstantVals() {
+ return constantVals.length;
+ }
+
+ @Override
+ public LogicalVariable getLogicalVar(int index) {
+ return logicalVars[index];
+ }
+
+ @Override
+ public void setFieldName(int index, String fieldName) {
+ fieldNames[index] = fieldName;
+ }
+
+ @Override
+ public String getFieldName(int index) {
+ return fieldNames[index];
+ }
+
+ @Override
+ public IAlgebricksConstantValue getConstantVal(int index) {
+ return constantVals[index];
+ }
@Override
public int findLogicalVar(LogicalVariable var) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
index 28aee7a..334d411 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -114,7 +114,7 @@
if (dataset == null) {
throw new AlgebricksException("No metadata for dataset " + datasetName);
}
- if (dataset.getDatasetType() != DatasetType.INTERNAL && dataset.getDatasetType() != DatasetType.FEED) {
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
return false;
}
// Get the record type for that dataset.
@@ -129,7 +129,7 @@
public boolean hasDataSourceScan() {
return dataSourceScan != null;
}
-
+
public void reset() {
root = null;
rootRef = null;
@@ -140,7 +140,7 @@
dataset = null;
recordType = null;
}
-
+
public void getPrimaryKeyVars(List<LogicalVariable> target) {
int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
for (int i = 0; i < numPrimaryKeys; i++) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
index b8125aa..c714aa9 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
@@ -21,6 +21,7 @@
import org.apache.commons.lang3.mutable.MutableObject;
import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Index;
@@ -204,6 +205,10 @@
@Override
public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+ if (optFuncExpr.getFuncExpr().getAnnotations()
+ .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+ return false;
+ }
// No additional analysis required.
return true;
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
index 1be4f40..47b4b7a 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -29,7 +29,6 @@
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -253,6 +252,10 @@
*/
private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
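+ // Static record casts only make sense for record constructors; bail out
+ // for any other function call so no cast is attempted here.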
+ if (!(func.getFunctionIdentifier() == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR || func
+ .getFunctionIdentifier() == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+ return false;
+ }
IAType[] reqFieldTypes = reqType.getFieldTypes();
String[] reqFieldNames = reqType.getFieldNames();
IAType[] inputFieldTypes = inputType.getFieldTypes();
@@ -329,7 +332,7 @@
matched = true;
ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
- new AsterixFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
//wrap the not null function to the original function
func.getArguments().get(2 * i + 1).setValue(notNullFunc);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
index 94c074f..1d6b1c3 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
@@ -16,6 +16,8 @@
import java.util.Map;
import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
@@ -30,6 +32,7 @@
import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
@@ -39,10 +42,38 @@
*/
public abstract class AbstractAqlTranslator {
+ protected static final Logger LOGGER = Logger.getLogger(AbstractAqlTranslator.class.getName());
+
protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
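+ // Before validating any statement, give the cluster a chance to come up:
+ // poll once per second until it reports ACTIVE or the configured maximum
+ // number of wait cycles elapses.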
+ if (!AsterixClusterProperties.INSTANCE.getState().equals(AsterixClusterProperties.State.ACTIVE)) {
+ int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
+ int waitCycleCount = 0;
+ try {
+ while (!AsterixClusterProperties.INSTANCE.getState().equals(AsterixClusterProperties.State.ACTIVE)
+ && waitCycleCount < maxWaitCycles) {
+ Thread.sleep(1000);
+ waitCycleCount++;
+ }
+ } catch (InterruptedException e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Thread interrupted while waiting for cluster to be "
+ + AsterixClusterProperties.State.ACTIVE);
+ }
+ }
+ if (!AsterixClusterProperties.INSTANCE.getState().equals(AsterixClusterProperties.State.ACTIVE)) {
+ throw new AsterixException(" Asterix Cluster is in " + AsterixClusterProperties.State.UNUSABLE
+ + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Cluster is now " + AsterixClusterProperties.State.ACTIVE);
+ }
+ }
+ }
+
if (AsterixClusterProperties.INSTANCE.getState().equals(AsterixClusterProperties.State.UNUSABLE)) {
throw new AsterixException(" Asterix Cluster is in " + AsterixClusterProperties.State.UNUSABLE + " state."
+ "\n One or more Node Controllers have left.\n");
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
index 7b12550..4e82d1c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
@@ -26,20 +26,22 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Expression.Kind;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -56,7 +58,7 @@
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -93,13 +95,15 @@
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
import edu.uci.ics.asterix.metadata.declared.ResultSetDataSink;
import edu.uci.ics.asterix.metadata.declared.ResultSetSinkId;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Function;
+import edu.uci.ics.asterix.metadata.functions.ExternalFunctionCompilerUtil;
import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AString;
@@ -153,7 +157,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
@@ -221,25 +224,23 @@
@SuppressWarnings("unchecked")
/** This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest it; otherwise do nothing. */
AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
- new MutableObject<ILogicalExpression>(
- new ScalarFunctionCallExpression(AsterixBuiltinFunctions
- .getAsterixFunctionInfo(AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE),
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)))));
+ new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE),
+ new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)))));
assignCollectionToSequence.getInputs().add(
new MutableObject<ILogicalOperator>(project.getInputs().get(0).getValue()));
project.getInputs().get(0).setValue(assignCollectionToSequence);
project.getVariables().set(0, seqVar);
resVar = seqVar;
- AqlDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
+ DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
stmt.getDatasetName());
ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
List<String> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
for (String keyFieldName : partitionKeys) {
- IFunctionInfo finfoAccess = AsterixBuiltinFunctions
- .getAsterixFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
+ IFunctionInfo finfoAccess = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
@SuppressWarnings("unchecked")
ScalarFunctionCallExpression f = new ScalarFunctionCallExpression(finfoAccess,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
@@ -274,7 +275,7 @@
leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(deleteOp));
break;
}
- case BEGIN_FEED: {
+ case CONNECT_FEED: {
ILogicalOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef, varRefsForLoading,
InsertDeleteOperator.Kind.INSERT);
insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
@@ -290,20 +291,21 @@
return plan;
}
- private AqlDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
+ private DatasetDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
String datasetName) throws AlgebricksException {
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new AlgebricksException("Cannot find dataset " + datasetName + " in dataverse " + dataverseName);
}
-
- AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
- String itemTypeName = dataset.getItemTypeName();
- IAType itemType = metadataProvider.findType(dataverseName, itemTypeName);
- AqlDataSource dataSource = new AqlDataSource(sourceId, dataset, itemType);
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
throw new AlgebricksException("Cannot write output to an external dataset.");
}
+ AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType = metadataProvider.findType(dataverseName, itemTypeName);
+ DatasetDataSource dataSource = new DatasetDataSource(sourceId, dataset.getDataverseName(),
+ dataset.getDatasetName(), itemType, AqlDataSourceType.INTERNAL_DATASET);
+
return dataSource;
}
@@ -509,13 +511,18 @@
return null;
}
AbstractFunctionCallExpression f = null;
- if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
- IFunctionInfo finfo = new AsterixFunctionInfo(signature);
- return new ScalarFunctionCallExpression(finfo, args);
+ if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_JAVA)) {
+ IFunctionInfo finfo = ExternalFunctionCompilerUtil.getExternalFunctionInfo(
+ metadataProvider.getMetadataTxnContext(), function);
+ f = new ScalarFunctionCallExpression(finfo, args);
+ } else if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
+ IFunctionInfo finfo = FunctionUtils.getFunctionInfo(signature);
+ f = new ScalarFunctionCallExpression(finfo, args);
} else {
throw new MetadataException(" User defined functions written in " + function.getLanguage()
+ " are not supported");
}
+ return f;
}
private AbstractFunctionCallExpression lookupBuiltinFunction(String functionName, int arity,
@@ -1256,7 +1263,7 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitLoadFromFileStatement(LoadFromFileStatement stmtLoad,
+ public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -1270,7 +1277,7 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitControlFeedStatement(ControlFeedStatement del,
+ public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -1426,7 +1433,21 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitBeginFeedStatement(BeginFeedStatement bf,
+ public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedStatement(CreateFeedStatement del,
+ Mutable<ILogicalOperator> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
+ Mutable<ILogicalOperator> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -1438,4 +1459,4 @@
// TODO Auto-generated method stub
return null;
}
-}
+}
\ No newline at end of file
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
index 4d11dbd..1b37303 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -26,11 +26,12 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Expression.Kind;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
@@ -40,6 +41,7 @@
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -58,7 +60,7 @@
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.MetaVariableClause;
import edu.uci.ics.asterix.aql.expression.MetaVariableExpr;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
@@ -1207,7 +1209,7 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitLoadFromFileStatement(LoadFromFileStatement stmtLoad,
+ public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -1423,7 +1425,7 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitControlFeedStatement(ControlFeedStatement del,
+ public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -1444,7 +1446,21 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitBeginFeedStatement(BeginFeedStatement bf,
+ public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedStatement(CreateFeedStatement del,
+ Mutable<ILogicalOperator> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
+ Mutable<ILogicalOperator> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
Mutable<ILogicalOperator> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index d9aa047..e403cce 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -22,7 +22,6 @@
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Statement.Kind;
import edu.uci.ics.asterix.aql.expression.CallExpr;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement.OperationType;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
@@ -282,7 +281,7 @@
@Override
public Kind getKind() {
- return Kind.LOAD_FROM_FILE;
+ return Kind.LOAD;
}
}
@@ -321,15 +320,20 @@
}
}
- public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
+ public static class CompiledConnectFeedStatement implements ICompiledDmlStatement {
private String dataverseName;
+ private String feedName;
private String datasetName;
+ private String policyName;
private Query query;
private int varCounter;
- public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+ public CompiledConnectFeedStatement(String dataverseName, String feedName, String datasetName,
+ String policyName, Query query, int varCounter) {
this.dataverseName = dataverseName;
+ this.feedName = feedName;
this.datasetName = datasetName;
+ this.policyName = policyName;
this.query = query;
this.varCounter = varCounter;
}
@@ -339,6 +343,10 @@
return dataverseName;
}
+ public String getFeedName() {
+ return feedName;
+ }
+
@Override
public String getDatasetName() {
return datasetName;
@@ -358,24 +366,25 @@
@Override
public Kind getKind() {
- return Kind.BEGIN_FEED;
+ return Kind.CONNECT_FEED;
+ }
+
+ public String getPolicyName() {
+ return policyName;
}
}
- public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
+ public static class CompiledDisconnectFeedStatement implements ICompiledDmlStatement {
private String dataverseName;
private String datasetName;
- private OperationType operationType;
+ private String feedName;
private Query query;
private int varCounter;
- private Map<String, String> alteredParams;
- public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
- Map<String, String> alteredParams) {
+ public CompiledDisconnectFeedStatement(String dataverseName, String feedName, String datasetName) {
this.dataverseName = dataverseName;
+ this.feedName = feedName;
this.datasetName = datasetName;
- this.operationType = operationType;
- this.alteredParams = alteredParams;
}
@Override
@@ -388,8 +397,8 @@
return datasetName;
}
- public OperationType getOperationType() {
- return operationType;
+ public String getFeedName() {
+ return feedName;
}
public int getVarCounter() {
@@ -402,16 +411,9 @@
@Override
public Kind getKind() {
- return Kind.CONTROL_FEED;
+ return Kind.DISCONNECT_FEED;
}
- public Map<String, String> getProperties() {
- return alteredParams;
- }
-
- public void setProperties(Map<String, String> properties) {
- this.alteredParams = properties;
- }
}
public static class CompiledDeleteStatement implements ICompiledDmlStatement {
@@ -554,4 +556,4 @@
}
}
-}
+}
\ No newline at end of file
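
Since the constructor shapes changed, a short usage sketch of the renamed compiled statements may help: connect now carries a feed name and an ingestion-policy name, while disconnect drops the old operation type and altered-params map entirely. Every literal below is an invented placeholder, not a value from the repository.

    // Hypothetical usage; all literals are made-up examples.
    Query query = null;   // placeholder: the connect query built by the translator
    int varCounter = 0;   // placeholder: the statement's variable counter
    CompiledConnectFeedStatement connect = new CompiledConnectFeedStatement(
            "feeds", "TwitterFeed", "Tweets", "BasicPolicy", query, varCounter);
    CompiledDisconnectFeedStatement disconnect =
            new CompiledDisconnectFeedStatement("feeds", "TwitterFeed", "Tweets");
    assert connect.getKind() == Kind.CONNECT_FEED;
    assert disconnect.getKind() == Kind.DISCONNECT_FEED;
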
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
index 88276d0..998bc2d 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
@@ -50,7 +50,7 @@
}
case LONG: {
LongIntegerLiteral il = (LongIntegerLiteral) valLiteral;
- return new AInt64(il.getValue());
+ return new AInt64(il.getValue());
}
case NULL: {
return ANull.NULL;
diff --git a/asterix-algebra/src/main/javacc/AQLPlus.jj b/asterix-algebra/src/main/javacc/AQLPlus.jj
index 75179b1..95c6b2d 100644
--- a/asterix-algebra/src/main/javacc/AQLPlus.jj
+++ b/asterix-algebra/src/main/javacc/AQLPlus.jj
@@ -192,7 +192,7 @@
}
}
-LoadFromFileStatement LoadStatement() throws ParseException:
+LoadStatement LoadStatement() throws ParseException:
{
Identifier datasetName = null;
boolean alreadySorted = false;
@@ -256,7 +256,7 @@
";"
{
- return new LoadFromFileStatement(null, datasetName, adapter, properties, alreadySorted);
+ return new LoadStatement(null, datasetName, adapter, properties, alreadySorted);
}
}
diff --git a/asterix-app/.gitignore b/asterix-app/.gitignore
deleted file mode 100644
index ea8c4bf..0000000
--- a/asterix-app/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/target
diff --git a/asterix-app/data/csv/fragile_02.adm b/asterix-app/data/csv/fragile_02.adm
new file mode 100644
index 0000000..92b4477
--- /dev/null
+++ b/asterix-app/data/csv/fragile_02.adm
Binary files differ
diff --git a/asterix-app/data/twitter/tw_messages_100.adm b/asterix-app/data/twitter/tw_messages_100.adm
new file mode 100644
index 0000000..f60be3e
--- /dev/null
+++ b/asterix-app/data/twitter/tw_messages_100.adm
@@ -0,0 +1,100 @@
+{ "tweetid": 1i64, "user": { "screen-name": "EdwardLeslie#333", "lang": "en", "friends_count": 31, "statuses_count": 107, "name": "Edward Leslie", "followers_count": 80 }, "sender-location": point("29.37,78.8"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "at&t", "network" }}, "message-text": " can't stand at&t the network is terrible:(" }
+{ "tweetid": 2i64, "user": { "screen-name": "PenniBauerle$865", "lang": "en", "friends_count": 32, "statuses_count": 308, "name": "Penni Bauerle", "followers_count": 97 }, "sender-location": point("37.99,83.51"), "send-time": datetime("2011-09-23T10:10:00.000Z"), "referred-topics": {{ "iphone", "plan" }}, "message-text": " love iphone its plan is awesome" }
+{ "tweetid": 3i64, "user": { "screen-name": "TrudiSaline$17", "lang": "en", "friends_count": 2, "statuses_count": 248, "name": "Trudi Saline", "followers_count": 154 }, "sender-location": point("48.17,93.4"), "send-time": datetime("2007-07-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "3G" }}, "message-text": " like sprint its 3G is good:)" }
+{ "tweetid": 4i64, "user": { "screen-name": "EdytheMurray#502", "lang": "en", "friends_count": 23, "statuses_count": 142, "name": "Edythe Murray", "followers_count": 164 }, "sender-location": point("24.63,90.02"), "send-time": datetime("2008-03-16T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voice-clarity" }}, "message-text": " like t-mobile the voice-clarity is good:)" }
+{ "tweetid": 5i64, "user": { "screen-name": "CoralMoon#517", "lang": "en", "friends_count": 35, "statuses_count": 3, "name": "Coral Moon", "followers_count": 67 }, "sender-location": point("32.05,75.79"), "send-time": datetime("2006-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " love samsung the touch-screen is mind-blowing" }
+{ "tweetid": 6i64, "user": { "screen-name": "CarriePinney#881", "lang": "en", "friends_count": 77, "statuses_count": 113, "name": "Carrie Pinney", "followers_count": 120 }, "sender-location": point("45.72,93.27"), "send-time": datetime("2011-12-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is awesome:)" }
+{ "tweetid": 7i64, "user": { "screen-name": "AmadoTomey_367", "lang": "en", "friends_count": 28, "statuses_count": 379, "name": "Amado Tomey", "followers_count": 119 }, "sender-location": point("43.0,96.53"), "send-time": datetime("2011-07-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " hate verizon its platform is OMG:(" }
+{ "tweetid": 8i64, "user": { "screen-name": "OdellWallace#398", "lang": "en", "friends_count": 10, "statuses_count": 89, "name": "Odell Wallace", "followers_count": 4 }, "sender-location": point("28.61,90.69"), "send-time": datetime("2012-01-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is amazing:)" }
+{ "tweetid": 9i64, "user": { "screen-name": "NickLing#80", "lang": "en", "friends_count": 99, "statuses_count": 291, "name": "Nick Ling", "followers_count": 144 }, "sender-location": point("33.59,71.74"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "speed" }}, "message-text": " hate t-mobile the speed is horrible:(" }
+{ "tweetid": 10i64, "user": { "screen-name": "MickeyDunkle_962", "lang": "en", "friends_count": 46, "statuses_count": 429, "name": "Mickey Dunkle", "followers_count": 110 }, "sender-location": point("28.72,70.51"), "send-time": datetime("2006-05-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " can't stand at&t its reachability is OMG:(" }
+{ "tweetid": 11i64, "user": { "screen-name": "AlaynaKnopsnider$684", "lang": "en", "friends_count": 70, "statuses_count": 425, "name": "Alayna Knopsnider", "followers_count": 106 }, "sender-location": point("35.4,69.61"), "send-time": datetime("2012-08-15T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " dislike sprint the voice-command is bad" }
+{ "tweetid": 12i64, "user": { "screen-name": "SeraphinaWall_37", "lang": "en", "friends_count": 34, "statuses_count": 43, "name": "Seraphina Wall", "followers_count": 101 }, "sender-location": point("27.83,95.15"), "send-time": datetime("2010-02-08T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " like motorola its signal is amazing:)" }
+{ "tweetid": 13i64, "user": { "screen-name": "TonyaKnopsnider#342", "lang": "en", "friends_count": 96, "statuses_count": 479, "name": "Tonya Knopsnider", "followers_count": 105 }, "sender-location": point("27.95,74.39"), "send-time": datetime("2008-05-26T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " dislike motorola its voicemail-service is bad" }
+{ "tweetid": 14i64, "user": { "screen-name": "SkylerStough#713", "lang": "en", "friends_count": 29, "statuses_count": 41, "name": "Skyler Stough", "followers_count": 118 }, "sender-location": point("39.72,68.97"), "send-time": datetime("2012-05-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "3G" }}, "message-text": " love iphone its 3G is awesome:)" }
+{ "tweetid": 15i64, "user": { "screen-name": "IrisMillard$830", "lang": "en", "friends_count": 9, "statuses_count": 56, "name": "Iris Millard", "followers_count": 127 }, "sender-location": point("27.59,95.34"), "send-time": datetime("2010-02-07T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " like sprint the voice-clarity is amazing" }
+{ "tweetid": 16i64, "user": { "screen-name": "KaylynBrinigh_817", "lang": "en", "friends_count": 11, "statuses_count": 448, "name": "Kaylyn Brinigh", "followers_count": 53 }, "sender-location": point("25.19,79.71"), "send-time": datetime("2005-04-06T10:10:00.000Z"), "referred-topics": {{ "samsung", "customization" }}, "message-text": " love samsung its customization is amazing:)" }
+{ "tweetid": 17i64, "user": { "screen-name": "SungHoopengarner#732", "lang": "en", "friends_count": 55, "statuses_count": 129, "name": "Sung Hoopengarner", "followers_count": 152 }, "sender-location": point("47.75,93.12"), "send-time": datetime("2010-01-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " dislike motorola its voice-command is horrible:(" }
+{ "tweetid": 18i64, "user": { "screen-name": "RenatoRyals_261", "lang": "en", "friends_count": 46, "statuses_count": 439, "name": "Renato Ryals", "followers_count": 73 }, "sender-location": point("38.48,75.0"), "send-time": datetime("2010-04-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "signal" }}, "message-text": " love sprint its signal is good:)" }
+{ "tweetid": 19i64, "user": { "screen-name": "JohnnieHanseu#755", "lang": "en", "friends_count": 84, "statuses_count": 281, "name": "Johnnie Hanseu", "followers_count": 70 }, "sender-location": point("42.75,70.91"), "send-time": datetime("2010-06-12T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "3G" }}, "message-text": " like t-mobile its 3G is mind-blowing:)" }
+{ "tweetid": 20i64, "user": { "screen-name": "LindseyRahl#362", "lang": "en", "friends_count": 27, "statuses_count": 458, "name": "Lindsey Rahl", "followers_count": 24 }, "sender-location": point("36.2,94.8"), "send-time": datetime("2007-01-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is horrible:(" }
+{ "tweetid": 21i64, "user": { "screen-name": "CearaLing$289", "lang": "en", "friends_count": 39, "statuses_count": 177, "name": "Ceara Ling", "followers_count": 40 }, "sender-location": point("39.58,71.28"), "send-time": datetime("2008-05-20T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung the reachability is amazing:)" }
+{ "tweetid": 22i64, "user": { "screen-name": "DomoniqueEisenmann_636", "lang": "en", "friends_count": 27, "statuses_count": 465, "name": "Domonique Eisenmann", "followers_count": 166 }, "sender-location": point("47.11,77.87"), "send-time": datetime("2008-10-24T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " can't stand sprint its voice-command is horrible:(" }
+{ "tweetid": 23i64, "user": { "screen-name": "MelanieGadow$539", "lang": "en", "friends_count": 34, "statuses_count": 112, "name": "Melanie Gadow", "followers_count": 65 }, "sender-location": point("31.9,87.22"), "send-time": datetime("2012-07-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " like sprint its speed is mind-blowing:)" }
+{ "tweetid": 24i64, "user": { "screen-name": "HewiePeters#654", "lang": "en", "friends_count": 8, "statuses_count": 309, "name": "Hewie Peters", "followers_count": 15 }, "sender-location": point("42.84,90.27"), "send-time": datetime("2011-02-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t its wireless is terrible:(" }
+{ "tweetid": 25i64, "user": { "screen-name": "HollisJudge#731", "lang": "en", "friends_count": 58, "statuses_count": 211, "name": "Hollis Judge", "followers_count": 190 }, "sender-location": point("34.33,83.22"), "send-time": datetime("2006-11-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-clarity" }}, "message-text": " dislike samsung its voice-clarity is OMG:(" }
+{ "tweetid": 26i64, "user": { "screen-name": "DemarcusHarrow$822", "lang": "en", "friends_count": 60, "statuses_count": 171, "name": "Demarcus Harrow", "followers_count": 151 }, "sender-location": point("37.01,80.04"), "send-time": datetime("2012-07-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "shortcut-menu" }}, "message-text": " like at&t its shortcut-menu is awesome" }
+{ "tweetid": 27i64, "user": { "screen-name": "OrsonBauerle$52", "lang": "en", "friends_count": 91, "statuses_count": 271, "name": "Orson Bauerle", "followers_count": 144 }, "sender-location": point("48.91,75.54"), "send-time": datetime("2010-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "speed" }}, "message-text": " love samsung the speed is amazing:)" }
+{ "tweetid": 28i64, "user": { "screen-name": "ChadBeach#363", "lang": "en", "friends_count": 88, "statuses_count": 275, "name": "Chad Beach", "followers_count": 142 }, "sender-location": point("35.5,73.83"), "send-time": datetime("2007-07-28T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is mind-blowing" }
+{ "tweetid": 29i64, "user": { "screen-name": "LupeNewbern#345", "lang": "en", "friends_count": 99, "statuses_count": 45, "name": "Lupe Newbern", "followers_count": 86 }, "sender-location": point("35.07,70.43"), "send-time": datetime("2010-12-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voicemail-service" }}, "message-text": " love t-mobile its voicemail-service is awesome" }
+{ "tweetid": 30i64, "user": { "screen-name": "LoydJohnston@664", "lang": "en", "friends_count": 86, "statuses_count": 10, "name": "Loyd Johnston", "followers_count": 58 }, "sender-location": point("42.55,72.33"), "send-time": datetime("2010-02-01T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone the network is awesome" }
+{ "tweetid": 31i64, "user": { "screen-name": "VerityMunson#211", "lang": "en", "friends_count": 75, "statuses_count": 359, "name": "Verity Munson", "followers_count": 165 }, "sender-location": point("30.65,77.21"), "send-time": datetime("2009-01-06T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-command" }}, "message-text": " can't stand verizon the voice-command is bad:(" }
+{ "tweetid": 32i64, "user": { "screen-name": "RinaHerndon#616", "lang": "en", "friends_count": 19, "statuses_count": 265, "name": "Rina Herndon", "followers_count": 26 }, "sender-location": point("40.76,75.79"), "send-time": datetime("2009-09-19T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " love verizon the shortcut-menu is mind-blowing:)" }
+{ "tweetid": 33i64, "user": { "screen-name": "MadelaineSchreckengost@250", "lang": "en", "friends_count": 45, "statuses_count": 310, "name": "Madelaine Schreckengost", "followers_count": 153 }, "sender-location": point("30.35,66.43"), "send-time": datetime("2005-07-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "plan" }}, "message-text": " can't stand at&t the plan is bad" }
+{ "tweetid": 34i64, "user": { "screen-name": "RadclyffeStaymates_289", "lang": "en", "friends_count": 50, "statuses_count": 188, "name": "Radclyffe Staymates", "followers_count": 97 }, "sender-location": point("45.42,77.18"), "send-time": datetime("2012-05-06T10:10:00.000Z"), "referred-topics": {{ "at&t", "customer-service" }}, "message-text": " hate at&t its customer-service is OMG" }
+{ "tweetid": 35i64, "user": { "screen-name": "VernieAlice$968", "lang": "en", "friends_count": 70, "statuses_count": 491, "name": "Vernie Alice", "followers_count": 193 }, "sender-location": point("28.03,79.37"), "send-time": datetime("2010-01-19T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " can't stand motorola the voice-command is horrible" }
+{ "tweetid": 36i64, "user": { "screen-name": "GertieDugger#987", "lang": "en", "friends_count": 22, "statuses_count": 72, "name": "Gertie Dugger", "followers_count": 12 }, "sender-location": point("25.77,92.7"), "send-time": datetime("2009-09-25T10:10:00.000Z"), "referred-topics": {{ "sprint", "touch-screen" }}, "message-text": " like sprint its touch-screen is awesome" }
+{ "tweetid": 37i64, "user": { "screen-name": "AggieBollinger@675", "lang": "en", "friends_count": 45, "statuses_count": 175, "name": "Aggie Bollinger", "followers_count": 67 }, "sender-location": point("42.6,68.28"), "send-time": datetime("2012-02-22T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " love sprint its voice-clarity is awesome" }
+{ "tweetid": 38i64, "user": { "screen-name": "JocelynPatton$328", "lang": "en", "friends_count": 35, "statuses_count": 484, "name": "Jocelyn Patton", "followers_count": 174 }, "sender-location": point("28.77,88.28"), "send-time": datetime("2006-12-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t the wireless is horrible:(" }
+{ "tweetid": 39i64, "user": { "screen-name": "CandelariaHujsak#602", "lang": "en", "friends_count": 28, "statuses_count": 499, "name": "Candelaria Hujsak", "followers_count": 94 }, "sender-location": point("36.09,96.94"), "send-time": datetime("2007-11-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "wireless" }}, "message-text": " can't stand t-mobile the wireless is terrible:(" }
+{ "tweetid": 40i64, "user": { "screen-name": "DamarisMueller#283", "lang": "en", "friends_count": 46, "statuses_count": 122, "name": "Damaris Mueller", "followers_count": 189 }, "sender-location": point("44.31,73.93"), "send-time": datetime("2012-02-28T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " like sprint its wireless is awesome" }
+{ "tweetid": 41i64, "user": { "screen-name": "ChuckPhilbrick_884", "lang": "en", "friends_count": 73, "statuses_count": 237, "name": "Chuck Philbrick", "followers_count": 35 }, "sender-location": point("35.39,81.04"), "send-time": datetime("2012-05-07T10:10:00.000Z"), "referred-topics": {{ "verizon", "plan" }}, "message-text": " love verizon its plan is good:)" }
+{ "tweetid": 42i64, "user": { "screen-name": "BraxtonKifer_723", "lang": "en", "friends_count": 65, "statuses_count": 459, "name": "Braxton Kifer", "followers_count": 6 }, "sender-location": point("30.23,70.06"), "send-time": datetime("2007-10-15T10:10:00.000Z"), "referred-topics": {{ "verizon", "touch-screen" }}, "message-text": " dislike verizon the touch-screen is horrible" }
+{ "tweetid": 43i64, "user": { "screen-name": "DeshawnPorter#734", "lang": "en", "friends_count": 26, "statuses_count": 408, "name": "Deshawn Porter", "followers_count": 14 }, "sender-location": point("35.2,82.65"), "send-time": datetime("2005-10-06T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is amazing" }
+{ "tweetid": 44i64, "user": { "screen-name": "SamanthaBeach$879", "lang": "en", "friends_count": 95, "statuses_count": 481, "name": "Samantha Beach", "followers_count": 119 }, "sender-location": point("30.28,89.79"), "send-time": datetime("2005-09-20T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " love motorola the network is mind-blowing:)" }
+{ "tweetid": 45i64, "user": { "screen-name": "NoelleBash_83", "lang": "en", "friends_count": 4, "statuses_count": 148, "name": "Noelle Bash", "followers_count": 139 }, "sender-location": point("42.4,96.94"), "send-time": datetime("2007-01-05T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " hate iphone its platform is terrible:(" }
+{ "tweetid": 46i64, "user": { "screen-name": "RuthWells#712", "lang": "en", "friends_count": 51, "statuses_count": 415, "name": "Ruth Wells", "followers_count": 57 }, "sender-location": point("31.93,82.03"), "send-time": datetime("2007-04-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " dislike iphone the customization is bad:(" }
+{ "tweetid": 47i64, "user": { "screen-name": "NakiaClose@771", "lang": "en", "friends_count": 59, "statuses_count": 239, "name": "Nakia Close", "followers_count": 105 }, "sender-location": point("47.06,92.54"), "send-time": datetime("2005-02-18T10:10:00.000Z"), "referred-topics": {{ "motorola", "3G" }}, "message-text": " can't stand motorola its 3G is OMG:(" }
+{ "tweetid": 48i64, "user": { "screen-name": "EmLinton#420", "lang": "en", "friends_count": 87, "statuses_count": 481, "name": "Em Linton", "followers_count": 141 }, "sender-location": point("35.6,88.2"), "send-time": datetime("2006-09-24T10:10:00.000Z"), "referred-topics": {{ "iphone", "customer-service" }}, "message-text": " hate iphone its customer-service is horrible" }
+{ "tweetid": 49i64, "user": { "screen-name": "DarbyPatton_703", "lang": "en", "friends_count": 40, "statuses_count": 79, "name": "Darby Patton", "followers_count": 159 }, "sender-location": point("36.57,84.01"), "send-time": datetime("2006-06-14T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " love verizon its platform is good" }
+{ "tweetid": 50i64, "user": { "screen-name": "WilburStephenson$295", "lang": "en", "friends_count": 57, "statuses_count": 337, "name": "Wilbur Stephenson", "followers_count": 188 }, "sender-location": point("38.35,83.92"), "send-time": datetime("2006-10-14T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " dislike motorola the plan is OMG:(" }
+{ "tweetid": 51i64, "user": { "screen-name": "PalmerHahn@368", "lang": "en", "friends_count": 13, "statuses_count": 196, "name": "Palmer Hahn", "followers_count": 69 }, "sender-location": point("48.96,88.74"), "send-time": datetime("2006-01-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " like samsung its shortcut-menu is awesome" }
+{ "tweetid": 52i64, "user": { "screen-name": "HarlanWynne_297", "lang": "en", "friends_count": 71, "statuses_count": 262, "name": "Harlan Wynne", "followers_count": 151 }, "sender-location": point("41.05,93.92"), "send-time": datetime("2008-07-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung its platform is awesome" }
+{ "tweetid": 53i64, "user": { "screen-name": "GrettaCable#405", "lang": "en", "friends_count": 7, "statuses_count": 324, "name": "Gretta Cable", "followers_count": 82 }, "sender-location": point("40.6,71.86"), "send-time": datetime("2010-11-16T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone its network is amazing:)" }
+{ "tweetid": 54i64, "user": { "screen-name": "PhilipaRing_461", "lang": "en", "friends_count": 43, "statuses_count": 53, "name": "Philipa Ring", "followers_count": 164 }, "sender-location": point("30.47,90.14"), "send-time": datetime("2011-12-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " like motorola its voicemail-service is amazing" }
+{ "tweetid": 55i64, "user": { "screen-name": "LindseyBurch_187", "lang": "en", "friends_count": 9, "statuses_count": 54, "name": "Lindsey Burch", "followers_count": 6 }, "sender-location": point("31.66,68.68"), "send-time": datetime("2011-12-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " can't stand samsung its touch-screen is terrible" }
+{ "tweetid": 56i64, "user": { "screen-name": "AnnabelLosey_61", "lang": "en", "friends_count": 53, "statuses_count": 381, "name": "Annabel Losey", "followers_count": 133 }, "sender-location": point("37.33,85.16"), "send-time": datetime("2005-11-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "customization" }}, "message-text": " can't stand sprint the customization is horrible:(" }
+{ "tweetid": 57i64, "user": { "screen-name": "HectorLalty@132", "lang": "en", "friends_count": 2, "statuses_count": 195, "name": "Hector Lalty", "followers_count": 92 }, "sender-location": point("46.52,80.45"), "send-time": datetime("2012-04-15T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " hate iphone the reachability is bad:(" }
+{ "tweetid": 58i64, "user": { "screen-name": "KatieWilkins_817", "lang": "en", "friends_count": 95, "statuses_count": 476, "name": "Katie Wilkins", "followers_count": 151 }, "sender-location": point("44.72,69.13"), "send-time": datetime("2006-11-01T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is amazing:)" }
+{ "tweetid": 59i64, "user": { "screen-name": "BrianneRamsey$451", "lang": "en", "friends_count": 13, "statuses_count": 69, "name": "Brianne Ramsey", "followers_count": 102 }, "sender-location": point("37.02,80.95"), "send-time": datetime("2007-02-08T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " dislike verizon the network is terrible" }
+{ "tweetid": 60i64, "user": { "screen-name": "RinaHujsak#7", "lang": "en", "friends_count": 69, "statuses_count": 73, "name": "Rina Hujsak", "followers_count": 63 }, "sender-location": point("28.27,73.68"), "send-time": datetime("2009-03-28T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is amazing:)" }
+{ "tweetid": 61i64, "user": { "screen-name": "GertieSadley$508", "lang": "en", "friends_count": 35, "statuses_count": 235, "name": "Gertie Sadley", "followers_count": 87 }, "sender-location": point("40.19,86.0"), "send-time": datetime("2006-07-27T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " love at&t its reachability is mind-blowing:)" }
+{ "tweetid": 62i64, "user": { "screen-name": "AaronJackson_273", "lang": "en", "friends_count": 98, "statuses_count": 205, "name": "Aaron Jackson", "followers_count": 128 }, "sender-location": point("48.11,85.01"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-command" }}, "message-text": " like iphone the voice-command is awesome:)" }
+{ "tweetid": 63i64, "user": { "screen-name": "CreightonHujsak$142", "lang": "en", "friends_count": 21, "statuses_count": 68, "name": "Creighton Hujsak", "followers_count": 70 }, "sender-location": point("40.55,90.98"), "send-time": datetime("2010-08-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "voicemail-service" }}, "message-text": " love samsung the voicemail-service is amazing" }
+{ "tweetid": 64i64, "user": { "screen-name": "KazukoWilkinson$204", "lang": "en", "friends_count": 51, "statuses_count": 147, "name": "Kazuko Wilkinson", "followers_count": 86 }, "sender-location": point("29.64,94.45"), "send-time": datetime("2008-08-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " love motorola the speed is mind-blowing:)" }
+{ "tweetid": 65i64, "user": { "screen-name": "GonzaloDiegel#186", "lang": "en", "friends_count": 80, "statuses_count": 149, "name": "Gonzalo Diegel", "followers_count": 89 }, "sender-location": point("48.68,83.09"), "send-time": datetime("2008-04-24T10:10:00.000Z"), "referred-topics": {{ "at&t", "voicemail-service" }}, "message-text": " dislike at&t its voicemail-service is horrible:(" }
+{ "tweetid": 66i64, "user": { "screen-name": "KizzyKanaga$317", "lang": "en", "friends_count": 52, "statuses_count": 330, "name": "Kizzy Kanaga", "followers_count": 6 }, "sender-location": point("27.96,90.03"), "send-time": datetime("2009-10-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "touch-screen" }}, "message-text": " like at&t the touch-screen is amazing" }
+{ "tweetid": 67i64, "user": { "screen-name": "CraigTreeby@171", "lang": "en", "friends_count": 72, "statuses_count": 44, "name": "Craig Treeby", "followers_count": 155 }, "sender-location": point("48.99,91.21"), "send-time": datetime("2006-02-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "signal" }}, "message-text": " love samsung the signal is amazing:)" }
+{ "tweetid": 68i64, "user": { "screen-name": "BrionySaltser#395", "lang": "en", "friends_count": 21, "statuses_count": 422, "name": "Briony Saltser", "followers_count": 129 }, "sender-location": point("37.33,67.08"), "send-time": datetime("2006-03-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " love samsung its shortcut-menu is amazing:)" }
+{ "tweetid": 69i64, "user": { "screen-name": "MagdaleneWerner$925", "lang": "en", "friends_count": 46, "statuses_count": 446, "name": "Magdalene Werner", "followers_count": 75 }, "sender-location": point("45.77,83.23"), "send-time": datetime("2005-06-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "signal" }}, "message-text": " like iphone the signal is mind-blowing" }
+{ "tweetid": 70i64, "user": { "screen-name": "FlossieBaker$898", "lang": "en", "friends_count": 67, "statuses_count": 63, "name": "Flossie Baker", "followers_count": 50 }, "sender-location": point("44.37,89.4"), "send-time": datetime("2011-07-16T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " like motorola its network is good" }
+{ "tweetid": 71i64, "user": { "screen-name": "GradyGraff$247", "lang": "en", "friends_count": 21, "statuses_count": 58, "name": "Grady Graff", "followers_count": 45 }, "sender-location": point("24.81,67.13"), "send-time": datetime("2012-04-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "reachability" }}, "message-text": " like motorola the reachability is good" }
+{ "tweetid": 72i64, "user": { "screen-name": "MelitaLombardi@324", "lang": "en", "friends_count": 39, "statuses_count": 32, "name": "Melita Lombardi", "followers_count": 167 }, "sender-location": point("24.23,73.03"), "send-time": datetime("2011-02-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " hate verizon the network is terrible:(" }
+{ "tweetid": 73i64, "user": { "screen-name": "HerbertPowell_651", "lang": "en", "friends_count": 17, "statuses_count": 57, "name": "Herbert Powell", "followers_count": 167 }, "sender-location": point("47.22,92.69"), "send-time": datetime("2005-01-25T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is awesome:)" }
+{ "tweetid": 74i64, "user": { "screen-name": "BasilSanborn$23", "lang": "en", "friends_count": 38, "statuses_count": 391, "name": "Basil Sanborn", "followers_count": 108 }, "sender-location": point("30.96,68.0"), "send-time": datetime("2008-12-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " can't stand samsung the network is bad" }
+{ "tweetid": 75i64, "user": { "screen-name": "LaurineZoucks$307", "lang": "en", "friends_count": 27, "statuses_count": 161, "name": "Laurine Zoucks", "followers_count": 144 }, "sender-location": point("40.78,91.08"), "send-time": datetime("2009-11-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " like motorola the customer-service is amazing" }
+{ "tweetid": 76i64, "user": { "screen-name": "LincolnMarriman@675", "lang": "en", "friends_count": 3, "statuses_count": 389, "name": "Lincoln Marriman", "followers_count": 125 }, "sender-location": point("28.4,83.82"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "customer-service" }}, "message-text": " like verizon the customer-service is mind-blowing" }
+{ "tweetid": 77i64, "user": { "screen-name": "FrancesFinlay#683", "lang": "en", "friends_count": 71, "statuses_count": 174, "name": "Frances Finlay", "followers_count": 32 }, "sender-location": point("29.71,66.36"), "send-time": datetime("2012-04-18T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " love iphone the customization is awesome" }
+{ "tweetid": 78i64, "user": { "screen-name": "ModestoMarriman_627", "lang": "en", "friends_count": 76, "statuses_count": 2, "name": "Modesto Marriman", "followers_count": 33 }, "sender-location": point("33.77,92.15"), "send-time": datetime("2011-09-26T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " love samsung its network is mind-blowing" }
+{ "tweetid": 79i64, "user": { "screen-name": "FlossieCamp#59", "lang": "en", "friends_count": 17, "statuses_count": 484, "name": "Flossie Camp", "followers_count": 142 }, "sender-location": point("24.67,77.24"), "send-time": datetime("2005-07-03T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " like iphone its reachability is awesome:)" }
+{ "tweetid": 80i64, "user": { "screen-name": "DouglasKing@553", "lang": "en", "friends_count": 62, "statuses_count": 251, "name": "Douglas King", "followers_count": 180 }, "sender-location": point("24.84,74.15"), "send-time": datetime("2009-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " can't stand sprint the speed is bad:(" }
+{ "tweetid": 81i64, "user": { "screen-name": "WardCasteel@972", "lang": "en", "friends_count": 8, "statuses_count": 358, "name": "Ward Casteel", "followers_count": 51 }, "sender-location": point("41.41,91.32"), "send-time": datetime("2007-05-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is terrible:(" }
+{ "tweetid": 82i64, "user": { "screen-name": "AdelaErskine#579", "lang": "en", "friends_count": 97, "statuses_count": 354, "name": "Adela Erskine", "followers_count": 155 }, "sender-location": point("35.56,68.19"), "send-time": datetime("2009-03-23T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " hate samsung the touch-screen is bad:(" }
+{ "tweetid": 83i64, "user": { "screen-name": "ClevelandPrevatt#255", "lang": "en", "friends_count": 24, "statuses_count": 159, "name": "Cleveland Prevatt", "followers_count": 68 }, "sender-location": point("38.6,67.51"), "send-time": datetime("2006-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "platform" }}, "message-text": " hate sprint its platform is OMG:(" }
+{ "tweetid": 84i64, "user": { "screen-name": "MaxwellTreeby@610", "lang": "en", "friends_count": 21, "statuses_count": 168, "name": "Maxwell Treeby", "followers_count": 138 }, "sender-location": point("38.37,79.64"), "send-time": datetime("2007-07-17T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola its speed is mind-blowing" }
+{ "tweetid": 85i64, "user": { "screen-name": "BobbyBastion$235", "lang": "en", "friends_count": 48, "statuses_count": 251, "name": "Bobby Bastion", "followers_count": 123 }, "sender-location": point("45.84,83.03"), "send-time": datetime("2009-03-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " love samsung its voice-command is amazing" }
+{ "tweetid": 86i64, "user": { "screen-name": "ClairKanaga$512", "lang": "en", "friends_count": 88, "statuses_count": 274, "name": "Clair Kanaga", "followers_count": 77 }, "sender-location": point("46.34,84.86"), "send-time": datetime("2006-07-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " love samsung its reachability is mind-blowing:)" }
+{ "tweetid": 87i64, "user": { "screen-name": "HueyLosey_966", "lang": "en", "friends_count": 78, "statuses_count": 32, "name": "Huey Losey", "followers_count": 2 }, "sender-location": point("25.61,78.89"), "send-time": datetime("2011-03-22T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung its reachability is good:)" }
+{ "tweetid": 88i64, "user": { "screen-name": "SooThigpen#463", "lang": "en", "friends_count": 5, "statuses_count": 429, "name": "Soo Thigpen", "followers_count": 18 }, "sender-location": point("34.84,74.43"), "send-time": datetime("2009-03-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "wireless" }}, "message-text": " love motorola the wireless is good:)" }
+{ "tweetid": 89i64, "user": { "screen-name": "LacreshaWire_320", "lang": "en", "friends_count": 92, "statuses_count": 127, "name": "Lacresha Wire", "followers_count": 194 }, "sender-location": point("47.73,86.79"), "send-time": datetime("2007-08-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "wireless" }}, "message-text": " can't stand verizon its wireless is OMG:(" }
+{ "tweetid": 90i64, "user": { "screen-name": "MyriamLambert@966", "lang": "en", "friends_count": 22, "statuses_count": 452, "name": "Myriam Lambert", "followers_count": 193 }, "sender-location": point("41.85,88.44"), "send-time": datetime("2008-12-02T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "plan" }}, "message-text": " hate t-mobile the plan is bad" }
+{ "tweetid": 91i64, "user": { "screen-name": "WoodyWhite@341", "lang": "en", "friends_count": 12, "statuses_count": 183, "name": "Woody White", "followers_count": 31 }, "sender-location": point("29.04,85.35"), "send-time": datetime("2006-02-06T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is good" }
+{ "tweetid": 92i64, "user": { "screen-name": "QuinDickinson#157", "lang": "en", "friends_count": 84, "statuses_count": 415, "name": "Quin Dickinson", "followers_count": 9 }, "sender-location": point("40.86,67.52"), "send-time": datetime("2006-01-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "signal" }}, "message-text": " can't stand t-mobile the signal is horrible:(" }
+{ "tweetid": 93i64, "user": { "screen-name": "BettieRing@713", "lang": "en", "friends_count": 39, "statuses_count": 373, "name": "Bettie Ring", "followers_count": 98 }, "sender-location": point("26.37,69.03"), "send-time": datetime("2005-10-04T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "reachability" }}, "message-text": " dislike t-mobile the reachability is terrible:(" }
+{ "tweetid": 94i64, "user": { "screen-name": "LinaDraudy_733", "lang": "en", "friends_count": 70, "statuses_count": 228, "name": "Lina Draudy", "followers_count": 9 }, "sender-location": point("39.58,97.38"), "send-time": datetime("2012-03-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " like verizon the network is awesome:)" }
+{ "tweetid": 95i64, "user": { "screen-name": "StacyFleming#907", "lang": "en", "friends_count": 37, "statuses_count": 119, "name": "Stacy Fleming", "followers_count": 113 }, "sender-location": point("24.27,94.53"), "send-time": datetime("2007-10-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " love samsung its platform is amazing:)" }
+{ "tweetid": 96i64, "user": { "screen-name": "AmbroseAllshouse_786", "lang": "en", "friends_count": 24, "statuses_count": 299, "name": "Ambrose Allshouse", "followers_count": 23 }, "sender-location": point("34.88,73.05"), "send-time": datetime("2009-01-09T10:10:00.000Z"), "referred-topics": {{ "verizon", "speed" }}, "message-text": " hate verizon the speed is horrible:(" }
+{ "tweetid": 97i64, "user": { "screen-name": "VaughnFocell_20", "lang": "en", "friends_count": 68, "statuses_count": 388, "name": "Vaughn Focell", "followers_count": 171 }, "sender-location": point("34.67,73.46"), "send-time": datetime("2012-01-24T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customer-service" }}, "message-text": " can't stand t-mobile its customer-service is terrible" }
+{ "tweetid": 98i64, "user": { "screen-name": "UlyssesCrissman#115", "lang": "en", "friends_count": 90, "statuses_count": 250, "name": "Ulysses Crissman", "followers_count": 110 }, "sender-location": point("24.81,93.59"), "send-time": datetime("2008-04-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " love motorola its customer-service is awesome" }
+{ "tweetid": 99i64, "user": { "screen-name": "WatCrissman#703", "lang": "en", "friends_count": 50, "statuses_count": 244, "name": "Wat Crissman", "followers_count": 123 }, "sender-location": point("33.22,92.64"), "send-time": datetime("2006-09-15T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " can't stand motorola the plan is terrible" }
+{ "tweetid": 100i64, "user": { "screen-name": "BambiLaurence$910", "lang": "en", "friends_count": 57, "statuses_count": 311, "name": "Bambi Laurence", "followers_count": 136 }, "sender-location": point("36.88,80.08"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is mind-blowing" }
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index e80e33d..6dbc506 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -1,23 +1,18 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!-- ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License"); ! you may
+ not use this file except in compliance with the License. ! you may obtain
+ a copy of the License from ! ! http://www.apache.org/licenses/LICENSE-2.0
+ ! ! Unless required by applicable law or agreed to in writing, software !
+ distributed under the License is distributed on an "AS IS" BASIS, ! WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ! See the
+ License for the specific language governing permissions and ! limitations
+ under the License. ! -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-app</artifactId>
@@ -90,7 +85,7 @@
<additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
</additionalClasspathElements> -->
<forkMode>pertest</forkMode>
- <argLine>-enableassertions -Xmx${test.heap.size}m
+ <argLine>-enableassertions -Xmx${test.heap.size}m
-Dfile.encoding=UTF-8
-Djava.util.logging.config.file=src/test/resources/logging.properties
-Xdebug
@@ -106,12 +101,6 @@
<dependencies>
<dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-algebra</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<type>jar</type>
@@ -149,57 +138,66 @@
<artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-client</artifactId>
- </dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-client</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-algebra</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-tools</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-transactions</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-common</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <!-- possibly remove this <dependency> <groupId>com.kenai.nbpwr</groupId>
+ <artifactId>org-apache-commons-io</artifactId> <version>1.3.1-201002241208</version>
+ <scope>test</scope> </dependency> -->
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-transactions</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
@@ -227,12 +225,12 @@
<type>jar</type>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-test-framework</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <scope>test</scope>
- </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-test-framework</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
diff --git a/asterix-app/scripts/asterix/startallncs.sh b/asterix-app/scripts/asterix/startallncs.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/startcc.sh b/asterix-app/scripts/asterix/startcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/startnc.sh b/asterix-app/scripts/asterix/startnc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/stopallncs.sh b/asterix-app/scripts/asterix/stopallncs.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/stopcc.sh b/asterix-app/scripts/asterix/stopcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/stopnc.sh b/asterix-app/scripts/asterix/stopnc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/asterix/test.properties b/asterix-app/scripts/asterix/test.properties
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/idefix/startcc.sh b/asterix-app/scripts/idefix/startcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/idefix/startnc1.sh b/asterix-app/scripts/idefix/startnc1.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/idefix/startnc2.sh b/asterix-app/scripts/idefix/startnc2.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/idefix/stopallncs.sh b/asterix-app/scripts/idefix/stopallncs.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/idefix/stopcc.sh b/asterix-app/scripts/idefix/stopcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/execute.sh b/asterix-app/scripts/rainbow/execute.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/startallncs.sh b/asterix-app/scripts/rainbow/startallncs.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/startcc.sh b/asterix-app/scripts/rainbow/startcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/startnc.sh b/asterix-app/scripts/rainbow/startnc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/stopallncs.sh b/asterix-app/scripts/rainbow/stopallncs.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/stopcc.sh b/asterix-app/scripts/rainbow/stopcc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/scripts/rainbow/stopnc.sh b/asterix-app/scripts/rainbow/stopnc.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index 60b0cd6..df04195 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -86,6 +86,8 @@
SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
RuleCollections.buildTypeInferenceRuleCollection()));
+ defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+ RuleCollections.buildAutogenerateIDRuleCollection()));
defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
RuleCollections.buildNormalizationRuleCollection()));
defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
@@ -220,8 +222,7 @@
}
- edu.uci.ics.asterix.common.transactions.JobId asterixJobId = JobIdFactory
- .generateJobId();
+ edu.uci.ics.asterix.common.transactions.JobId asterixJobId = JobIdFactory.generateJobId();
queryMetadataProvider.setJobId(asterixJobId);
AqlExpressionToPlanTranslator t = new AqlExpressionToPlanTranslator(queryMetadataProvider, varCounter,
outputDatasetName, statement);
@@ -265,7 +266,6 @@
OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalSort(sortFrameLimit);
OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesHybridHash(joinFrameLimit);
-
HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder(
AqlOptimizationContextFactory.INSTANCE);
@@ -280,7 +280,6 @@
builder.setExpressionTypeComputer(AqlExpressionTypeComputer.INSTANCE);
builder.setNullableTypeComputer(AqlNullableTypeComputer.INSTANCE);
-
ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
if (pc.isOptimize()) {
compiler.optimize();
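
The first APIFramework hunk above registers the new autogenerate-ID rule collection immediately after type inference in the default logical rewrite sequence. A condensed sketch of the resulting registration order follows; the wrapper method name buildDefaultLogicalRewrites is an assumption, and the calls themselves are taken from the diff.

    // Condensed sketch of the rewrite-sequence registration shown above.
    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> rewrites =
                new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
        rewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
                RuleCollections.buildTypeInferenceRuleCollection()));
        // Inserted by this change: the autogenerate-ID rules run once, directly
        // after type inference and before the normalization collections.
        rewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
                RuleCollections.buildAutogenerateIDRuleCollection()));
        // ... normalization and the remaining collections are appended here, unchanged.
        return rewrites;
    }
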
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
index 16e3c14..c789e1f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
@@ -18,6 +18,7 @@
import java.util.List;
import java.util.logging.Logger;
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
@@ -30,9 +31,11 @@
import edu.uci.ics.asterix.common.context.DatasetLifecycleManager;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.metadata.feeds.FeedManager;
import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepositoryFactory;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
@@ -40,7 +43,7 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.IIOManager;
import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
-import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -64,6 +67,9 @@
import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
public class AsterixAppRuntimeContext implements IAsterixAppRuntimeContext, IAsterixPropertiesProvider {
+
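+ // Shared configuration accessor, created once per class load; the property objects built in the
+ // constructor below are all derived from it.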
+ public static final AsterixPropertiesAccessor ASTERIX_PROPERTIES_ACCESSOR = createAsterixPropertiesAccessor();
+
private static final int METADATA_IO_DEVICE_ID = 0;
private ILSMMergePolicyFactory metadataMergePolicyFactory;
@@ -75,6 +81,7 @@
private AsterixStorageProperties storageProperties;
private AsterixTransactionProperties txnProperties;
+ private AsterixThreadExecutor threadExecutor;
private DatasetLifecycleManager indexLifecycleManager;
private IFileMapManager fileMapManager;
private IBufferCache bufferCache;
@@ -86,20 +93,31 @@
private IIOManager ioManager;
private boolean isShuttingdown;
- public AsterixAppRuntimeContext(INCApplicationContext ncApplicationContext) {
+ private IFeedManager feedManager;
+
+ public AsterixAppRuntimeContext(INCApplicationContext ncApplicationContext) throws AsterixException {
this.ncApplicationContext = ncApplicationContext;
+ compilerProperties = new AsterixCompilerProperties(ASTERIX_PROPERTIES_ACCESSOR);
+ externalProperties = new AsterixExternalProperties(ASTERIX_PROPERTIES_ACCESSOR);
+ metadataProperties = new AsterixMetadataProperties(ASTERIX_PROPERTIES_ACCESSOR);
+ storageProperties = new AsterixStorageProperties(ASTERIX_PROPERTIES_ACCESSOR);
+ txnProperties = new AsterixTransactionProperties(ASTERIX_PROPERTIES_ACCESSOR);
+ }
+
+ private static AsterixPropertiesAccessor createAsterixPropertiesAccessor() {
+ AsterixPropertiesAccessor propertiesAccessor = null;
+ try {
+ propertiesAccessor = new AsterixPropertiesAccessor();
+ } catch (AsterixException e) {
+ throw new IllegalStateException("Unable to create properties accessor", e);
+ }
+ return propertiesAccessor;
}
public void initialize() throws IOException, ACIDException, AsterixException {
- AsterixPropertiesAccessor propertiesAccessor = new AsterixPropertiesAccessor();
- compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
- externalProperties = new AsterixExternalProperties(propertiesAccessor);
- metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
- storageProperties = new AsterixStorageProperties(propertiesAccessor);
- txnProperties = new AsterixTransactionProperties(propertiesAccessor);
-
Logger.getLogger("edu.uci.ics").setLevel(externalProperties.getLogLevel());
+ threadExecutor = new AsterixThreadExecutor(ncApplicationContext.getThreadFactory());
fileMapManager = new AsterixFileMapManager();
ICacheMemoryAllocator allocator = new HeapBufferAllocator();
IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
@@ -121,19 +139,22 @@
resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
indexLifecycleManager = new DatasetLifecycleManager(storageProperties, localResourceRepository,
MetadataPrimaryIndexes.FIRST_AVAILABLE_USER_DATASET_ID);
- IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProviderForRecovery(
+ IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProdiverForRecovery(
this);
txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider,
txnProperties);
isShuttingdown = false;
+ feedManager = new FeedManager(ncApplicationContext.getNodeId());
+
// The order of registration is important. The buffer cache must be registered before the recovery and transaction managers.
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) bufferCache);
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) indexLifecycleManager);
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) txnSubsystem.getTransactionManager());
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) txnSubsystem.getLogManager());
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) txnSubsystem.getLockManager());
- LifeCycleComponentManager.INSTANCE.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
+ ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
+ lccm.register((ILifeCycleComponent) bufferCache);
+ lccm.register((ILifeCycleComponent) indexLifecycleManager);
+ lccm.register((ILifeCycleComponent) txnSubsystem.getTransactionManager());
+ lccm.register((ILifeCycleComponent) txnSubsystem.getLogManager());
+ lccm.register((ILifeCycleComponent) txnSubsystem.getLockManager());
+ lccm.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
}
public boolean isShuttingdown() {
@@ -223,8 +244,16 @@
}
@Override
+ public AsterixThreadExecutor getThreadExecutor() {
+ return threadExecutor;
+ }
+
public ILSMMergePolicyFactory getMetadataMergePolicyFactory() {
return metadataMergePolicyFactory;
}
+ @Override
+ public IFeedManager getFeedManager() {
+ return feedManager;
+ }
}
\ No newline at end of file
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
new file mode 100644
index 0000000..b985d90
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
+import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+
+public class AsterixAppRuntimeContextProdiverForRecovery implements IAsterixAppRuntimeContextProvider {
+
+ private final AsterixAppRuntimeContext asterixAppRuntimeContext;
+
+ public AsterixAppRuntimeContextProdiverForRecovery(AsterixAppRuntimeContext asterixAppRuntimeContext) {
+ this.asterixAppRuntimeContext = asterixAppRuntimeContext;
+ }
+
+ @Override
+ public IBufferCache getBufferCache() {
+ return asterixAppRuntimeContext.getBufferCache();
+ }
+
+ @Override
+ public IFileMapProvider getFileMapManager() {
+ return asterixAppRuntimeContext.getFileMapManager();
+ }
+
+ @Override
+ public ITransactionSubsystem getTransactionSubsystem() {
+ return asterixAppRuntimeContext.getTransactionSubsystem();
+ }
+
+ @Override
+ public IIndexLifecycleManager getIndexLifecycleManager() {
+ return asterixAppRuntimeContext.getIndexLifecycleManager();
+ }
+
+ @Override
+ public double getBloomFilterFalsePositiveRate() {
+ return asterixAppRuntimeContext.getBloomFilterFalsePositiveRate();
+ }
+
+ @Override
+ public ILSMIOOperationScheduler getLSMIOScheduler() {
+ return asterixAppRuntimeContext.getLSMIOScheduler();
+ }
+
+ @Override
+ public ILocalResourceRepository getLocalResourceRepository() {
+ return asterixAppRuntimeContext.getLocalResourceRepository();
+ }
+
+ @Override
+ public ResourceIdFactory getResourceIdFactory() {
+ return asterixAppRuntimeContext.getResourceIdFactory();
+ }
+
+ @Override
+ public IIOManager getIOManager() {
+ return asterixAppRuntimeContext.getIOManager();
+ }
+
+ @Override
+ public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
+ return asterixAppRuntimeContext.getVirtualBufferCaches(datasetID);
+ }
+
+ @Override
+ public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
+ return asterixAppRuntimeContext.getLSMBTreeOperationTracker(datasetID);
+ }
+
+ @Override
+ public IAsterixAppRuntimeContext getAppContext() {
+ return asterixAppRuntimeContext;
+ }
+
+ @Override
+ public AsterixThreadExecutor getThreadExecutor() {
+ return asterixAppRuntimeContext.getThreadExecutor();
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProviderForRecovery.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProviderForRecovery.java
deleted file mode 100644
index 18d004c..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProviderForRecovery.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
-import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public class AsterixAppRuntimeContextProviderForRecovery implements IAsterixAppRuntimeContextProvider {
-
- private final AsterixAppRuntimeContext asterixAppRuntimeContext;
-
- public AsterixAppRuntimeContextProviderForRecovery(AsterixAppRuntimeContext asterixAppRuntimeContext) {
- this.asterixAppRuntimeContext = asterixAppRuntimeContext;
- }
-
- @Override
- public IBufferCache getBufferCache() {
- return asterixAppRuntimeContext.getBufferCache();
- }
-
- @Override
- public IFileMapProvider getFileMapManager() {
- return asterixAppRuntimeContext.getFileMapManager();
- }
-
- @Override
- public ITransactionSubsystem getTransactionSubsystem() {
- return asterixAppRuntimeContext.getTransactionSubsystem();
- }
-
- @Override
- public IIndexLifecycleManager getIndexLifecycleManager() {
- return asterixAppRuntimeContext.getIndexLifecycleManager();
- }
-
- @Override
- public double getBloomFilterFalsePositiveRate() {
- return asterixAppRuntimeContext.getBloomFilterFalsePositiveRate();
- }
-
- @Override
- public ILSMIOOperationScheduler getLSMIOScheduler() {
- return asterixAppRuntimeContext.getLSMIOScheduler();
- }
-
- @Override
- public ILocalResourceRepository getLocalResourceRepository() {
- return asterixAppRuntimeContext.getLocalResourceRepository();
- }
-
- @Override
- public ResourceIdFactory getResourceIdFactory() {
- return asterixAppRuntimeContext.getResourceIdFactory();
- }
-
- @Override
- public IIOManager getIOManager() {
- return asterixAppRuntimeContext.getIOManager();
- }
-
- @Override
- public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
- return asterixAppRuntimeContext.getVirtualBufferCaches(datasetID);
- }
-
- @Override
- public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
- return asterixAppRuntimeContext.getLSMBTreeOperationTracker(datasetID);
- }
-
- @Override
- public IAsterixAppRuntimeContext getAppContext() {
- return asterixAppRuntimeContext;
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
index 1ae4ba6..51af387 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
@@ -32,7 +32,8 @@
protected List<Statement.Kind> getAllowedStatements() {
Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DATAVERSE_DROP, Kind.DATASET_DECL, Kind.NODEGROUP_DECL,
Kind.NODEGROUP_DROP, Kind.TYPE_DECL, Kind.TYPE_DROP, Kind.CREATE_INDEX, Kind.INDEX_DECL,
- Kind.CREATE_DATAVERSE, Kind.DATASET_DROP, Kind.INDEX_DROP, Kind.CREATE_FUNCTION, Kind.FUNCTION_DROP };
+ Kind.CREATE_DATAVERSE, Kind.DATASET_DROP, Kind.INDEX_DROP, Kind.CREATE_FUNCTION, Kind.FUNCTION_DROP,
+ Kind.CREATE_FEED };
return Arrays.asList(statementsArray);
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDashboardServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDashboardServlet.java
new file mode 100644
index 0000000..5cb53b2
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDashboardServlet.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.awt.image.BufferedImage;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.imageio.ImageIO;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+
+public class FeedDashboardServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger LOGGER = Logger.getLogger(FeedDashboardServlet.class.getName());
+
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ String resourcePath = null;
+ String requestURI = request.getRequestURI();
+
+ if (requestURI.equals("/")) {
+ response.setContentType("text/html");
+ resourcePath = "/feed/dashboard.html";
+ } else {
+ resourcePath = requestURI + ".html";
+ }
+
+ try {
+ InputStream is = FeedDashboardServlet.class.getResourceAsStream(resourcePath);
+ if (is == null) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+
+ // Special handler for font files and .png resources
+ if (resourcePath.endsWith(".png")) {
+
+ BufferedImage img = ImageIO.read(is);
+ OutputStream outputStream = response.getOutputStream();
+ String formatName = "png";
+ response.setContentType("image/png");
+ ImageIO.write(img, formatName, outputStream);
+ outputStream.close();
+ return;
+
+ }
+
+ response.setCharacterEncoding("utf-8");
+ InputStreamReader isr = new InputStreamReader(is);
+ StringBuilder sb = new StringBuilder();
+ BufferedReader br = new BufferedReader(isr);
+ String line = br.readLine();
+
+ while (line != null) {
+ sb.append(line + "\n");
+ line = br.readLine();
+ }
+
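+ // Identify which feed connection (dataverse, feed, dataset) the dashboard was opened for.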
+ String feedName = request.getParameter("feed");
+ String datasetName = request.getParameter("dataset");
+ String dataverseName = request.getParameter("dataverse");
+
+ FeedConnectionId feedId = new FeedConnectionId(dataverseName, feedName, datasetName);
+
+ String outStr = null;
+ if (requestURI.startsWith("/webui/static")) {
+ outStr = sb.toString();
+ } else {
+ MetadataManager.INSTANCE.init();
+ MetadataTransactionContext ctx = MetadataManager.INSTANCE.beginTransaction();
+ FeedActivity activity = MetadataManager.INSTANCE.getRecentActivityOnFeedConnection(ctx, feedId,
+ FeedActivityType.FEED_BEGIN);
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+
+ Map<String, String> activityDetails = activity.getFeedActivityDetails();
+
+ String host = activityDetails.get(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_HOST);
+ int port = Integer.parseInt(activityDetails
+ .get(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_PORT));
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(" Super Feed Maanger address :" + host + "[" + port + "]");
+ }
+
+ String ingestLocations = activityDetails.get(FeedActivityDetails.INGEST_LOCATIONS);
+ String computeLocations = activityDetails.get(FeedActivityDetails.COMPUTE_LOCATIONS);
+ String storageLocations = activityDetails.get(FeedActivityDetails.STORAGE_LOCATIONS);
+ String ingestionPolicy = activityDetails.get(FeedActivityDetails.FEED_POLICY_NAME);
+ String activeSince = activity.getLastUpdatedTimestamp();
+
+ outStr = String.format(sb.toString(), dataverseName, datasetName, feedName, ingestLocations,
+ computeLocations, storageLocations, ingestionPolicy, activeSince);
+ FeedServletUtil.initiateSubscription(feedId, host, port);
+ }
+
+ PrintWriter out = response.getWriter();
+ out.println(outStr);
+ } catch (ACIDException | MetadataException e) {
+ e.printStackTrace();
+ }
+ }
+
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDataProviderServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDataProviderServlet.java
new file mode 100644
index 0000000..463ce01
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedDataProviderServlet.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityDetails;
+
+public class FeedDataProviderServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+
+ String feedName = request.getParameter("feed");
+ String datasetName = request.getParameter("dataset");
+ String dataverseName = request.getParameter("dataverse");
+
+ String report = getFeedReport(feedName, datasetName, dataverseName);
+ System.out.println(" REPORT " + report);
+ long timestamp = System.currentTimeMillis();
+ JSONObject obj = null;
+ if (report != null) {
+ try {
+ obj = new JSONObject();
+ obj.put("type", "report");
+ obj.put("time", timestamp);
+ obj.put("value", report);
+ } catch (JSONException jsoe) {
+ throw new IOException(jsoe);
+ }
+ } else {
+ obj = verifyIfFeedIsAlive(dataverseName, feedName, datasetName);
+ }
+
+ PrintWriter out = response.getWriter();
+ out.println(obj.toString());
+ }
+
+ private String getFeedReport(String feedName, String datasetName, String dataverseName) {
+ FeedConnectionId feedId = new FeedConnectionId(dataverseName, feedName, datasetName);
+ LinkedBlockingQueue<String> queue = FeedLifecycleListener.INSTANCE.getFeedReportQueue(feedId);
+ String report = null;
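+ // Block for up to 25 seconds waiting for the next report pushed by the feed's message listener.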
+ try {
+ report = queue.poll(25, TimeUnit.SECONDS);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ return report;
+ }
+
+ private JSONObject verifyIfFeedIsAlive(String dataverseName, String feedName, String datasetName) {
+ JSONObject obj = new JSONObject();
+ try {
+ MetadataTransactionContext ctx = MetadataManager.INSTANCE.beginTransaction();
+ List<FeedActivity> feedActivities = MetadataManager.INSTANCE
+ .getActiveFeeds(ctx, dataverseName, datasetName);
+ FeedConnectionId feedId = new FeedConnectionId(dataverseName, feedName, datasetName);
+ FeedActivity activity = MetadataManager.INSTANCE.getRecentActivityOnFeedConnection(ctx, feedId, null);
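+ // Map the most recent activity on this connection to a status the dashboard can act on.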
+ switch (activity.getActivityType()) {
+ case FEED_BEGIN:
+ Map<String, String> activityDetails = activity.getFeedActivityDetails();
+ String ingestLocations = activityDetails.get(FeedActivityDetails.INGEST_LOCATIONS);
+ String computeLocations = activityDetails.get(FeedActivityDetails.COMPUTE_LOCATIONS);
+ String storageLocations = activityDetails.get(FeedActivityDetails.STORAGE_LOCATIONS);
+ obj.put("status", "active");
+ obj.put("type", "reload");
+ obj.put("ingestLocations", ingestLocations);
+ obj.put("computeLocations", computeLocations);
+ obj.put("storageLocations", storageLocations);
+ System.out.println(" RE LOADING " + " ingestion at " + ingestLocations + " compute at "
+ + computeLocations + " storage at " + storageLocations);
+ break;
+ case FEED_FAILURE:
+ obj.put("status", "failed");
+ break;
+ case FEED_END:
+ obj.put("status", "ended");
+ break;
+ }
+ } catch (Exception e) {
+ // ignore
+ }
+ return obj;
+
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java
new file mode 100644
index 0000000..96b3c31
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.awt.image.BufferedImage;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.List;
+
+import javax.imageio.ImageIO;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+
+public class FeedServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ String resourcePath = null;
+ String requestURI = request.getRequestURI();
+
+ if (requestURI.equals("/")) {
+ response.setContentType("text/html");
+ resourcePath = "/feed/home.html";
+ } else {
+ resourcePath = requestURI;
+ }
+
+ try {
+ InputStream is = FeedServlet.class.getResourceAsStream(resourcePath);
+ if (is == null) {
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ return;
+ }
+
+ // Special handler for font files and .png resources
+ if (resourcePath.endsWith(".png")) {
+
+ BufferedImage img = ImageIO.read(is);
+ OutputStream outputStream = response.getOutputStream();
+ String formatName = "png";
+ response.setContentType("image/png");
+ ImageIO.write(img, formatName, outputStream);
+ outputStream.close();
+ return;
+
+ }
+
+ response.setCharacterEncoding("utf-8");
+ InputStreamReader isr = new InputStreamReader(is);
+ StringBuilder sb = new StringBuilder();
+ BufferedReader br = new BufferedReader(isr);
+ String line = br.readLine();
+
+ while (line != null) {
+ sb.append(line + "\n");
+ line = br.readLine();
+ }
+
+ String outStr = null;
+ if (requestURI.startsWith("/webui/static")) {
+ outStr = sb.toString();
+ } else {
+ MetadataManager.INSTANCE.init();
+ MetadataTransactionContext ctx = MetadataManager.INSTANCE.beginTransaction();
+ List<FeedActivity> lfa = MetadataManager.INSTANCE.getActiveFeeds(ctx, null, null);
+ StringBuilder ldStr = new StringBuilder();
+ ldStr.append("<br />");
+ ldStr.append("<br />");
+ if (lfa == null || lfa.isEmpty()) {
+ ldStr.append("Currently there are no active feeds in the Asterix");
+ } else {
+ ldStr.append("Active Feeds");
+ }
+ FeedConnectionId feedId = null;
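+ // Render one dashboard link per active feed connection.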
+ for (FeedActivity feedActivity : lfa) {
+ feedId = new FeedConnectionId(feedActivity.getDataverseName(), feedActivity.getFeedName(),
+ feedActivity.getDatasetName());
+ ldStr.append("<br />");
+ ldStr.append("<br />");
+ ldStr.append("<a href=\"/feed/dashboard?dataverse=" + feedActivity.getDataverseName() + "&feed="
+ + feedActivity.getFeedName() + "&dataset=" + feedActivity.getDatasetName() + "\">" + feedId
+ + "</a>");
+ ldStr.append("<br />");
+ }
+
+ outStr = String.format(sb.toString(), ldStr.toString());
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+
+ }
+
+ PrintWriter out = response.getWriter();
+ out.println(outStr);
+ } catch (ACIDException | MetadataException e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java
new file mode 100644
index 0000000..ff29a23
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java
@@ -0,0 +1,45 @@
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener;
+import edu.uci.ics.asterix.metadata.feeds.RemoteSocketMessageListener;
+
+public class FeedServletUtil {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedServletUtil.class.getName());
+ private static final char EOL = (char) "\n".getBytes()[0];
+
+ public static void initiateSubscription(FeedConnectionId feedId, String host, int port) throws IOException {
+ LinkedBlockingQueue<String> outbox = new LinkedBlockingQueue<String>();
+ int subscriptionPort = port + 1;
+ Socket sc = new Socket(host, subscriptionPort);
+ InputStream in = sc.getInputStream();
+
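+ // Handshake: the Super Feed Manager replies with the subscription port as digits terminated by EOL.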
+ CharBuffer buffer = CharBuffer.allocate(50);
+ char ch = (char) in.read();
+ while (ch != EOL) {
+ buffer.put(ch);
+ ch = (char) in.read();
+ }
+ buffer.flip();
+ String s = new String(buffer.array());
+ int feedSubscriptionPort = Integer.parseInt(s.trim());
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Response from Super Feed Manager Report Service " + port + " will connect at " + host + " "
+ + port);
+ }
+
+ // register the feed subscription queue with FeedLifecycleListener
+ FeedLifecycleListener.INSTANCE.registerFeedReportQueue(feedId, outbox);
+ RemoteSocketMessageListener listener = new RemoteSocketMessageListener(host, feedSubscriptionPort, outbox);
+ listener.start();
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
index f06dbcd..05c9023 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
@@ -81,7 +81,7 @@
JSONObject jsonResponse = new JSONObject();
JSONArray results = new JSONArray();
while (resultReader.read(buffer) > 0) {
- results.put(ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor()));
+ ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor(), results);
}
jsonResponse.put("results", results);
out.write(jsonResponse.toString());
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
index ba0fa68..49d08f6 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
@@ -14,19 +14,20 @@
*/
package edu.uci.ics.asterix.api.http.servlet;
-import java.util.List;
-import java.io.InputStreamReader;
-import java.io.InputStream;
-import java.io.BufferedReader;
-import java.io.PrintWriter;
import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
import java.util.logging.Level;
import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import org.apache.commons.io.IOUtils;
import org.json.JSONObject;
import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
@@ -52,24 +53,36 @@
private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
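+ // POST bodies carry the AQL statements verbatim (UTF-8); GET passes them via the "query" parameter instead.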
+ @Override
+ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
+ IOException {
+ StringWriter sw = new StringWriter();
+ IOUtils.copy(request.getInputStream(), sw, StandardCharsets.UTF_8.name());
+ String query = sw.toString();
+ handleRequest(request, response, query);
+ }
+
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ String query = getQueryParameter(request);
+ handleRequest(request, response, query);
+ }
+
+ public void handleRequest(HttpServletRequest request, HttpServletResponse response, String query)
+ throws IOException {
response.setContentType("application/json");
response.setCharacterEncoding("utf-8");
PrintWriter out = response.getWriter();
-
DisplayFormat format = DisplayFormat.HTML;
-
String contentType = request.getContentType();
-
if ((contentType == null) || (contentType.equals("text/plain"))) {
format = DisplayFormat.TEXT;
} else if (contentType.equals("application/json")) {
format = DisplayFormat.JSON;
}
- String query = getQueryParameter(request);
boolean asyncResults = isAsync(request);
ServletContext context = getServletContext();
@@ -79,7 +92,6 @@
try {
synchronized (context) {
hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-
hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
if (hds == null) {
hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
@@ -90,12 +102,10 @@
AQLParser parser = new AQLParser(query);
List<Statement> aqlStatements = parser.Statement();
if (!containsForbiddenStatements(aqlStatements)) {
- SessionConfig sessionConfig = new SessionConfig(true, false, false, false, false, false, true, true, false);
-
+ SessionConfig sessionConfig = new SessionConfig(true, false, false, false, false, false, true, true,
+ false);
MetadataManager.INSTANCE.init();
-
AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, out, sessionConfig, format);
-
aqlTranslator.compileAndExecute(hcc, hds, asyncResults);
}
} catch (ParseException | TokenMgrError | edu.uci.ics.asterix.aqlplus.parser.TokenMgrError pe) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
index 772fad6..50adb57 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
@@ -30,9 +30,8 @@
}
protected List<Statement.Kind> getAllowedStatements() {
- Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPDATE,
- Kind.DML_CMD_LIST, Kind.LOAD_FROM_FILE, Kind.BEGIN_FEED,
- Kind.CONTROL_FEED, Kind.COMPACT };
+ Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPDATE, Kind.DML_CMD_LIST,
+ Kind.LOAD, Kind.CONNECT_FEED, Kind.DISCONNECT_FEED, Kind.SET, Kind.COMPACT };
return Arrays.asList(statementsArray);
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
index d550689..472eb19 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
@@ -23,13 +23,13 @@
import edu.uci.ics.asterix.api.common.Job;
import edu.uci.ics.asterix.api.common.SessionConfig;
import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
import edu.uci.ics.asterix.aql.translator.AqlTranslator;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
public class AsterixJavaClient {
private IHyracksClientConnection hcc;
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 929465a..8fb0baa 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -20,8 +20,13 @@
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
@@ -32,26 +37,27 @@
import edu.uci.ics.asterix.api.common.Job;
import edu.uci.ics.asterix.api.common.SessionConfig;
import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.ExternalDetailsDecl;
-import edu.uci.ics.asterix.aql.expression.FeedDetailsDecl;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FunctionDecl;
import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
import edu.uci.ics.asterix.aql.expression.Identifier;
import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
import edu.uci.ics.asterix.aql.expression.InsertStatement;
import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.Query;
@@ -64,6 +70,7 @@
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.file.DatasetOperations;
import edu.uci.ics.asterix.file.DataverseOperations;
@@ -76,27 +83,33 @@
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.CompactionPolicy;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.types.TypeSignature;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
import edu.uci.ics.asterix.result.ResultReader;
import edu.uci.ics.asterix.result.ResultUtils;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledBeginFeedStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledControlFeedStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledConnectFeedStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDeleteStatement;
@@ -128,6 +141,8 @@
*/
public class AqlTranslator extends AbstractAqlTranslator {
+ private static final Logger LOGGER = Logger.getLogger(AqlTranslator.class.getName());
+
private enum ProgressState {
NO_PROGRESS,
ADDED_PENDINGOP_RECORD_TO_METADATA
@@ -248,8 +263,8 @@
break;
}
- case LOAD_FROM_FILE: {
- handleLoadFromFileStatement(metadataProvider, stmt, hcc);
+ case LOAD: {
+ handleLoadStatement(metadataProvider, stmt, hcc);
break;
}
case INSERT: {
@@ -261,13 +276,22 @@
break;
}
- case BEGIN_FEED: {
- handleBeginFeedStatement(metadataProvider, stmt, hcc);
+ case CREATE_FEED: {
+ handleCreateFeedStatement(metadataProvider, stmt, hcc);
break;
}
- case CONTROL_FEED: {
- handleControlFeedStatement(metadataProvider, stmt, hcc);
+ case DROP_FEED: {
+ handleDropFeedStatement(metadataProvider, stmt, hcc);
+ break;
+ }
+ case CONNECT_FEED: {
+ handleConnectFeedStatement(metadataProvider, stmt, hcc);
+ break;
+ }
+
+ case DISCONNECT_FEED: {
+ handleDisconnectFeedStatement(metadataProvider, stmt, hcc);
break;
}
@@ -430,9 +454,14 @@
}
List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
.getPartitioningExprs();
+ boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
ARecordType aRecordType = (ARecordType) itemType;
- aRecordType.validatePartitioningExpressions(partitioningExprs);
- String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
+ aRecordType.validatePartitioningExpressions(partitioningExprs, autogenerated);
+
+ Identifier ngNameId = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName();
+ String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(dd,
+ dataverseName, mdTxnCtx);
+
String compactionPolicy = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getCompactionPolicy();
Map<String, String> compactionPolicyProperties = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
.getCompactionPolicyProperties();
@@ -444,7 +473,7 @@
}
datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
- ngName, compactionPolicy, compactionPolicyProperties);
+ ngName, autogenerated, compactionPolicy, compactionPolicyProperties);
break;
}
case EXTERNAL: {
@@ -453,36 +482,7 @@
datasetDetails = new ExternalDatasetDetails(adapter, properties);
break;
}
- case FEED: {
- IAType itemType = dt.getDatatype();
- if (itemType.getTypeTag() != ATypeTag.RECORD) {
- throw new AlgebricksException("Can only partition ARecord's.");
- }
- List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
- .getPartitioningExprs();
- ARecordType aRecordType = (ARecordType) itemType;
- aRecordType.validatePartitioningExpressions(partitioningExprs);
- String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
- String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterFactoryClassname();
- Map<String, String> configuration = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
- .getConfiguration();
- FunctionSignature signature = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getFunctionSignature();
- String compactionPolicy = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getCompactionPolicy();
- Map<String, String> compactionPolicyProperties = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
- .getCompactionPolicyProperties();
- if (compactionPolicy == null) {
- compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
- compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
- } else {
- validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx);
- }
- datasetDetails = new FeedDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
- InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
- ngName, adapter, configuration, signature,
- FeedDatasetDetails.FeedState.INACTIVE.toString(), compactionPolicy,
- compactionPolicyProperties);
- break;
- }
+
}
//#. initialize DatasetIdFactory if it is not initialized.
@@ -495,7 +495,7 @@
DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
- if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
+ if (dd.getDatasetType() == DatasetType.INTERNAL) {
Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
dataverseName);
JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
@@ -570,6 +570,58 @@
}
}
+ private String configureNodegroupForDataset(DatasetDecl dd, String dataverse, MetadataTransactionContext mdTxnCtx)
+ throws AsterixException {
+ int nodegroupCardinality = -1;
+ String nodegroupName;
+ String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
+ if (hintValue == null) {
+ nodegroupName = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
+ return nodegroupName;
+ } else {
+ int numChosen = 0;
+ boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
+ dd.getHints().get(DatasetNodegroupCardinalityHint.NAME)).first;
+ if (!valid) {
+ throw new AsterixException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
+ } else {
+ nodegroupCardinality = Integer.parseInt(dd.getHints().get(DatasetNodegroupCardinalityHint.NAME));
+ }
+ Set<String> nodeNames = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodeNames();
+ Set<String> nodeNamesClone = new HashSet<String>();
+ for (String node : nodeNames) {
+ nodeNamesClone.add(node);
+ }
+ String metadataNodeName = AsterixAppContextInfo.getInstance().getMetadataProperties().getMetadataNodeName();
+ List<String> selectedNodes = new ArrayList<String>();
+ selectedNodes.add(metadataNodeName);
+ numChosen++;
+ nodeNamesClone.remove(metadataNodeName);
+
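+ // Fill the remaining slots with distinct random nodes via a partial Fisher-Yates shuffle over the index array b.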
+ if (numChosen < nodegroupCardinality) {
+ Random random = new Random();
+ String[] nodes = nodeNamesClone.toArray(new String[] {});
+ int[] b = new int[nodeNamesClone.size()];
+ for (int i = 0; i < b.length; i++) {
+ b[i] = i;
+ }
+
+ for (int i = 0; i < nodegroupCardinality - numChosen; i++) {
+ int selected = i + random.nextInt(nodeNamesClone.size() - i);
+ int selNodeIndex = b[selected];
+ selectedNodes.add(nodes[selNodeIndex]);
+ int temp = b[i];
+ b[i] = b[selected];
+ b[selected] = temp;
+ }
+ }
+ nodegroupName = dataverse + ":" + dd.getName().getValue();
+ MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodegroupName, selectedNodes));
+ return nodegroupName;
+ }
+
+ }
+
private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
@@ -616,6 +668,19 @@
}
}
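+ // Creating an index on a dataset with active feeds is not supported; fail fast and list the offending feeds.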
+ List<FeedActivity> feedActivities = MetadataManager.INSTANCE.getActiveFeeds(mdTxnCtx, dataverseName,
+ datasetName);
+ if (feedActivities != null && !feedActivities.isEmpty()) {
+ StringBuilder builder = new StringBuilder();
+
+ for (FeedActivity fa : feedActivities) {
+ builder.append(fa + "\n");
+ }
+ throw new AsterixException("Dataset" + datasetName
+ + " is currently being fed into by the following feeds " + "." + builder.toString()
+ + "\nOperation not supported.");
+ }
+
//#. add a new index with PendingAddOp
Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
@@ -772,12 +837,32 @@
}
}
+ //# disconnect all feeds from any datasets in the dataverse.
+ List<FeedActivity> feedActivities = MetadataManager.INSTANCE.getActiveFeeds(mdTxnCtx, dataverseName, null);
+ DisconnectFeedStatement disStmt = null;
+ Identifier dvId = new Identifier(dataverseName);
+ for (FeedActivity fa : feedActivities) {
+ disStmt = new DisconnectFeedStatement(dvId, new Identifier(fa.getFeedName()), new Identifier(
+ fa.getDatasetName()));
+ try {
+ handleDisconnectFeedStatement(metadataProvider, disStmt, hcc);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Disconnected feed " + fa.getFeedName() + " from dataset " + fa.getDatasetName());
+ }
+ } catch (Exception exception) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to disconnect feed " + fa.getFeedName() + " from dataset "
+ + fa.getDatasetName() + ". Encountered exception " + exception);
+ }
+ }
+ }
+
//#. prepare jobs which will drop corresponding datasets with indexes.
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
for (int j = 0; j < datasets.size(); j++) {
String datasetName = datasets.get(j).getDatasetName();
DatasetType dsType = datasets.get(j).getDatasetType();
- if (dsType == DatasetType.INTERNAL || dsType == DatasetType.FEED) {
+ if (dsType == DatasetType.INTERNAL) {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
datasetName);
@@ -890,7 +975,22 @@
}
}
- if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
+ if (ds.getDatasetType() == DatasetType.INTERNAL) {
+ // prepare job spec(s) that would disconnect any active feeds involving the dataset.
+ List<FeedActivity> feedActivities = MetadataManager.INSTANCE.getActiveFeeds(mdTxnCtx, dataverseName,
+ datasetName);
+ List<JobSpecification> disconnectFeedJobSpecs = new ArrayList<JobSpecification>();
+ if (feedActivities != null && !feedActivities.isEmpty()) {
+ for (FeedActivity fa : feedActivities) {
+ JobSpecification jobSpec = FeedOperations.buildDisconnectFeedJobSpec(dataverseName,
+ fa.getFeedName(), datasetName, metadataProvider, fa);
+ disconnectFeedJobSpecs.add(jobSpec);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Disconnected feed " + fa.getFeedName() + " from dataset " + datasetName
+ + " as dataset is being dropped");
+ }
+ }
+ }
//#. prepare jobs to drop the datatset and the indexes in NC
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
@@ -915,6 +1015,11 @@
bActiveTxn = false;
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+ //# disconnect the feeds
+ for (JobSpecification jobSpec : disconnectFeedJobSpecs) {
+ runJob(hcc, jobSpec, true);
+ }
+
//#. run the jobs
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
@@ -927,6 +1032,13 @@
//#. finally, delete the dataset.
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+ // Drop the associated nodegroup
+ if (ds.getDatasetType() == DatasetType.INTERNAL) {
+ String nodegroup = ((InternalDatasetDetails) ds.getDatasetDetails()).getNodeGroupName();
+ if (!nodegroup.equalsIgnoreCase(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)) {
+ MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroup);
+ }
+ }
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
@@ -991,7 +1103,20 @@
+ dataverseName);
}
- if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
+ List<FeedActivity> feedActivities = MetadataManager.INSTANCE.getActiveFeeds(mdTxnCtx, dataverseName,
+ datasetName);
+ if (feedActivities != null && !feedActivities.isEmpty()) {
+ StringBuilder builder = new StringBuilder();
+
+ for (FeedActivity fa : feedActivities) {
+ builder.append(fa + "\n");
+ }
+ throw new AsterixException("Dataset" + datasetName
+ + " is currently being fed into by the following feeds " + "." + builder.toString()
+ + "\nOperation not supported.");
+ }
+
+ if (ds.getDatasetType() == DatasetType.INTERNAL) {
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
@@ -1175,8 +1300,8 @@
}
}
- private void handleLoadFromFileStatement(AqlMetadataProvider metadataProvider, Statement stmt,
- IHyracksClientConnection hcc) throws Exception {
+ private void handleLoadStatement(AqlMetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
+ throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
@@ -1184,7 +1309,7 @@
acquireReadLatch();
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
try {
- LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
+ LoadStatement loadStmt = (LoadStatement) stmt;
String dataverseName = getActiveDataverseName(loadStmt.getDataverseName());
CompiledLoadFromFileStatement cls = new CompiledLoadFromFileStatement(dataverseName, loadStmt
.getDatasetName().getValue(), loadStmt.getAdapter(), loadStmt.getProperties(),
@@ -1300,64 +1425,186 @@
// Query Compilation (happens under the same ongoing metadata
// transaction)
- JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query,
+ JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, reWrittenQuery.first,
reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, out, pdf, stmt);
return spec;
}
- private void handleBeginFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+ private void handleCreateFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+ IHyracksClientConnection hcc) throws Exception {
+
+ MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ acquireWriteLatch();
+
+ String dataverseName = null;
+ String feedName = null;
+ String adaptorName = null;
+ Feed feed = null;
+ try {
+ CreateFeedStatement cfs = (CreateFeedStatement) stmt;
+ dataverseName = getActiveDataverseName(cfs.getDataverseName());
+ feedName = cfs.getFeedName().getValue();
+ adaptorName = cfs.getAdaptorName();
+
+ feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
+ if (feed != null) {
+ if (cfs.getIfNotExists()) {
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ } else {
+ throw new AlgebricksException("A feed with this name " + adaptorName + " already exists.");
+ }
+ }
+
+ feed = new Feed(dataverseName, feedName, adaptorName, cfs.getAdaptorConfiguration(),
+ cfs.getAppliedFunction());
+ MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ abort(e, e, mdTxnCtx);
+ throw e;
+ } finally {
+ releaseWriteLatch();
+ }
+ }
+
+ private void handleDropFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+ IHyracksClientConnection hcc) throws Exception {
+ MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ acquireWriteLatch();
+
+ try {
+ FeedDropStatement stmtFeedDrop = (FeedDropStatement) stmt;
+ String dataverseName = getActiveDataverseName(stmtFeedDrop.getDataverseName());
+ String feedName = stmtFeedDrop.getFeedName().getValue();
+ Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
+ if (feed == null) {
+ if (!stmtFeedDrop.getIfExists()) {
+ throw new AlgebricksException("There is no feed with this name " + feedName + ".");
+ }
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ }
+
+ List<FeedActivity> feedActivities;
+ try {
+ feedActivities = MetadataManager.INSTANCE.getConnectFeedActivitiesForFeed(mdTxnCtx, dataverseName,
+ feedName);
+ MetadataManager.INSTANCE.dropFeed(mdTxnCtx, dataverseName, feedName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ throw new MetadataException(e);
+ }
+
+ List<JobSpecification> jobSpecs = new ArrayList<JobSpecification>();
+ for (FeedActivity feedActivity : feedActivities) {
+ JobSpecification jobSpec = FeedOperations.buildDisconnectFeedJobSpec(dataverseName, feedName,
+ feedActivity.getDatasetName(), metadataProvider, feedActivity);
+ jobSpecs.add(jobSpec);
+ }
+
+ for (JobSpecification spec : jobSpecs) {
+ runJob(hcc, spec, true);
+ }
+
+ } catch (Exception e) {
+ abort(e, e, mdTxnCtx);
+ throw e;
+ } finally {
+ releaseWriteLatch();
+ }
+ }
+
+ private void handleConnectFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireReadLatch();
-
+ boolean readLatchAcquired = true;
try {
+ ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
+ String dataverseName = getActiveDataverseName(cfs.getDataverseName());
metadataProvider.setWriteTransaction(true);
- BeginFeedStatement bfs = (BeginFeedStatement) stmt;
- String dataverseName = getActiveDataverseName(bfs.getDataverseName());
- CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
- .getValue(), bfs.getQuery(), bfs.getVarCounter());
+ CompiledConnectFeedStatement cbfs = new CompiledConnectFeedStatement(dataverseName, cfs.getFeedName(), cfs
+ .getDatasetName().getValue(), cfs.getPolicy(), cfs.getQuery(), cfs.getVarCounter());
- Dataset dataset;
- dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
- .getDatasetName().getValue());
+ Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
+ dataverseName, cfs.getDatasetName().getValue());
if (dataset == null) {
- throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
+ throw new AsterixException("Unknown target dataset :" + cfs.getDatasetName().getValue());
}
- IDatasetDetails datasetDetails = dataset.getDatasetDetails();
- if (datasetDetails.getDatasetType() != DatasetType.FEED) {
- throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue()
- + " is not a feed dataset");
- }
- bfs.initialize(metadataProvider.getMetadataTxnContext(), dataset);
- cbfs.setQuery(bfs.getQuery());
- metadataProvider.getConfig().put(FunctionUtils.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
- JobSpecification compiled = rewriteCompileQuery(metadataProvider, bfs.getQuery(), cbfs);
+ if (!dataset.getDatasetType().equals(DatasetType.INTERNAL)) {
+ throw new AsterixException("Statement not applicable. Dataset " + cfs.getDatasetName().getValue()
+ + " is not of required type " + DatasetType.INTERNAL);
+ }
+
+ Feed feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName,
+ cfs.getFeedName());
+ if (feed == null) {
+ throw new AsterixException("Unknown source feed: " + cfs.getFeedName());
+ }
+
+ FeedConnectionId feedConnId = new FeedConnectionId(dataverseName, cfs.getFeedName(), cfs.getDatasetName()
+ .getValue());
+ FeedActivity recentActivity = MetadataManager.INSTANCE.getRecentActivityOnFeedConnection(mdTxnCtx,
+ feedConnId, null);
+ boolean isFeedActive = FeedUtil.isFeedActive(recentActivity);
+ if (isFeedActive && !cfs.forceConnect()) {
+ throw new AsterixException("Feed " + cfs.getDatasetName().getValue()
+ + " is currently ACTIVE. Operation not supported");
+ }
+
+ FeedPolicy feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName,
+ cbfs.getPolicyName());
+ if (feedPolicy == null) {
+ feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx,
+ MetadataConstants.METADATA_DATAVERSE_NAME, cbfs.getPolicyName());
+ if (feedPolicy == null) {
+ throw new AsterixException("Unknown feed policy" + cbfs.getPolicyName());
+ }
+ }
+
+ cfs.initialize(metadataProvider.getMetadataTxnContext(), dataset, feed);
+ cbfs.setQuery(cfs.getQuery());
+ metadataProvider.getConfig().put(FunctionUtils.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
+ metadataProvider.getConfig().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, cbfs.getPolicyName());
+ JobSpecification compiled = rewriteCompileQuery(metadataProvider, cfs.getQuery(), cbfs);
+ JobSpecification newJobSpec = FeedUtil.alterJobSpecificationForFeed(compiled, feedConnId, feedPolicy);
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Altered feed ingestion spec to wrap operators");
+ }
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
-
- if (compiled != null) {
- runJob(hcc, compiled, true);
+ String waitForCompletionParam = metadataProvider.getConfig().get(ConnectFeedStatement.WAIT_FOR_COMPLETION);
+ boolean waitForCompletion = Boolean.parseBoolean(waitForCompletionParam);
+ if (waitForCompletion) {
+ releaseReadLatch();
+ readLatchAcquired = false;
}
-
+ runJob(hcc, newJobSpec, waitForCompletion);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
throw e;
} finally {
- releaseReadLatch();
+ if (readLatchAcquired) {
+ releaseReadLatch();
+ }
}
}
- private void handleControlFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+ private void handleDisconnectFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
@@ -1365,17 +1612,42 @@
acquireReadLatch();
try {
- ControlFeedStatement cfs = (ControlFeedStatement) stmt;
+ DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
String dataverseName = getActiveDataverseName(cfs.getDataverseName());
- CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(),
- dataverseName, cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
- JobSpecification jobSpec = FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider);
+
+ String datasetName = cfs.getDatasetName().getValue();
+ Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
+ dataverseName, cfs.getDatasetName().getValue());
+ if (dataset == null) {
+ throw new AsterixException("Unknown dataset :" + cfs.getDatasetName().getValue() + " in dataverse "
+ + dataverseName);
+ }
+ if (!dataset.getDatasetType().equals(DatasetType.INTERNAL)) {
+ throw new AsterixException("Statement not applicable. Dataset " + cfs.getDatasetName().getValue()
+ + " is not of required type " + DatasetType.INTERNAL);
+ }
+
+ Feed feed = MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, cfs
+ .getFeedName().getValue());
+ if (feed == null) {
+ throw new AsterixException("Unknown source feed :" + cfs.getFeedName());
+ }
+
+ FeedActivity feedActivity = MetadataManager.INSTANCE.getRecentActivityOnFeedConnection(mdTxnCtx,
+ new FeedConnectionId(dataverseName, feed.getFeedName(), datasetName), null);
+
+ boolean isFeedActive = FeedUtil.isFeedActive(feedActivity);
+ if (!isFeedActive) {
+ throw new AsterixException("Feed " + cfs.getDatasetName().getValue()
+ + " is currently INACTIVE. Operation not supported");
+ }
+
+ JobSpecification jobSpec = FeedOperations.buildDisconnectFeedJobSpec(dataverseName, cfs.getFeedName()
+ .getValue(), cfs.getDatasetName().getValue(), metadataProvider, feedActivity);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
-
runJob(hcc, jobSpec, true);
-
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
@@ -1405,7 +1677,7 @@
if (ds == null) {
throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
+ dataverseName + ".");
- } else if (ds.getDatasetType() != DatasetType.INTERNAL && ds.getDatasetType() != DatasetType.FEED) {
+ } else if (ds.getDatasetType() != DatasetType.INTERNAL) {
throw new AlgebricksException("Cannot compact the extrenal dataset " + datasetName + ".");
}
@@ -1478,21 +1750,24 @@
resultReader.open(jobId, metadataProvider.getResultSetId());
buffer.clear();
+ JSONArray results = new JSONArray();
while (resultReader.read(buffer) > 0) {
- response.put("results",
- ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor()));
+ ResultUtils.getJSONFromBuffer(buffer, resultReader.getFrameTupleAccessor(), results);
buffer.clear();
- switch (pdf) {
- case HTML:
- ResultUtils.prettyPrintHTML(out, response);
- break;
- case TEXT:
- case JSON:
- out.print(response);
- break;
- }
- out.flush();
}
+
+ response.put("results", results);
+ switch (pdf) {
+ case HTML:
+ ResultUtils.prettyPrintHTML(out, response);
+ break;
+ case TEXT:
+ case JSON:
+ out.print(response);
+ break;
+ }
+ out.flush();
+
if (pdf == DisplayFormat.HTML) {
out.println("</pre>");
}
@@ -1617,4 +1892,4 @@
throw new IllegalStateException(rootE);
}
}
-}
+}
\ No newline at end of file
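The connect-feed handler above intentionally releases the read latch before submitting a wait-for-completion job, so a long-running ingestion job never blocks other statements on the latch. A minimal, self-contained sketch of that pattern, using a plain ReentrantReadWriteLock as a stand-in for the translator's latch (all names here are illustrative, not AsterixDB API):

import java.util.concurrent.locks.ReentrantReadWriteLock;

public class LatchReleaseSketch {
    private final ReentrantReadWriteLock latch = new ReentrantReadWriteLock();

    public void handleStatement(boolean waitForCompletion) {
        latch.readLock().lock();
        boolean readLatchAcquired = true;
        try {
            // ... compile the job while holding the latch ...
            if (waitForCompletion) {
                // Release before a potentially long, blocking execution.
                latch.readLock().unlock();
                readLatchAcquired = false;
            }
            runJob(waitForCompletion); // blocks iff waitForCompletion is true
        } finally {
            if (readLatchAcquired) {
                latch.readLock().unlock();
            }
        }
    }

    private void runJob(boolean wait) {
        // stand-in for submitting the job spec via the Hyracks client connection
    }
}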
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index d99c4da..30736c0 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -207,7 +207,7 @@
if (dataset == null) {
throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
}
- if (dataset.getDatasetType() != DatasetType.INTERNAL && dataset.getDatasetType() != DatasetType.FEED) {
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
throw new AsterixException("Cannot load data into dataset (" + datasetName + ")" + "of type "
+ dataset.getDatasetType());
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
index c7ad8bd..5e52f3e 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
@@ -14,21 +14,11 @@
*/
package edu.uci.ics.asterix.file;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
import java.util.logging.Logger;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedMessage;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage.MessageType;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledControlFeedStatement;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -57,60 +47,17 @@
* @throws AsterixException
* @throws AlgebricksException
*/
- public static JobSpecification buildControlFeedJobSpec(CompiledControlFeedStatement controlFeedStatement,
- AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
- switch (controlFeedStatement.getOperationType()) {
- case ALTER:
- case END: {
- return createSendMessageToFeedJobSpec(controlFeedStatement, metadataProvider);
- }
- default: {
- throw new AsterixException("Unknown Operation Type: " + controlFeedStatement.getOperationType());
- }
-
- }
- }
-
- private static JobSpecification createSendMessageToFeedJobSpec(CompiledControlFeedStatement controlFeedStatement,
- AqlMetadataProvider metadataProvider) throws AsterixException {
- String dataverseName = controlFeedStatement.getDataverseName() == null ? metadataProvider
- .getDefaultDataverseName() : controlFeedStatement.getDataverseName();
- String datasetName = controlFeedStatement.getDatasetName();
- String datasetPath = dataverseName + File.separator + datasetName;
-
- LOGGER.info(" DATASETPATH: " + datasetPath);
-
- Dataset dataset;
- try {
- dataset = metadataProvider.findDataset(dataverseName, datasetName);
- } catch (AlgebricksException e) {
- throw new AsterixException(e);
- }
- if (dataset == null) {
- throw new AsterixException("FEED DATASET: No metadata for dataset " + datasetName);
- }
- if (dataset.getDatasetType() != DatasetType.FEED) {
- throw new AsterixException("Operation not support for dataset type " + dataset.getDatasetType());
- }
+ public static JobSpecification buildDisconnectFeedJobSpec(String dataverseName, String feedName,
+ String datasetName, AqlMetadataProvider metadataProvider, FeedActivity feedActivity)
+ throws AsterixException, AlgebricksException {
JobSpecification spec = JobSpecificationUtils.createJobSpecification();
IOperatorDescriptor feedMessenger;
AlgebricksPartitionConstraint messengerPc;
- List<IFeedMessage> feedMessages = new ArrayList<IFeedMessage>();
- switch (controlFeedStatement.getOperationType()) {
- case END:
- feedMessages.add(new FeedMessage(MessageType.STOP));
- break;
- case ALTER:
- feedMessages.add(new AlterFeedMessage(controlFeedStatement.getProperties()));
- break;
- }
-
try {
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider.buildFeedMessengerRuntime(
- metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(), dataverseName,
- datasetName, feedMessages);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider
+ .buildDisconnectFeedMessengerRuntime(spec, dataverseName, feedName, datasetName, feedActivity);
feedMessenger = p.first;
messengerPc = p.second;
} catch (AlgebricksException e) {
@@ -121,9 +68,7 @@
NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
-
spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
-
spec.addRoot(nullSink);
return spec;
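With ALTER and END gone, disconnect is the only control message left, which is why the rewritten builder needs no switch over operation types. For contrast, a self-contained sketch of the removed multiplexing shape (the enum is an illustrative stand-in for the deleted IFeedMessage.MessageType):

import java.util.ArrayList;
import java.util.List;

public class ControlMessageSketch {
    enum MessageType { STOP, ALTER } // stand-in for the removed message hierarchy

    // The deleted createSendMessageToFeedJobSpec built a message list by switching
    // on the compiled statement's operation type; the new builder has no such branch.
    static List<MessageType> buildControlMessages(MessageType operation) {
        List<MessageType> messages = new ArrayList<MessageType>();
        switch (operation) {
            case STOP:
            case ALTER:
                messages.add(operation);
                break;
            default:
                throw new IllegalArgumentException("Unknown operation: " + operation);
        }
        return messages;
    }
}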
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
index 5ce62d7..34a008f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
@@ -233,7 +233,11 @@
protected void setSecondaryRecDescAndComparators(IndexType indexType, List<String> secondaryKeyFields,
int gramLength, AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException {
secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys];
- secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
+ if (indexType == IndexType.RTREE) {
+ secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys];
+ } else {
+ secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
+ }
secondaryBloomFilterKeyFields = new int[numSecondaryKeys];
ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys];
ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
@@ -257,7 +261,9 @@
for (int i = 0; i < numPrimaryKeys; i++) {
secondaryRecFields[numSecondaryKeys + i] = primaryRecDesc.getFields()[i];
secondaryTypeTraits[numSecondaryKeys + i] = primaryRecDesc.getTypeTraits()[i];
- secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
+ if (indexType != IndexType.RTREE) {
+ secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
+ }
}
secondaryRecDesc = new RecordDescriptor(secondaryRecFields, secondaryTypeTraits);
}
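The sizing change above reflects how the two index types compare entries: a B-tree secondary index orders on the secondary keys with the primary keys appended as a tiebreaker, while an R-tree compares only on its spatial secondary key fields. A tiny sketch of the sizing rule (illustrative, not the helper's API):

public class ComparatorSizingSketch {
    // R-trees compare only the (spatial) secondary key fields; other secondary
    // index types append the primary keys as a tiebreaker.
    static int comparatorCount(boolean isRTree, int numSecondaryKeys, int numPrimaryKeys) {
        return isRTree ? numSecondaryKeys : numSecondaryKeys + numPrimaryKeys;
    }
}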
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
new file mode 100644
index 0000000..bcb720f
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
@@ -0,0 +1,34 @@
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import edu.uci.ics.hyracks.api.application.IStateDumpHandler;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
+
+public class AsterixStateDumpHandler implements IStateDumpHandler {
+ private final String nodeId;
+ private final Path dumpPath;
+ private final ILifeCycleComponentManager lccm;
+
+ public AsterixStateDumpHandler(String nodeId, String dumpPath, ILifeCycleComponentManager lccm) {
+ this.nodeId = nodeId;
+ this.dumpPath = Paths.get(dumpPath);
+ this.lccm = lccm;
+ }
+
+ @Override
+ public void dumpState(OutputStream os) throws IOException {
+ dumpPath.toFile().mkdirs();
+ File df = dumpPath.resolve(nodeId + "-" + System.currentTimeMillis() + ".dump").toFile();
+ try (FileOutputStream fos = new FileOutputStream(df)) {
+ lccm.dumpState(fos);
+ }
+ os.write(df.getAbsolutePath().getBytes("UTF-8"));
+ }
+
+}
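The handler above writes each dump to a timestamped file and reports the file's path, not its contents, back on the supplied stream. A self-contained usage sketch of that pattern, with a hypothetical stand-in for Hyracks' ILifeCycleComponentManager:

import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DumpHandlerSketch {
    // Hypothetical stand-in for ILifeCycleComponentManager's dump capability.
    interface StateDumper {
        void dumpState(OutputStream os) throws IOException;
    }

    public static void main(String[] args) throws IOException {
        StateDumper dumper = new StateDumper() {
            public void dumpState(OutputStream os) throws IOException {
                os.write("component state".getBytes("UTF-8"));
            }
        };
        Path dumpDir = Paths.get(System.getProperty("java.io.tmpdir"), "dumps");
        dumpDir.toFile().mkdirs();
        Path df = dumpDir.resolve("nc1-" + System.currentTimeMillis() + ".dump");
        try (FileOutputStream fos = new FileOutputStream(df.toFile())) {
            dumper.dumpState(fos); // full state goes to the file
        }
        OutputStream response = new ByteArrayOutputStream();
        response.write(df.toFile().getAbsolutePath().getBytes("UTF-8")); // only the path is reported
    }
}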
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index 3ecb83d..7ac368b 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -22,12 +22,15 @@
import org.eclipse.jetty.servlet.ServletHolder;
import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.api.http.servlet.AsterixSDKServlet;
import edu.uci.ics.asterix.api.http.servlet.DDLAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.FeedDashboardServlet;
+import edu.uci.ics.asterix.api.http.servlet.FeedDataProviderServlet;
+import edu.uci.ics.asterix.api.http.servlet.FeedServlet;
import edu.uci.ics.asterix.api.http.servlet.QueryAPIServlet;
import edu.uci.ics.asterix.api.http.servlet.QueryResultAPIServlet;
import edu.uci.ics.asterix.api.http.servlet.QueryStatusAPIServlet;
import edu.uci.ics.asterix.api.http.servlet.UpdateAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.AsterixSDKServlet;
import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
@@ -39,6 +42,7 @@
import edu.uci.ics.hyracks.api.application.ICCApplicationEntryPoint;
import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
private static final Logger LOGGER = Logger.getLogger(CCApplicationEntryPoint.class.getName());
@@ -47,6 +51,8 @@
private Server webServer;
private Server jsonAPIServer;
+ private Server feedServer;
+
private static IAsterixStateProxy proxy;
private ICCApplicationContext appCtx;
@@ -57,19 +63,30 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting Asterix cluster controller");
}
- appCtx.setThreadFactory(AsterixThreadFactory.INSTANCE);
- AsterixAppContextInfo.initialize(appCtx);
+
+ appCtx.setThreadFactory(new AsterixThreadFactory(new LifeCycleComponentManager()));
+ AsterixAppContextInfo.initialize(appCtx, getNewHyracksClientConnection());
+
proxy = AsterixStateProxy.registerRemoteObject();
appCtx.setDistributedState(proxy);
AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
+ AsterixAppContextInfo.getInstance().getCCApplicationContext()
+ .addJobLifecycleListener(FeedLifecycleListener.INSTANCE);
+
AsterixExternalProperties externalProperties = AsterixAppContextInfo.getInstance().getExternalProperties();
setupWebServer(externalProperties);
webServer.start();
+
setupJSONAPIServer(externalProperties);
jsonAPIServer.start();
+ ExternalLibraryBootstrap.setUpExternalLibraries(false);
+
+ setupFeedServer(externalProperties);
+ feedServer.start();
+
ccAppCtx.addClusterLifecycleListener(ClusterLifecycleListener.INSTANCE);
}
@@ -121,4 +138,21 @@
context.addServlet(new ServletHolder(new DDLAPIServlet()), "/ddl");
context.addServlet(new ServletHolder(new AsterixSDKServlet()), "/");
}
-}
+
+ private void setupFeedServer(AsterixExternalProperties externalProperties) throws Exception {
+ feedServer = new Server(externalProperties.getFeedServerPort());
+
+ ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+ context.setContextPath("/");
+
+ IHyracksClientConnection hcc = getNewHyracksClientConnection();
+ context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
+
+ feedServer.setHandler(context);
+ context.addServlet(new ServletHolder(new FeedServlet()), "/");
+ context.addServlet(new ServletHolder(new FeedDashboardServlet()), "/feed/dashboard");
+ context.addServlet(new ServletHolder(new FeedDataProviderServlet()), "/feed/data");
+
+ // add paths here
+ }
+}
\ No newline at end of file
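setupFeedServer follows the same embedded-Jetty recipe as the other servers: one Server on a dedicated port, one ServletContextHandler carrying the Hyracks connection as a context attribute, and servlets mapped by path. A minimal runnable sketch of that recipe (the port and servlet body are illustrative; the real servlets come from the asterix-app servlet package):

import java.io.IOException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class FeedServerSketch {
    public static void main(String[] args) throws Exception {
        Server feedServer = new Server(19003); // illustrative port
        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
        context.setContextPath("/");
        feedServer.setHandler(context);
        context.addServlet(new ServletHolder(new HttpServlet() {
            @Override
            protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
                resp.getWriter().println("feed dashboard placeholder");
            }
        }), "/feed/dashboard");
        feedServer.start();
        feedServer.join();
    }
}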
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index a864e61..950afe4 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -14,22 +14,53 @@
*/
package edu.uci.ics.asterix.hyracks.bootstrap;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse.Status;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWorkResponse;
import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;
public class ClusterLifecycleListener implements IClusterLifecycleListener {
+ private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+
+ private static final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<Set<IClusterManagementWork>>();
+
+ private static ClusterWorkExecutor eventHandler = new ClusterWorkExecutor(workRequestQueue);
+
+ private static List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<IClusterManagementWorkResponse>();
+
public static ClusterLifecycleListener INSTANCE = new ClusterLifecycleListener();
private ClusterLifecycleListener() {
+ Thread t = new Thread(eventHandler);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting cluster event handler");
+ }
+ t.start();
}
- private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+ public enum ClusterEventType {
+ NODE_JOIN,
+ NODE_FAILURE
+ }
@Override
public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
@@ -37,6 +68,21 @@
LOGGER.info("NC: " + nodeId + " joined");
}
AsterixClusterProperties.INSTANCE.addNCConfiguration(nodeId, ncConfiguration);
+ Set<String> nodeAddition = new HashSet<String>();
+ nodeAddition.add(nodeId);
+ updateProgress(ClusterEventType.NODE_JOIN, nodeAddition);
+ Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
+ Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+ for (IClusterEventsSubscriber sub : subscribers) {
+ Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId);
+ if (workRequest != null && !workRequest.isEmpty()) {
+ work.addAll(workRequest);
+ }
+ }
+ if (!work.isEmpty()) {
+ executeWorkSet(work);
+ }
+
}
public void notifyNodeFailure(Set<String> deadNodeIds) {
@@ -46,7 +92,113 @@
}
AsterixClusterProperties.INSTANCE.removeNCConfiguration(deadNode);
}
-
+ updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
+ Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
+ Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+ for (IClusterEventsSubscriber sub : subscribers) {
+ Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds);
+ if (workRequest != null && !workRequest.isEmpty()) {
+ work.addAll(workRequest);
+ }
+ }
+ if (!work.isEmpty()) {
+ executeWorkSet(work);
+ }
}
+ private void updateProgress(ClusterEventType eventType, Set<String> nodeIds) {
+ List<IClusterManagementWorkResponse> completedResponses = new ArrayList<IClusterManagementWorkResponse>();
+ boolean isComplete = false;
+ for (IClusterManagementWorkResponse resp : pendingWorkResponses) {
+ switch (eventType) {
+ case NODE_FAILURE:
+ isComplete = ((RemoveNodeWorkResponse) resp).updateProgress(nodeIds);
+ if (isComplete) {
+ resp.setStatus(Status.SUCCESS);
+ resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
+ completedResponses.add(resp);
+ }
+ break;
+
+ case NODE_JOIN:
+ isComplete = ((AddNodeWorkResponse) resp).updateProgress(nodeIds.iterator().next());
+ if (isComplete) {
+ resp.setStatus(Status.SUCCESS);
+ resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
+ completedResponses.add(resp);
+ }
+ break;
+ }
+ }
+ pendingWorkResponses.removeAll(completedResponses);
+ }
+
+ private void executeWorkSet(Set<IClusterManagementWork> workSet) {
+ int nodesToAdd = 0;
+ Set<String> nodesToRemove = new HashSet<String>();
+ Set<AddNodeWork> nodeAdditionRequests = new HashSet<AddNodeWork>();
+ Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+ for (IClusterManagementWork w : workSet) {
+ switch (w.getClusterManagementWorkType()) {
+ case ADD_NODE:
+ if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodes()) {
+ nodesToAdd = ((AddNodeWork) w).getNumberOfNodes();
+ }
+ nodeAdditionRequests.add((AddNodeWork) w);
+ break;
+ case REMOVE_NODE:
+ nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
+ nodeRemovalRequests.add(w);
+ RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
+ pendingWorkResponses.add(response);
+ break;
+ }
+ }
+
+ List<String> addedNodes = new ArrayList<String>();
+ String asterixInstanceName = AsterixClusterProperties.INSTANCE.getCluster().getInstanceName();
+ for (int i = 0; i < nodesToAdd; i++) {
+ Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
+ if (node != null) {
+ try {
+ ClusterManager.INSTANCE.addNode(node);
+ addedNodes.add(asterixInstanceName + "_" + node.getId());
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Added NC at:" + node.getId());
+ }
+ } catch (AsterixException e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to add NC at:" + node.getId());
+ }
+ e.printStackTrace();
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to add NC: no more available nodes");
+ }
+
+ }
+ }
+
+ for (AddNodeWork w : nodeAdditionRequests) {
+ int n = w.getNumberOfNodes();
+ List<String> nodesToBeAddedForWork = new ArrayList<String>();
+ for (int i = 0; i < n && i < addedNodes.size(); i++) {
+ nodesToBeAddedForWork.add(addedNodes.get(i));
+ }
+ if (nodesToBeAddedForWork.isEmpty()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Unable to satisfy request by " + w);
+ }
+ AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
+ response.setStatus(Status.FAILURE);
+ w.getSourceSubscriber().notifyRequestCompletion(response);
+
+ } else {
+ AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
+ pendingWorkResponses.add(response);
+ }
+ }
+
+ }
}
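Both notification paths above share one fan-out pattern: ask every registered subscriber for work, pool the non-empty requests, and execute the batch once. A self-contained sketch of that pattern (the two interfaces are hypothetical stand-ins for IClusterEventsSubscriber and IClusterManagementWork):

import java.util.HashSet;
import java.util.Set;

public class SubscriberFanOutSketch {
    interface Work {} // stand-in for IClusterManagementWork

    interface Subscriber { // stand-in for IClusterEventsSubscriber
        Set<Work> notifyNodeJoin(String nodeId);
    }

    static void onNodeJoin(String nodeId, Set<Subscriber> subscribers) {
        Set<Work> work = new HashSet<Work>();
        for (Subscriber sub : subscribers) {
            Set<Work> request = sub.notifyNodeJoin(nodeId);
            if (request != null && !request.isEmpty()) {
                work.addAll(request); // pool requests from all subscribers
            }
        }
        if (!work.isEmpty()) {
            executeWorkSet(work); // dispatch the batch once
        }
    }

    static void executeWorkSet(Set<Work> work) {
        // stand-in for the node-addition/removal dispatch in executeWorkSet above
    }
}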
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
new file mode 100644
index 0000000..77581c7
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+
+public class ClusterWorkExecutor implements Runnable {
+
+ private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
+
+ private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
+
+ public ClusterWorkExecutor(LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+ this.inbox = inbox;
+ }
+
+ @Override
+ public void run() {
+ while (true) {
+ try {
+ Set<IClusterManagementWork> workSet = inbox.take();
+ int nodesToAdd = 0;
+ Set<String> nodesToRemove = new HashSet<String>();
+ Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<IClusterManagementWork>();
+ Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+ for (IClusterManagementWork w : workSet) {
+ switch (w.getClusterManagementWorkType()) {
+ case ADD_NODE:
+ if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodes()) {
+ nodesToAdd = ((AddNodeWork) w).getNumberOfNodes();
+ }
+ nodeAdditionRequests.add(w);
+ break;
+ case REMOVE_NODE:
+ nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
+ nodeRemovalRequests.add(w);
+ break;
+ }
+ }
+
+ Set<Node> addedNodes = new HashSet<Node>();
+ for (int i = 0; i < nodesToAdd; i++) {
+ Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
+ if (node != null) {
+ try {
+ ClusterManager.INSTANCE.addNode(node);
+ addedNodes.add(node);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Added NC at:" + node.getId());
+ }
+ } catch (AsterixException e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to add NC at:" + node.getId());
+ }
+ e.printStackTrace();
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to add NC: no more available nodes");
+ }
+ }
+ }
+
+ } catch (InterruptedException e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Interrupted: " + e.getMessage());
+ }
+ Thread.currentThread().interrupt();
+ throw new IllegalStateException(e);
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unexpected exception in handling cluster event" + e.getMessage());
+ }
+ }
+
+ }
+ }
+
+}
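The executor's core is a take()-loop worker over a LinkedBlockingQueue. A minimal self-contained sketch of that loop, including the interrupt-status restore noted in the catch block above:

import java.util.concurrent.LinkedBlockingQueue;

public class QueueWorkerSketch implements Runnable {
    private final LinkedBlockingQueue<Runnable> inbox = new LinkedBlockingQueue<Runnable>();

    public void submit(Runnable work) {
        inbox.add(work);
    }

    @Override
    public void run() {
        while (true) {
            try {
                Runnable work = inbox.take(); // blocks until work arrives
                work.run();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                return; // exit the worker loop on interruption
            } catch (Exception e) {
                // log and continue: one bad work set must not kill the worker
            }
        }
    }
}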
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
new file mode 100755
index 0000000..d2f3345
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
@@ -0,0 +1,316 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.external.library.ExternalLibrary;
+import edu.uci.ics.asterix.external.library.LibraryAdapter;
+import edu.uci.ics.asterix.external.library.LibraryFunction;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
+import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
+import edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat;
+
+public class ExternalLibraryBootstrap {
+
+ private static Logger LOGGER = Logger.getLogger(ExternalLibraryBootstrap.class.getName());
+
+ public static void setUpExternalLibraries(boolean isMetadataNode) throws Exception {
+
+ Map<String, List<String>> uninstalledLibs = null;
+ if (isMetadataNode) {
+ uninstalledLibs = uninstallLibraries();
+ }
+
+ File installLibDir = getLibraryInstallDir();
+ if (installLibDir.exists()) {
+ for (String dataverse : installLibDir.list()) {
+ File dataverseDir = new File(installLibDir, dataverse);
+ String[] libraries = dataverseDir.list();
+ for (String library : libraries) {
+ registerLibrary(dataverse, library, isMetadataNode, installLibDir);
+ if (isMetadataNode) {
+ File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
+ + File.separator + library);
+ installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
+ }
+ }
+ }
+ }
+ }
+
+ private static Map<String, List<String>> uninstallLibraries() throws Exception {
+ Map<String, List<String>> uninstalledLibs = new HashMap<String, List<String>>();
+ File uninstallLibDir = getLibraryUninstallDir();
+ String[] uninstallLibNames;
+ if (uninstallLibDir.exists()) {
+ uninstallLibNames = uninstallLibDir.list();
+ for (String uninstallLibName : uninstallLibNames) {
+ String[] components = uninstallLibName.split("\\.");
+ String dataverse = components[0];
+ String libName = components[1];
+ uninstallLibrary(dataverse, libName);
+ new File(uninstallLibDir, uninstallLibName).delete();
+ List<String> uinstalledLibsInDv = uninstalledLibs.get(dataverse);
+ if (uinstalledLibsInDv == null) {
+ uinstalledLibsInDv = new ArrayList<String>();
+ uninstalledLibs.put(dataverse, uinstalledLibsInDv);
+ }
+ uinstalledLibsInDv.add(libName);
+ }
+ }
+ return uninstalledLibs;
+ }
+
+ private static boolean uninstallLibrary(String dataverse, String libraryName) throws AsterixException,
+ RemoteException, ACIDException {
+ MetadataTransactionContext mdTxnCtx = null;
+ try {
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+ if (dv == null) {
+ return false;
+ }
+
+ edu.uci.ics.asterix.metadata.entities.Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx,
+ dataverse, libraryName);
+ if (library == null) {
+ return false;
+ }
+
+ List<edu.uci.ics.asterix.metadata.entities.Function> functions = MetadataManager.INSTANCE
+ .getDataverseFunctions(mdTxnCtx, dataverse);
+ for (edu.uci.ics.asterix.metadata.entities.Function function : functions) {
+ if (function.getName().startsWith(libraryName + "#")) {
+ MetadataManager.INSTANCE.dropFunction(mdTxnCtx, new FunctionSignature(dataverse,
+ function.getName(), function.getArity()));
+ }
+ }
+
+ List<edu.uci.ics.asterix.metadata.entities.DatasourceAdapter> adapters = MetadataManager.INSTANCE
+ .getDataverseAdapters(mdTxnCtx, dataverse);
+ for (edu.uci.ics.asterix.metadata.entities.DatasourceAdapter adapter : adapters) {
+ if (adapter.getAdapterIdentifier().getAdapterName().startsWith(libraryName + "#")) {
+ MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier()
+ .getAdapterName());
+ }
+ }
+
+ MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ throw new AsterixException(e);
+ }
+ return true;
+ }
+
+ // Each element of a library is installed as part of a transaction. Any
+ // failure in installing an element does not affect installation of other
+ // libraries.
+ private static void installLibraryIfNeeded(String dataverse, final File libraryDir,
+ Map<String, List<String>> uninstalledLibs) throws Exception {
+
+ String libraryName = libraryDir.getName();
+ List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
+ boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
+
+ MetadataTransactionContext mdTxnCtx = null;
+ try {
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ edu.uci.ics.asterix.metadata.entities.Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(
+ mdTxnCtx, dataverse, libraryName);
+ if (libraryInMetadata != null && !wasUninstalled) {
+ return;
+ }
+
+ String[] libraryDescriptors = libraryDir.list(new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.endsWith(".xml");
+ }
+ });
+
+ if (libraryDescriptors.length == 0) {
+ throw new Exception("No library descriptors defined");
+ } else if (libraryDescriptors.length > 1) {
+ throw new Exception("More than one library descriptor defined");
+ }
+
+ ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
+
+ Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+ if (dv == null) {
+ MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse,
+ NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, IMetadataEntity.PENDING_NO_OP));
+ }
+ if (library.getLibraryFunctions() != null) {
+ for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
+ String[] fargs = function.getArguments().trim().split(",");
+ List<String> args = new ArrayList<String>();
+ for (String arg : fargs) {
+ args.add(arg);
+ }
+ edu.uci.ics.asterix.metadata.entities.Function f = new edu.uci.ics.asterix.metadata.entities.Function(
+ dataverse, libraryName + "#" + function.getName(), args.size(), args,
+ function.getReturnType(), function.getDefinition(), library.getLanguage(),
+ function.getFunctionType());
+ MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Installed function: " + libraryName + "#" + function.getName());
+ }
+ }
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Installed functions contain in library :" + libraryName);
+ }
+
+ if (library.getLibraryAdapters() != null) {
+ for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
+ String adapterFactoryClass = adapter.getFactoryClass();
+ String adapterName = libraryName + "#" + adapter.getName();
+ AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
+ DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass, AdapterType.EXTERNAL);
+ MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Installed adapter: " + adapterName);
+ }
+ }
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Installed adapters contain in library :" + libraryName);
+ }
+
+ MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new edu.uci.ics.asterix.metadata.entities.Library(dataverse,
+ libraryName));
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Added library " + libraryName + "to Metadata");
+ }
+
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ e.printStackTrace();
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Exception in installing library " + libraryName);
+ }
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ }
+ }
+
+ private static void registerLibrary(String dataverse, String libraryName, boolean isMetadataNode, File installLibDir)
+ throws Exception {
+ ClassLoader classLoader = getLibraryClassLoader(dataverse, libraryName);
+ ExternalLibraryManager.registerLibraryClassLoader(dataverse, libraryName, classLoader);
+ }
+
+ private static ExternalLibrary getLibrary(File libraryXMLPath) throws Exception {
+ JAXBContext configCtx = JAXBContext.newInstance(ExternalLibrary.class);
+ Unmarshaller unmarshaller = configCtx.createUnmarshaller();
+ ExternalLibrary library = (ExternalLibrary) unmarshaller.unmarshal(libraryXMLPath);
+ return library;
+ }
+
+ private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
+
+ File installDir = getLibraryInstallDir();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Installing lirbary " + libraryName + " in dataverse " + dataverse + "."
+ + " Install Directory: " + installDir.getAbsolutePath());
+ }
+
+ File libDir = new File(installDir.getAbsolutePath() + File.separator + dataverse + File.separator + libraryName);
+ FilenameFilter jarFileFilter = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return name.endsWith(".jar");
+ }
+ };
+
+ String[] jarsInLibDir = libDir.list(jarFileFilter);
+ if (jarsInLibDir.length > 1) {
+ throw new Exception("Incorrect library structure: found multiple library jars");
+ }
+ if (jarsInLibDir.length < 0) {
+ throw new Exception("Incorrect library structure: could not find library jar");
+ }
+
+ File libJar = new File(libDir, jarsInLibDir[0]);
+ File libDependencyDir = new File(libDir.getAbsolutePath() + File.separator + "lib");
+ int numDependencies = 1;
+ String[] libraryDependencies = null;
+ if (libDependencyDir.exists()) {
+ libraryDependencies = libDependencyDir.list(jarFileFilter);
+ numDependencies += libraryDependencies.length;
+ }
+
+ ClassLoader parentClassLoader = ExternalLibraryBootstrap.class.getClassLoader();
+ URL[] urls = new URL[numDependencies];
+ int count = 0;
+ urls[count++] = libJar.toURI().toURL();
+
+ if (libraryDependencies != null && libraryDependencies.length > 0) {
+ for (String dependency : libraryDependencies) {
+ File file = new File(libDependencyDir + File.separator + dependency);
+ urls[count++] = file.toURI().toURL();
+ }
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
+ for (URL url : urls) {
+ logMesg.append(url.getFile() + "\n");
+ }
+ LOGGER.info(logMesg.toString());
+ }
+
+ ClassLoader classLoader = new URLClassLoader(urls, parentClassLoader);
+ return classLoader;
+ }
+
+ private static File getLibraryInstallDir() {
+ String workingDir = System.getProperty("user.dir");
+ return new File(workingDir + File.separator + "library");
+ }
+
+ private static File getLibraryUninstallDir() {
+ String workingDir = System.getProperty("user.dir");
+ return new File(workingDir + File.separator + "uninstall");
+ }
+
+}
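getLibraryClassLoader maps the on-disk layout (one library jar plus an optional lib/ directory of dependency jars) onto a URLClassLoader. A self-contained sketch of that layout-to-classpath step, assuming the same directory structure:

import java.io.File;
import java.io.FilenameFilter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

public class LibraryClassLoaderSketch {
    static ClassLoader loaderFor(File libDir) throws Exception {
        FilenameFilter jarFilter = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".jar");
            }
        };
        List<URL> urls = new ArrayList<URL>();
        for (File jar : libDir.listFiles(jarFilter)) { // assumes libDir exists
            urls.add(jar.toURI().toURL()); // the library jar(s)
        }
        File dependencyDir = new File(libDir, "lib");
        if (dependencyDir.exists()) {
            for (File jar : dependencyDir.listFiles(jarFilter)) {
                urls.add(jar.toURI().toURL()); // bundled dependencies
            }
        }
        return new URLClassLoader(urls.toArray(new URL[urls.size()]),
                LibraryClassLoaderSketch.class.getClassLoader());
    }
}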
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java
new file mode 100644
index 0000000..fea44a2
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java
@@ -0,0 +1,1187 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.PrintWriter;
+import java.io.Serializable;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.StringUtils;
+
+import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.file.JobSpecificationUtils;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener.FeedFailure.FailureType;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedManagerElectMessage;
+import edu.uci.ics.asterix.metadata.feeds.FeedMetaOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedPolicyAccessor;
+import edu.uci.ics.asterix.metadata.feeds.IFeedMessage;
+import edu.uci.ics.asterix.metadata.feeds.MessageListener;
+import edu.uci.ics.asterix.metadata.feeds.MessageListener.IMessageAnalyzer;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties.State;
+import edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
+import edu.uci.ics.hyracks.api.job.IJobLifecycleListener;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobInfo;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.api.job.JobStatus;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
+
+/**
+ * A listener that subscribes to events associated with cluster membership (nodes joining/leaving the cluster)
+ * and job lifecycle (start/end of a job). Subscribing to these events allows keeping track of feed ingestion
+ * jobs and taking any corrective action that may be required when a node involved in a feed leaves the cluster.
+ */
+public class FeedLifecycleListener implements IJobLifecycleListener, IClusterEventsSubscriber, Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
+
+ public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
+
+ public static final int FEED_HEALTH_PORT = 2999;
+
+ private LinkedBlockingQueue<Message> jobEventInbox;
+ private LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
+ private Map<FeedInfo, List<String>> dependentFeeds = new HashMap<FeedInfo, List<String>>();
+ private IMessageAnalyzer healthDataParser;
+ private MessageListener feedHealthDataListener;
+ private ExecutorService executorService = Executors.newCachedThreadPool();
+ private Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
+ private State state;
+
+ private FeedLifecycleListener() {
+ jobEventInbox = new LinkedBlockingQueue<Message>();
+ feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
+ responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
+ feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
+ this.healthDataParser = new FeedHealthDataParser();
+ feedHealthDataListener = new MessageListener(FEED_HEALTH_PORT, healthDataParser.getMessageQueue());
+ try {
+ feedHealthDataListener.start();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to start Feed health data listener");
+ }
+ }
+ executorService.execute(feedJobNotificationHandler);
+ executorService.execute(feedWorkRequestResponseHandler);
+ ClusterManager.INSTANCE.registerSubscriber(this);
+ state = AsterixClusterProperties.INSTANCE.getState();
+
+ }
+
+ private final FeedJobNotificationHandler feedJobNotificationHandler;
+ private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
+
+ @Override
+ public void notifyJobStart(JobId jobId) throws HyracksException {
+ if (feedJobNotificationHandler.isRegisteredFeed(jobId)) {
+ jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
+ }
+ }
+
+ @Override
+ public void notifyJobFinish(JobId jobId) throws HyracksException {
+ if (feedJobNotificationHandler.isRegisteredFeed(jobId)) {
+ jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
+ }
+ }
+
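+    /**
+     * On job creation: detects whether the job is a feed ingestion job (i.e. contains a
+     * FeedIntakeOperatorDescriptor) and, if so, registers the feed and its policy with the
+     * job notification handler.
+     */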
+ @Override
+ public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
+
+ JobSpecification spec = acggf.getJobSpecification();
+ boolean feedIngestionJob = false;
+ FeedConnectionId feedId = null;
+ Map<String, String> feedPolicy = null;
+ for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
+ if (!(opDesc instanceof FeedIntakeOperatorDescriptor)) {
+ continue;
+ }
+ feedId = ((FeedIntakeOperatorDescriptor) opDesc).getFeedId();
+ feedPolicy = ((FeedIntakeOperatorDescriptor) opDesc).getFeedPolicy();
+ feedIngestionJob = true;
+ break;
+ }
+ if (feedIngestionJob) {
+ feedJobNotificationHandler.registerFeed(feedId, jobId, spec, feedPolicy);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered feed: " + feedId + " ingestion policy "
+ + feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY));
+ }
+ }
+
+ }
+
+ public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+ feedReportQueue.put(feedId, queue);
+ }
+
+ public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+ feedReportQueue.remove(feedId);
+ }
+
+ public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
+ return feedReportQueue.get(feedId);
+ }
+
+ private static class Message {
+ public JobId jobId;
+
+ public enum MessageKind {
+ JOB_START,
+ JOB_FINISH
+ }
+
+ public MessageKind messageKind;
+
+ public Message(JobId jobId, MessageKind msgKind) {
+ this.jobId = jobId;
+ this.messageKind = msgKind;
+ }
+ }
+
+ public static class FeedFailureReport {
+ public Map<FeedInfo, List<FeedFailure>> failures = new HashMap<FeedInfo, List<FeedFailure>>();
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ for (Map.Entry<FeedLifecycleListener.FeedInfo, List<FeedLifecycleListener.FeedFailure>> entry : failures
+ .entrySet()) {
+                builder.append(entry.getKey() + " -> failures: ");
+                for (FeedFailure failure : entry.getValue()) {
+                    builder.append(failure + " ");
+ }
+ }
+ return builder.toString();
+ }
+ }
+
+ private static class FeedHealthDataParser implements IMessageAnalyzer {
+
+ private LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();
+
+ @Override
+ public LinkedBlockingQueue<String> getMessageQueue() {
+ return inbox;
+ }
+
+ }
+
+ private static class FeedJobNotificationHandler implements Runnable, Serializable {
+
+ private static final long serialVersionUID = 1L;
+ private LinkedBlockingQueue<Message> inbox;
+ private Map<JobId, FeedInfo> registeredFeeds = new HashMap<JobId, FeedInfo>();
+ private FeedMessenger feedMessenger;
+ private LinkedBlockingQueue<FeedMessengerMessage> messengerOutbox;
+ private int superFeedManagerPort = 3000;
+
+ public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
+ this.inbox = inbox;
+ messengerOutbox = new LinkedBlockingQueue<FeedMessengerMessage>();
+ feedMessenger = new FeedMessenger(messengerOutbox);
+ (new Thread(feedMessenger)).start();
+ }
+
+ public boolean isRegisteredFeed(JobId jobId) {
+ return registeredFeeds.containsKey(jobId);
+ }
+
+ public void registerFeed(FeedConnectionId feedId, JobId jobId, JobSpecification jobSpec,
+ Map<String, String> feedPolicy) {
+ if (registeredFeeds.containsKey(jobId)) {
+                throw new IllegalStateException("Feed " + feedId + " is already registered under job " + jobId);
+ }
+ registeredFeeds.put(jobId, new FeedInfo(feedId, jobSpec, feedPolicy, jobId));
+ }
+
+ public void deregisterFeed(JobId jobId) {
+ FeedInfo feedInfo = registeredFeeds.remove(jobId);
+ if (feedInfo != null) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Deregistered feed: " + feedInfo);
+ }
+ }
+ }
+
+ public void deregisterFeed(FeedInfo feedInfo) {
+ JobId jobId = feedInfo.jobId;
+ deregisterFeed(jobId);
+ }
+
+ @Override
+ public void run() {
+ Message mesg;
+ while (true) {
+ try {
+ mesg = inbox.take();
+ FeedInfo feedInfo = registeredFeeds.get(mesg.jobId);
+ switch (mesg.messageKind) {
+ case JOB_START:
+ if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Job started for feed " + feedInfo.feedConnectionId);
+ }
+ handleJobStartMessage(feedInfo, mesg);
+ break;
+ case JOB_FINISH:
+ if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Job finished for feed " + feedInfo.feedConnectionId);
+ }
+ handleJobFinishMessage(feedInfo, mesg);
+ deregisterFeed(mesg.jobId);
+ break;
+ }
+                } catch (InterruptedException e) {
+                    // Restore the interrupt status and stop processing job events,
+                    // matching how FeedMessenger handles interruption below.
+                    Thread.currentThread().interrupt();
+                    break;
+                }
+
+ }
+ }
+
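+        /**
+         * On job start: walks the job specification to find the ingest, compute and storage
+         * operators, resolves their runtime locations from the job info, and records a
+         * FEED_BEGIN activity (with super feed manager details when the policy requires one).
+         */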
+ private void handleJobStartMessage(FeedInfo feedInfo, Message message) {
+
+ JobSpecification jobSpec = feedInfo.jobSpec;
+
+ List<OperatorDescriptorId> ingestOperatorIds = new ArrayList<OperatorDescriptorId>();
+ List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
+ List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
+
+ Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
+ for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+ IOperatorDescriptor opDesc = entry.getValue();
+ IOperatorDescriptor actualOp = null;
+ if (opDesc instanceof FeedMetaOperatorDescriptor) {
+ actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
+ } else {
+ actualOp = opDesc;
+ }
+
+ if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
+ AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
+ IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
+ for (IPushRuntimeFactory rf : runtimeFactories) {
+ if (rf instanceof AssignRuntimeFactory) {
+ computeOperatorIds.add(entry.getKey());
+ }
+ }
+ } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
+ storageOperatorIds.add(entry.getKey());
+ } else if (actualOp instanceof FeedIntakeOperatorDescriptor) {
+ ingestOperatorIds.add(entry.getKey());
+ }
+ }
+
+ try {
+ IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+ JobInfo info = hcc.getJobInfo(message.jobId);
+ feedInfo.jobInfo = info;
+ Map<String, String> feedActivityDetails = new HashMap<String, String>();
+ StringBuilder ingestLocs = new StringBuilder();
+ for (OperatorDescriptorId ingestOpId : ingestOperatorIds) {
+ Map<Integer, String> operatorLocations = info.getOperatorLocations().get(ingestOpId);
+ int nOperatorInstances = operatorLocations.size();
+ for (int i = 0; i < nOperatorInstances; i++) {
+ feedInfo.ingestLocations.add(operatorLocations.get(i));
+ }
+ }
+ StringBuilder computeLocs = new StringBuilder();
+ for (OperatorDescriptorId computeOpId : computeOperatorIds) {
+ Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
+ if (operatorLocations != null) {
+ int nOperatorInstances = operatorLocations.size();
+ for (int i = 0; i < nOperatorInstances; i++) {
+ feedInfo.computeLocations.add(operatorLocations.get(i));
+ }
+ } else {
+ feedInfo.computeLocations.addAll(feedInfo.ingestLocations);
+ }
+ }
+
+ StringBuilder storageLocs = new StringBuilder();
+ for (OperatorDescriptorId storageOpId : storageOperatorIds) {
+ Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
+ int nOperatorInstances = operatorLocations.size();
+ for (int i = 0; i < nOperatorInstances; i++) {
+ feedInfo.storageLocations.add(operatorLocations.get(i));
+ }
+ }
+
+ ingestLocs.append(StringUtils.join(feedInfo.ingestLocations, ","));
+ computeLocs.append(StringUtils.join(feedInfo.computeLocations, ","));
+ storageLocs.append(StringUtils.join(feedInfo.storageLocations, ","));
+
+ feedActivityDetails.put(FeedActivity.FeedActivityDetails.INGEST_LOCATIONS, ingestLocs.toString());
+ feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS, computeLocs.toString());
+ feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS, storageLocs.toString());
+ String policyName = feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+ feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
+
+ FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(feedInfo.feedPolicy);
+ if (policyAccessor.collectStatistics() || policyAccessor.isElastic()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Feed " + feedInfo.feedConnectionId + " requires Super Feed Manager");
+ }
+ configureSuperFeedManager(feedInfo, feedActivityDetails);
+ }
+
+ MetadataManager.INSTANCE.acquireWriteLatch();
+ MetadataTransactionContext mdTxnCtx = null;
+ try {
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ FeedActivity fa = MetadataManager.INSTANCE.getRecentActivityOnFeedConnection(mdTxnCtx,
+ feedInfo.feedConnectionId, null);
+ FeedActivityType nextState = FeedActivityType.FEED_BEGIN;
+ FeedActivity feedActivity = new FeedActivity(feedInfo.feedConnectionId.getDataverse(),
+ feedInfo.feedConnectionId.getFeedName(), feedInfo.feedConnectionId.getDatasetName(),
+ nextState, feedActivityDetails);
+ MetadataManager.INSTANCE.registerFeedActivity(mdTxnCtx, feedInfo.feedConnectionId, feedActivity);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ } finally {
+ MetadataManager.INSTANCE.releaseWriteLatch();
+ }
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in handling job start for feed " + feedInfo.feedConnectionId + ": "
+                            + e.getMessage());
+                }
+            }
+
+ }
+
+ private void configureSuperFeedManager(FeedInfo feedInfo, Map<String, String> feedActivityDetails) {
+            // Elect one of the feed's ingest nodes as super feed manager and record its host and port.
+ int superFeedManagerIndex = new Random().nextInt(feedInfo.ingestLocations.size());
+ String superFeedManagerHost = feedInfo.ingestLocations.get(superFeedManagerIndex);
+
+ Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
+ String instanceName = cluster.getInstanceName();
+ String node = superFeedManagerHost.substring(instanceName.length() + 1);
+ String hostIp = null;
+ for (Node n : cluster.getNode()) {
+ if (n.getId().equals(node)) {
+ hostIp = n.getClusterIp();
+ break;
+ }
+ }
+ if (hostIp == null) {
+ throw new IllegalStateException("Unknown node " + superFeedManagerHost);
+ }
+
+ feedActivityDetails.put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_HOST, hostIp);
+ feedActivityDetails
+ .put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_PORT, "" + superFeedManagerPort);
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Super Feed Manager for " + feedInfo.feedConnectionId + " is " + hostIp + " node "
+ + superFeedManagerHost);
+ }
+
+ FeedManagerElectMessage feedMessage = new FeedManagerElectMessage(hostIp, superFeedManagerHost,
+ superFeedManagerPort, feedInfo.feedConnectionId);
+ superFeedManagerPort += SuperFeedManager.PORT_RANGE_ASSIGNED;
+ messengerOutbox.add(new FeedMessengerMessage(feedMessage, feedInfo));
+
+ }
+
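+        /**
+         * On job termination: records a FEED_END (or FEED_FAILURE) activity, unless the job
+         * died because a node was lost after submission, in which case the feed is revived
+         * with its original connection parameters.
+         */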
+ private void handleJobFinishMessage(FeedInfo feedInfo, Message message) {
+ MetadataManager.INSTANCE.acquireWriteLatch();
+ MetadataTransactionContext mdTxnCtx = null;
+            boolean feedFailedDueToPostSubmissionNodeLoss = verifyReasonForFailure(feedInfo);
+ if (!feedFailedDueToPostSubmissionNodeLoss) {
+ try {
+ IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+ JobInfo info = hcc.getJobInfo(message.jobId);
+ JobStatus status = info.getStatus();
+                    boolean failure = (status == JobStatus.FAILURE);
+ FeedActivityType activityType = FeedActivityType.FEED_END;
+ Map<String, String> details = new HashMap<String, String>();
+ if (failure) {
+ activityType = FeedActivityType.FEED_FAILURE;
+ }
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ FeedActivity feedActivity = new FeedActivity(feedInfo.feedConnectionId.getDataverse(),
+ feedInfo.feedConnectionId.getFeedName(), feedInfo.feedConnectionId.getDatasetName(),
+ activityType, details);
+ MetadataManager.INSTANCE.registerFeedActivity(mdTxnCtx, new FeedConnectionId(
+ feedInfo.feedConnectionId.getDataverse(), feedInfo.feedConnectionId.getFeedName(),
+ feedInfo.feedConnectionId.getDatasetName()), feedActivity);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (RemoteException | ACIDException | MetadataException e) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ } catch (RemoteException | ACIDException ae) {
+                        throw new IllegalStateException("Unable to abort metadata transaction", ae);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Exception in handling job finish message [" + message.messageKind
+                                + "] for job " + message.jobId);
+ }
+ } finally {
+ MetadataManager.INSTANCE.releaseWriteLatch();
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Attempting to revive feed " + feedInfo.feedConnectionId);
+ }
+ FeedsActivator activator = new FeedsActivator();
+ String dataverse = feedInfo.feedConnectionId.getDataverse();
+ String datasetName = feedInfo.feedConnectionId.getDatasetName();
+ String feedName = feedInfo.feedConnectionId.getFeedName();
+ String feedPolicy = feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+ activator.reviveFeed(dataverse, feedName, datasetName, feedPolicy);
+ if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Revived feed " + feedInfo.feedConnectionId);
+ }
+
+ }
+ }
+
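+        /**
+         * Returns true if the feed's job failed because a node it was placed on left the
+         * cluster after job submission, determined by comparing the job's location
+         * constraints against the current participant nodes.
+         */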
+        private boolean verifyReasonForFailure(FeedInfo feedInfo) {
+ JobSpecification spec = feedInfo.jobSpec;
+ Set<Constraint> userConstraints = spec.getUserConstraints();
+ List<String> locations = new ArrayList<String>();
+ for (Constraint constraint : userConstraints) {
+ LValueConstraintExpression lexpr = constraint.getLValue();
+ ConstraintExpression cexpr = constraint.getRValue();
+ switch (lexpr.getTag()) {
+ case PARTITION_LOCATION:
+ String location = (String) ((ConstantExpression) cexpr).getValue();
+ locations.add(location);
+ break;
+ }
+ }
+ Set<String> participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes();
+ List<String> nodesFailedPostSubmission = new ArrayList<String>();
+ for (String location : locations) {
+ if (!participantNodes.contains(location)) {
+ nodesFailedPostSubmission.add(location);
+ }
+ }
+
+            if (!nodesFailedPostSubmission.isEmpty()) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Feed failed as node(s) " + nodesFailedPostSubmission
+                            + " left the cluster after job submission");
+                }
+                return true;
+            }
+            return false;
+
+ }
+
+ public static class FeedMessengerMessage {
+ private final IFeedMessage message;
+ private final FeedInfo feedInfo;
+
+ public FeedMessengerMessage(IFeedMessage message, FeedInfo feedInfo) {
+ this.message = message;
+ this.feedInfo = feedInfo;
+ }
+
+ public IFeedMessage getMessage() {
+ return message;
+ }
+
+ public FeedInfo getFeedInfo() {
+ return feedInfo;
+ }
+ }
+
+ private static class FeedMessenger implements Runnable {
+
+ private final LinkedBlockingQueue<FeedMessengerMessage> inbox;
+
+ public FeedMessenger(LinkedBlockingQueue<FeedMessengerMessage> inbox) {
+ this.inbox = inbox;
+ }
+
+ public void run() {
+ while (true) {
+ FeedMessengerMessage message = null;
+ try {
+ message = inbox.take();
+ FeedInfo feedInfo = message.getFeedInfo();
+ switch (message.getMessage().getMessageType()) {
+ case SUPER_FEED_MANAGER_ELECT:
+ Thread.sleep(2000);
+                                sendSuperFeedManagerElectMessage(feedInfo,
+                                        (FeedManagerElectMessage) message.getMessage());
+                                if (LOGGER.isLoggable(Level.INFO)) {
+                                    LOGGER.info("Sent super feed manager election message " + message.getMessage());
+                                }
+ }
+ } catch (InterruptedException ie) {
+ break;
+ }
+ }
+ }
+
+ }
+ }
+
+ public static class FeedInfo {
+ public FeedConnectionId feedConnectionId;
+ public JobSpecification jobSpec;
+ public List<String> ingestLocations = new ArrayList<String>();
+ public List<String> computeLocations = new ArrayList<String>();
+ public List<String> storageLocations = new ArrayList<String>();
+ public JobInfo jobInfo;
+ public Map<String, String> feedPolicy;
+ public JobId jobId;
+
+ public FeedInfo(FeedConnectionId feedId, JobSpecification jobSpec, Map<String, String> feedPolicy, JobId jobId) {
+ this.feedConnectionId = feedId;
+ this.jobSpec = jobSpec;
+ this.feedPolicy = feedPolicy;
+ this.jobId = jobId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof FeedInfo)) {
+ return false;
+ }
+ return ((FeedInfo) o).feedConnectionId.equals(feedConnectionId);
+ }
+
+ @Override
+ public int hashCode() {
+ return feedConnectionId.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return feedConnectionId + " job id " + jobId;
+ }
+
+ }
+
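+    /**
+     * Builds a failure report mapping each affected feed to the ingestion, compute and
+     * storage nodes it lost, and derives the cluster-management work (if any) needed to
+     * recover the affected feeds.
+     */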
+ @Override
+ public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
+ Collection<FeedInfo> feedInfos = feedJobNotificationHandler.registeredFeeds.values();
+ FeedFailureReport failureReport = new FeedFailureReport();
+ for (FeedInfo feedInfo : feedInfos) {
+ for (String deadNodeId : deadNodeIds) {
+ if (feedInfo.ingestLocations.contains(deadNodeId)) {
+ List<FeedFailure> failures = failureReport.failures.get(feedInfo);
+ if (failures == null) {
+ failures = new ArrayList<FeedFailure>();
+ failureReport.failures.put(feedInfo, failures);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Ingestion Node Failure! " + deadNodeId);
+ }
+ failures.add(new FeedFailure(FeedFailure.FailureType.INGESTION_NODE, deadNodeId));
+ }
+ if (feedInfo.computeLocations.contains(deadNodeId)) {
+ List<FeedFailure> failures = failureReport.failures.get(feedInfo);
+ if (failures == null) {
+ failures = new ArrayList<FeedFailure>();
+ failureReport.failures.put(feedInfo, failures);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Compute Node Failure! " + deadNodeId);
+ }
+ failures.add(new FeedFailure(FeedFailure.FailureType.COMPUTE_NODE, deadNodeId));
+ }
+ if (feedInfo.storageLocations.contains(deadNodeId)) {
+ List<FeedFailure> failures = failureReport.failures.get(feedInfo);
+ if (failures == null) {
+ failures = new ArrayList<FeedFailure>();
+ failureReport.failures.put(feedInfo, failures);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Storage Node Failure! " + deadNodeId);
+ }
+ failures.add(new FeedFailure(FeedFailure.FailureType.STORAGE_NODE, deadNodeId));
+ }
+ }
+ }
+ if (failureReport.failures.isEmpty()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ StringBuilder builder = new StringBuilder();
+ builder.append("No feed is affected by the failure of node(s): ");
+ for (String deadNodeId : deadNodeIds) {
+ builder.append(deadNodeId + " ");
+ }
+ LOGGER.info(builder.toString());
+ }
+ return new HashSet<IClusterManagementWork>();
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ StringBuilder builder = new StringBuilder();
+ builder.append("Feed affected by the failure of node(s): ");
+ for (String deadNodeId : deadNodeIds) {
+ builder.append(deadNodeId + " ");
+ }
+ builder.append("\n");
+ for (FeedInfo fInfo : failureReport.failures.keySet()) {
+ builder.append(fInfo.feedConnectionId);
+ feedJobNotificationHandler.deregisterFeed(fInfo);
+ }
+ LOGGER.warning(builder.toString());
+ }
+ return handleFailure(failureReport);
+ }
+ }
+
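+    /**
+     * Decides, per failed feed, whether its policy permits recovery: feeds that lost a
+     * storage node (or whose policy opts out of hardware-failure recovery) are terminated,
+     * while the rest trigger an AddNodeWork request for replacement nodes.
+     */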
+ private Set<IClusterManagementWork> handleFailure(FeedFailureReport failureReport) {
+ reportFeedFailure(failureReport);
+ Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+ Map<String, Map<FeedInfo, List<FailureType>>> failureMap = new HashMap<String, Map<FeedInfo, List<FailureType>>>();
+ FeedPolicyAccessor fpa = null;
+ List<FeedInfo> feedsToTerminate = new ArrayList<FeedInfo>();
+ for (Map.Entry<FeedInfo, List<FeedFailure>> entry : failureReport.failures.entrySet()) {
+ FeedInfo feedInfo = entry.getKey();
+ fpa = new FeedPolicyAccessor(feedInfo.feedPolicy);
+ if (!fpa.continueOnHardwareFailure()) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Feed " + feedInfo.feedConnectionId + " is governed by policy "
+ + feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY));
+ LOGGER.warning("Feed policy does not require feed to recover from hardware failure. Feed will terminate");
+ }
+ continue;
+ } else {
+ // insert feed recovery mode
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Feed " + feedInfo.feedConnectionId + " is governed by policy "
+ + feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY));
+ LOGGER.info("Feed policy requires feed to recover from hardware failure. Attempting to recover feed");
+ }
+ }
+
+ List<FeedFailure> feedFailures = entry.getValue();
+ boolean recoveryPossible = true;
+ for (FeedFailure feedFailure : feedFailures) {
+ switch (feedFailure.failureType) {
+ case COMPUTE_NODE:
+ case INGESTION_NODE:
+ Map<FeedInfo, List<FailureType>> failuresBecauseOfThisNode = failureMap.get(feedFailure.nodeId);
+ if (failuresBecauseOfThisNode == null) {
+ failuresBecauseOfThisNode = new HashMap<FeedInfo, List<FailureType>>();
+ failuresBecauseOfThisNode.put(feedInfo, new ArrayList<FailureType>());
+ failureMap.put(feedFailure.nodeId, failuresBecauseOfThisNode);
+ }
+ List<FailureType> feedF = failuresBecauseOfThisNode.get(feedInfo);
+ if (feedF == null) {
+ feedF = new ArrayList<FailureType>();
+ failuresBecauseOfThisNode.put(feedInfo, feedF);
+ }
+ feedF.add(feedFailure.failureType);
+ break;
+ case STORAGE_NODE:
+ recoveryPossible = false;
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unrecoverable situation! lost storage node for the feed "
+ + feedInfo.feedConnectionId);
+ }
+ List<String> requiredNodeIds = dependentFeeds.get(feedInfo);
+ if (requiredNodeIds == null) {
+ requiredNodeIds = new ArrayList<String>();
+ dependentFeeds.put(feedInfo, requiredNodeIds);
+ }
+ requiredNodeIds.add(feedFailure.nodeId);
+ failuresBecauseOfThisNode = failureMap.get(feedFailure.nodeId);
+ if (failuresBecauseOfThisNode != null) {
+ failuresBecauseOfThisNode.remove(feedInfo);
+ if (failuresBecauseOfThisNode.isEmpty()) {
+ failureMap.remove(feedFailure.nodeId);
+ }
+ }
+ feedsToTerminate.add(feedInfo);
+ break;
+ }
+ }
+ if (!recoveryPossible) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Terminating irrecoverable feed (loss of storage node) ");
+ }
+ }
+ }
+
+ if (!feedsToTerminate.isEmpty()) {
+ Thread t = new Thread(new FeedsDeActivator(feedsToTerminate));
+ t.start();
+ }
+
+ int numRequiredNodes = 0;
+ for (Entry<String, Map<FeedInfo, List<FeedFailure.FailureType>>> entry : failureMap.entrySet()) {
+ Map<FeedInfo, List<FeedFailure.FailureType>> v = entry.getValue();
+ for (FeedInfo finfo : feedsToTerminate) {
+ v.remove(finfo);
+ }
+ if (v.size() > 0) {
+ numRequiredNodes++;
+ }
+ }
+
+ if (numRequiredNodes > 0) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Number of additional nodes requested " + numRequiredNodes);
+ }
+ AddNodeWork addNodesWork = new AddNodeWork(failureMap.keySet().size(), this);
+ work.add(addNodesWork);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ Map<FeedInfo, List<FeedFailure>> feedFailures = failureReport.failures;
+ for (Entry<FeedInfo, List<FeedFailure>> entry : feedFailures.entrySet()) {
+ for (FeedFailure f : entry.getValue()) {
+ LOGGER.info("Feed Failure! " + f.failureType + " " + f.nodeId);
+ }
+ }
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered work id: " + addNodesWork.getWorkId());
+ }
+ feedWorkRequestResponseHandler.registerFeedWork(addNodesWork.getWorkId(), failureReport);
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Not requesting any new node. Feeds unrecoverable until the lost node(s) rejoin");
+ }
+ }
+ return work;
+ }
+
+ private void reportFeedFailure(FeedFailureReport failureReport) {
+ MetadataTransactionContext ctx = null;
+ FeedActivity fa = null;
+ Map<String, String> feedActivityDetails = new HashMap<String, String>();
+ StringBuilder builder = new StringBuilder();
+ MetadataManager.INSTANCE.acquireWriteLatch();
+ try {
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ for (Entry<FeedInfo, List<FeedFailure>> entry : failureReport.failures.entrySet()) {
+ FeedInfo feedInfo = entry.getKey();
+                List<FeedFailure> feedFailures = entry.getValue();
+                builder.setLength(0); // reset between feeds so failure details do not accumulate
+                for (FeedFailure failure : feedFailures) {
+ builder.append(failure + ",");
+ }
+ builder.deleteCharAt(builder.length() - 1);
+ feedActivityDetails.put(FeedActivityDetails.FEED_NODE_FAILURE, builder.toString());
+ fa = new FeedActivity(feedInfo.feedConnectionId.getDataverse(),
+ feedInfo.feedConnectionId.getFeedName(), feedInfo.feedConnectionId.getDatasetName(),
+ FeedActivityType.FEED_FAILURE, feedActivityDetails);
+ MetadataManager.INSTANCE.registerFeedActivity(ctx, feedInfo.feedConnectionId, fa);
+ }
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ } catch (Exception e) {
+ if (ctx != null) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ } catch (Exception e2) {
+ e2.addSuppressed(e);
+ throw new IllegalStateException("Unable to abort transaction " + e2);
+ }
+ }
+ } finally {
+ MetadataManager.INSTANCE.releaseWriteLatch();
+ }
+ }
+
+    private static void sendSuperFeedManagerElectMessage(FeedInfo feedInfo, FeedManagerElectMessage electMessage) {
+ try {
+ Dataverse dataverse = new Dataverse(feedInfo.feedConnectionId.getDataverse(),
+ NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, 0);
+ AqlMetadataProvider metadataProvider = new AqlMetadataProvider(dataverse);
+ JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+ IOperatorDescriptor feedMessenger;
+ AlgebricksPartitionConstraint messengerPc;
+ Set<String> locations = new HashSet<String>();
+ locations.addAll(feedInfo.computeLocations);
+ locations.addAll(feedInfo.ingestLocations);
+ locations.addAll(feedInfo.storageLocations);
+
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider.buildSendFeedMessageRuntime(
+ spec, dataverse.getDataverseName(), feedInfo.feedConnectionId.getFeedName(),
+ feedInfo.feedConnectionId.getDatasetName(), electMessage, locations.toArray(new String[] {}));
+ feedMessenger = p.first;
+ messengerPc = p.second;
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
+
+ NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
+ spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
+ spec.addRoot(nullSink);
+
+ JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(spec);
+ if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Super feed manager message: " + electMessage + ", job id: " + jobId);
+ }
+
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Exception in sending super feed manager elect message for "
+                        + feedInfo.feedConnectionId + ": " + e.getMessage());
+            }
+ }
+ }
+
+ public static class FeedFailure {
+
+ public enum FailureType {
+ INGESTION_NODE,
+ COMPUTE_NODE,
+ STORAGE_NODE
+ }
+
+ public FailureType failureType;
+ public String nodeId;
+
+ public FeedFailure(FailureType failureType, String nodeId) {
+ this.failureType = failureType;
+ this.nodeId = nodeId;
+ }
+
+ @Override
+ public String toString() {
+ return failureType + " (" + nodeId + ") ";
+ }
+ }
+
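+    /**
+     * On node join: if the cluster just became ACTIVE, restarts feeds that were active
+     * before the reboot; otherwise revives any feed whose required (previously lost) nodes
+     * have all rejoined.
+     */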
+ @Override
+ public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
+ State newState = AsterixClusterProperties.INSTANCE.getState();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
+ }
+
+ boolean needToReActivateFeeds = !newState.equals(state) && (newState == State.ACTIVE);
+ if (needToReActivateFeeds) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Cluster is now ACTIVE; resuming failed feeds (if any)");
+ }
+ try {
+ FeedsActivator activator = new FeedsActivator();
+ (new Thread(activator)).start();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in resuming feeds: " + e.getMessage());
+ }
+ }
+ state = newState;
+ } else {
+ List<FeedInfo> feedsThatCanBeRevived = new ArrayList<FeedInfo>();
+ for (Entry<FeedInfo, List<String>> entry : dependentFeeds.entrySet()) {
+ List<String> requiredNodeIds = entry.getValue();
+ if (requiredNodeIds.contains(joinedNodeId)) {
+ requiredNodeIds.remove(joinedNodeId);
+ if (requiredNodeIds.isEmpty()) {
+ feedsThatCanBeRevived.add(entry.getKey());
+ }
+ }
+ }
+ if (!feedsThatCanBeRevived.isEmpty()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Resuming feeds after rejoining of node " + joinedNodeId);
+ }
+ FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
+ (new Thread(activator)).start();
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
+ try {
+ responseInbox.put(response);
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Interrupted while queueing work response: " + e.getMessage());
+            }
+        }
+ }
+
+ @Override
+ public void notifyStateChange(State previousState, State newState) {
+ switch (newState) {
+ case ACTIVE:
+ if (previousState.equals(State.UNUSABLE)) {
+ try {
+ FeedsActivator activator = new FeedsActivator();
+ (new Thread(activator)).start();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Exception in resuming feeds: " + e.getMessage());
+ }
+ }
+ }
+ break;
+ }
+
+ }
+
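+    /**
+     * Resubmits feed jobs, either for all feeds that were active before a cluster reboot
+     * or for a specific set of feeds whose required nodes have rejoined.
+     */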
+ private static class FeedsActivator implements Runnable {
+
+ private List<FeedInfo> feedsToRevive;
+ private Mode mode;
+
+ public enum Mode {
+ REVIVAL_POST_CLUSTER_REBOOT,
+ REVIVAL_POST_NODE_REJOIN
+ }
+
+ public FeedsActivator() {
+ this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
+ }
+
+ public FeedsActivator(List<FeedInfo> feedsToRevive) {
+ this.feedsToRevive = feedsToRevive;
+ this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+ }
+
+ @Override
+ public void run() {
+ switch (mode) {
+ case REVIVAL_POST_CLUSTER_REBOOT:
+ revivePostClusterReboot();
+ break;
+ case REVIVAL_POST_NODE_REJOIN:
+ try {
+ Thread.sleep(10000);
+ } catch (InterruptedException e1) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Attempt to resume feed interrupted");
+ }
+                        throw new IllegalStateException(e1);
+ }
+ for (FeedInfo finfo : feedsToRevive) {
+ try {
+ JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+ LOGGER.info("Job:" + finfo.jobSpec);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+ }
+ }
+ }
+ }
+ }
+
+ private void revivePostClusterReboot() {
+ MetadataTransactionContext ctx = null;
+
+ try {
+
+ Thread.sleep(4000);
+ MetadataManager.INSTANCE.init();
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ List<FeedActivity> activeFeeds = MetadataManager.INSTANCE.getActiveFeeds(ctx, null, null);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Attempt to resume feeds that were active prior to instance shutdown!");
+                LOGGER.info("Number of feeds affected: " + activeFeeds.size());
+ for (FeedActivity fa : activeFeeds) {
+ LOGGER.info("Active feed " + fa.getDataverseName() + ":" + fa.getDatasetName());
+ }
+ }
+ for (FeedActivity fa : activeFeeds) {
+ String feedPolicy = fa.getFeedActivityDetails().get(FeedActivityDetails.FEED_POLICY_NAME);
+ FeedPolicy policy = MetadataManager.INSTANCE.getFeedPolicy(ctx, fa.getDataverseName(), feedPolicy);
+ if (policy == null) {
+ policy = MetadataManager.INSTANCE.getFeedPolicy(ctx, MetadataConstants.METADATA_DATAVERSE_NAME,
+ feedPolicy);
+ if (policy == null) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+                                LOGGER.severe("Unable to resume feed: " + fa.getDataverseName() + ":"
+                                        + fa.getDatasetName() + ". Unknown policy: " + feedPolicy);
+ }
+ continue;
+ }
+ }
+
+ FeedPolicyAccessor fpa = new FeedPolicyAccessor(policy.getProperties());
+ if (fpa.autoRestartOnClusterReboot()) {
+ String dataverse = fa.getDataverseName();
+ String datasetName = fa.getDatasetName();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Resuming feed after cluster revival: " + dataverse + ":" + datasetName
+ + " using policy " + feedPolicy);
+ }
+ reviveFeed(dataverse, fa.getFeedName(), datasetName, feedPolicy);
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Feed " + fa.getDataverseName() + ":" + fa.getDatasetName()
+                                    + " governed by policy " + feedPolicy
+                                    + " does not require auto restart after cluster revival");
+ }
+ }
+ }
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("Exception in reviving feeds after cluster reboot: " + e.getMessage());
+                }
+                try {
+                    if (ctx != null) {
+                        MetadataManager.INSTANCE.abortTransaction(ctx);
+                    }
+                } catch (Exception e1) {
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in aborting transaction: " + e1.getMessage());
+                    }
+                    throw new IllegalStateException(e1);
+                }
+ }
+ }
+
+ public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
+ PrintWriter writer = new PrintWriter(System.out, true);
+ SessionConfig pc = new SessionConfig(true, false, false, false, false, false, true, true, false);
+ try {
+ DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
+ ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse),
+ new Identifier(feedName), new Identifier(dataset), feedPolicy, 0);
+ stmt.setForceConnect(true);
+ List<Statement> statements = new ArrayList<Statement>();
+ statements.add(dataverseDecl);
+ statements.add(stmt);
+ AqlTranslator translator = new AqlTranslator(statements, writer, pc, DisplayFormat.TEXT);
+ translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, false);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
+ }
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in resuming failed feed: " + dataverse + ":" + dataset
+                            + " using policy " + feedPolicy + ". Exception: " + e.getMessage());
+                }
+ }
+ }
+ }
+
+ public static class FeedsDeActivator implements Runnable {
+
+ private List<FeedInfo> feedsToTerminate;
+
+ public FeedsDeActivator(List<FeedInfo> feedsToTerminate) {
+ this.feedsToTerminate = feedsToTerminate;
+ }
+
+ @Override
+ public void run() {
+ for (FeedInfo feedInfo : feedsToTerminate) {
+ endFeed(feedInfo);
+ }
+ }
+
+ private void endFeed(FeedInfo feedInfo) {
+ MetadataTransactionContext ctx = null;
+ PrintWriter writer = new PrintWriter(System.out, true);
+ SessionConfig pc = new SessionConfig(true, false, false, false, false, false, true, true, false);
+ try {
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(
+ feedInfo.feedConnectionId.getDataverse()), new Identifier(
+ feedInfo.feedConnectionId.getFeedName()), new Identifier(
+ feedInfo.feedConnectionId.getDatasetName()));
+ List<Statement> statements = new ArrayList<Statement>();
+ DataverseDecl dataverseDecl = new DataverseDecl(
+ new Identifier(feedInfo.feedConnectionId.getDataverse()));
+ statements.add(dataverseDecl);
+ statements.add(stmt);
+ AqlTranslator translator = new AqlTranslator(statements, writer, pc, DisplayFormat.TEXT);
+ translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, false);
+ if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ended unrecoverable feed: " + feedInfo.feedConnectionId);
+ }
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in ending unrecoverable feed: " + feedInfo.feedConnectionId
+                            + ". Exception: " + e.getMessage());
+                }
+ try {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ } catch (Exception e2) {
+ e2.addSuppressed(e);
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
+ }
+ }
+
+ }
+
+ }
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedWorkRequestResponseHandler.java
new file mode 100644
index 0000000..fa27e7e
--- /dev/null
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedWorkRequestResponseHandler.java
@@ -0,0 +1,337 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener.FeedFailure;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener.FeedFailureReport;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener.FeedInfo;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedLifecycleListener.FeedsDeActivator;
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
+import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
+import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedWorkRequestResponseHandler implements Runnable {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
+
+ private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
+
+ private Map<Integer, FeedFailureReport> feedsWaitingForResponse = new HashMap<Integer, FeedFailureReport>();
+
+ public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
+ this.inbox = inbox;
+ }
+
+ @Override
+ public void run() {
+ while (true) {
+            IClusterManagementWorkResponse response = null;
+            try {
+                response = inbox.take();
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Interrupted while waiting for a work response: " + e.getMessage());
+                }
+                // Without a response there is nothing to process; restore the interrupt and stop.
+                Thread.currentThread().interrupt();
+                break;
+            }
+ IClusterManagementWork submittedWork = response.getWork();
+ switch (submittedWork.getClusterManagementWorkType()) {
+ case ADD_NODE:
+ AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
+ switch (resp.getStatus()) {
+ case FAILURE:
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Request " + resp.getWork() + " not completed");
+ }
+ break;
+ case SUCCESS:
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Request " + resp.getWork() + " completed");
+                            }
+ break;
+ }
+
+                    AddNodeWork work = (AddNodeWork) submittedWork;
+                    FeedFailureReport failureReport = feedsWaitingForResponse.remove(work.getWorkId());
+                    if (failureReport == null) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("No registered feed work for work id " + work.getWorkId());
+                        }
+                        break;
+                    }
+                    Set<FeedInfo> affectedFeeds = failureReport.failures.keySet();
+ for (FeedInfo feedInfo : affectedFeeds) {
+ try {
+ recoverFeed(feedInfo, work, resp, failureReport.failures.get(feedInfo));
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Recovered feed:" + feedInfo);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+                                LOGGER.severe("Unable to recover feed " + feedInfo + ": " + e.getMessage());
+ }
+ }
+ }
+ break;
+ case REMOVE_NODE:
+ break;
+ }
+ }
+ }
+
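+    /**
+     * Rewrites the feed's job specification so that each failed node is replaced either by
+     * a newly added node (on ADD_NODE success) or by an existing cluster node, and then
+     * resubmits the job.
+     */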
+ private void recoverFeed(FeedInfo feedInfo, AddNodeWork work, AddNodeWorkResponse resp,
+ List<FeedFailure> feedFailures) throws Exception {
+ List<String> failedNodeIds = new ArrayList<String>();
+ for (FeedFailure feedFailure : feedFailures) {
+ failedNodeIds.add(feedFailure.nodeId);
+ }
+ List<String> chosenReplacements = new ArrayList<String>();
+ switch (resp.getStatus()) {
+ case FAILURE:
+ for (FeedFailure feedFailure : feedFailures) {
+ switch (feedFailure.failureType) {
+ case INGESTION_NODE:
+ String replacement = getInternalReplacement(feedInfo, feedFailure, failedNodeIds,
+ chosenReplacements);
+ chosenReplacements.add(replacement);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Existing node:" + replacement + " chosen to replace "
+ + feedFailure.nodeId);
+ }
+ alterFeedJobSpec(feedInfo, resp, feedFailure.nodeId, replacement);
+ break;
+ }
+ }
+ break;
+ case SUCCESS:
+ List<String> nodesAdded = resp.getNodesAdded();
+ int numNodesAdded = nodesAdded.size();
+ int nodeIndex = 0;
+ for (FeedFailure feedFailure : feedFailures) {
+ switch (feedFailure.failureType) {
+ case INGESTION_NODE:
+ String replacement = null;
+                        if (nodeIndex < numNodesAdded) {
+ replacement = nodesAdded.get(nodeIndex);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Newly added node:" + replacement + " chosen to replace "
+ + feedFailure.nodeId);
+ }
+ } else {
+ replacement = getInternalReplacement(feedInfo, feedFailure, failedNodeIds,
+ chosenReplacements);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Existing node:" + replacement + " chosen to replace "
+ + feedFailure.nodeId);
+ }
+ chosenReplacements.add(replacement);
+ }
+ alterFeedJobSpec(feedInfo, resp, feedFailure.nodeId, replacement);
+ nodeIndex++;
+ break;
+                        default: // Ingestion and compute nodes coincide in the current implementation,
+                                 // so correcting an ingestion node failure also covers compute node failures.
+                                 // Storage node failures cannot be recovered from, as the current
+                                 // implementation has no data replication.
+ }
+ }
+ break;
+ }
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Final recovery job spec:\n" + feedInfo.jobSpec);
+        }
+        Thread.sleep(5000); // brief delay before resubmitting the altered job
+        AsterixAppContextInfo.getInstance().getHcc().startJob(feedInfo.jobSpec);
+ }
+
+ private String getInternalReplacement(FeedInfo feedInfo, FeedFailure feedFailure, List<String> failedNodeIds,
+ List<String> chosenReplacements) {
+ String failedNodeId = feedFailure.nodeId;
+        String replacement = null;
+        // 1st preference goes to a participant node not already involved in the feed,
+        // 2nd preference to one of the feed's compute nodes,
+        // 3rd preference to one of its storage nodes.
+ Set<String> participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes();
+ if (participantNodes != null && !participantNodes.isEmpty()) {
+ List<String> pNodesClone = new ArrayList<String>();
+ pNodesClone.addAll(participantNodes);
+ pNodesClone.removeAll(feedInfo.storageLocations);
+ pNodesClone.removeAll(feedInfo.computeLocations);
+ pNodesClone.removeAll(feedInfo.ingestLocations);
+ pNodesClone.removeAll(chosenReplacements);
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ for (String candidateNode : pNodesClone) {
+ LOGGER.info("Candidate for replacement:" + candidateNode);
+ }
+ }
+ if (!pNodesClone.isEmpty()) {
+ String[] participantNodesArray = pNodesClone.toArray(new String[] {});
+
+ replacement = participantNodesArray[new Random().nextInt(participantNodesArray.length)];
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Participant Node: " + replacement + " chosen as replacement for " + failedNodeId);
+ }
+ }
+ }
+
+ if (replacement == null) {
+ feedInfo.computeLocations.removeAll(failedNodeIds);
+ boolean computeNodeSubstitute = (feedInfo.computeLocations.size() > 1);
+ if (computeNodeSubstitute) {
+ replacement = feedInfo.computeLocations.get(0);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Compute node:" + replacement + " chosen to replace " + failedNodeId);
+ }
+ } else {
+ replacement = feedInfo.storageLocations.get(0);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Storage node:" + replacement + " chosen to replace " + failedNodeId);
+ }
+ }
+ }
+ return replacement;
+ }
+
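+    /**
+     * Applies the chosen replacement to the feed's job specification, or terminates the
+     * feed when no replacement could be found.
+     */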
+ private void alterFeedJobSpec(FeedInfo feedInfo, AddNodeWorkResponse resp, String failedNodeId, String replacement) {
+ if (replacement == null) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unable to find replacement for failed node :" + failedNodeId);
+ LOGGER.severe("Feed: " + feedInfo.feedConnectionId + " will be terminated");
+ }
+ List<FeedInfo> feedsToTerminate = new ArrayList<FeedInfo>();
+ feedsToTerminate.add(feedInfo);
+ Thread t = new Thread(new FeedsDeActivator(feedsToTerminate));
+ t.start();
+ } else {
+ replaceNode(feedInfo.jobSpec, failedNodeId, replacement);
+ }
+ }
+
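+    /**
+     * Rewrites the job's partition-location (and related partition-count) constraints:
+     * constraints pinned to the failed node are redirected to the substitute node, and the
+     * sibling constraints of each modified operator are re-added so every operator ends up
+     * with a complete absolute-location constraint.
+     */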
+ private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
+ Set<Constraint> userConstraints = jobSpec.getUserConstraints();
+ List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
+ List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
+ List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
+ Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
+ Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
+ OperatorDescriptorId opId = null;
+ for (Constraint constraint : userConstraints) {
+ LValueConstraintExpression lexpr = constraint.getLValue();
+ ConstraintExpression cexpr = constraint.getRValue();
+ switch (lexpr.getTag()) {
+ case PARTITION_COUNT:
+ opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+ if (modifiedOperators.contains(opId)) {
+ countConstraintsToReplace.add(constraint);
+ } else {
+ List<Constraint> clist = candidateConstraints.get(opId);
+ if (clist == null) {
+ clist = new ArrayList<Constraint>();
+ candidateConstraints.put(opId, clist);
+ }
+ clist.add(constraint);
+ }
+ break;
+ case PARTITION_LOCATION:
+ opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+ String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+ if (oldLocation.equals(failedNodeId)) {
+ locationConstraintsToReplace.add(constraint);
+ modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
+ Map<Integer, String> newLocs = newConstraints.get(opId);
+ if (newLocs == null) {
+ newLocs = new HashMap<Integer, String>();
+ newConstraints.put(opId, newLocs);
+ }
+ int partition = ((PartitionLocationExpression) lexpr).getPartition();
+ newLocs.put(partition, replacementNode);
+ } else {
+ if (modifiedOperators.contains(opId)) {
+ locationConstraintsToReplace.add(constraint);
+ Map<Integer, String> newLocs = newConstraints.get(opId);
+ if (newLocs == null) {
+ newLocs = new HashMap<Integer, String>();
+ newConstraints.put(opId, newLocs);
+ }
+ int partition = ((PartitionLocationExpression) lexpr).getPartition();
+ newLocs.put(partition, oldLocation);
+ } else {
+ List<Constraint> clist = candidateConstraints.get(opId);
+ if (clist == null) {
+ clist = new ArrayList<Constraint>();
+ candidateConstraints.put(opId, clist);
+ }
+ clist.add(constraint);
+ }
+ }
+ break;
+ }
+ }
+
+ jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
+ jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
+
+ for (OperatorDescriptorId mopId : modifiedOperators) {
+ List<Constraint> clist = candidateConstraints.get(mopId);
+ if (clist != null && !clist.isEmpty()) {
+ jobSpec.getUserConstraints().removeAll(clist);
+
+ for (Constraint c : clist) {
+ if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
+ ConstraintExpression cexpr = c.getRValue();
+ int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
+ String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+ newConstraints.get(mopId).put(partition, oldLocation);
+ }
+ }
+ }
+ }
+
+ for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
+ OperatorDescriptorId nopId = entry.getKey();
+ Map<Integer, String> clist = entry.getValue();
+ IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
+ String[] locations = new String[clist.size()];
+ for (int i = 0; i < locations.length; i++) {
+ locations[i] = clist.get(i);
+ }
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
+ }
+
+ }
+
+ public void registerFeedWork(int workId, FeedFailureReport failureReport) {
+ feedsWaitingForResponse.put(workId, failureReport);
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index 96d0617..e750c07 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.hyracks.bootstrap;
-import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import java.util.HashMap;
import java.util.Map;
@@ -25,17 +24,22 @@
import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
import edu.uci.ics.asterix.common.transactions.IRecoveryManager;
import edu.uci.ics.asterix.common.transactions.IRecoveryManager.SystemState;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.MetadataNode;
import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
import edu.uci.ics.asterix.metadata.api.IMetadataNode;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataBootstrap;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import edu.uci.ics.hyracks.api.application.INCApplicationContext;
import edu.uci.ics.hyracks.api.application.INCApplicationEntryPoint;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
@@ -50,23 +54,29 @@
@Override
public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
- ncAppCtx.setThreadFactory(AsterixThreadFactory.INSTANCE);
+ ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getLifeCycleComponentManager()));
ncApplicationContext = ncAppCtx;
nodeId = ncApplicationContext.getNodeId();
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting Asterix node controller TAKE NOTE: " + nodeId);
}
- JVMShutdownHook sHook = new JVMShutdownHook(this);
- Runtime.getRuntime().addShutdownHook(sHook);
-
runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
+ AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
+ .getMetadataProperties();
+ if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Substitute node joining: " + ncApplicationContext.getNodeId());
+ }
+ updateOnNodeJoin();
+ }
runtimeContext.initialize();
ncApplicationContext.setApplicationObject(runtimeContext);
// #. recover if the system is corrupted by checking system state.
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
systemState = recoveryMgr.getSystemState();
+
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("System is in a state: " + systemState);
}
@@ -99,7 +109,7 @@
MetadataBootstrap.stopUniverse();
}
- LifeCycleComponentManager.INSTANCE.stopAll(false);
+ ncApplicationContext.getLifeCycleComponentManager().stopAll(false);
runtimeContext.deinitialize();
} else {
if (LOGGER.isLoggable(Level.INFO)) {
@@ -129,77 +139,96 @@
isMetadataNode = nodeId.equals(metadataProperties.getMetadataNodeName());
if (isMetadataNode) {
- registerRemoteMetadataNode(proxy);
-
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Bootstrapping metadata");
}
- MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
- MetadataManager.INSTANCE.init();
+ MetadataNode.INSTANCE.initialize(runtimeContext);
+
+ // This is a special case, we just give the metadataNode directly.
+ // This way we can delay the registration of the metadataNode until
+ // it is completely initialized.
+ MetadataManager.INSTANCE = new MetadataManager(proxy, MetadataNode.INSTANCE);
MetadataBootstrap.startUniverse(((IAsterixPropertiesProvider) runtimeContext), ncApplicationContext,
systemState == SystemState.NEW_UNIVERSE);
MetadataBootstrap.startDDLRecovery();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Metadata node bound");
+ }
}
+ ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode);
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting lifecycle components");
}
-
+
Map<String, String> lifecycleMgmtConfiguration = new HashMap<String, String>();
- String key = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
- String value = metadataProperties.getCoredumpPath(nodeId);
- lifecycleMgmtConfiguration.put(key, value);
+ String dumpPathKey = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
+ String dumpPath = metadataProperties.getCoredumpPath(nodeId);
+ lifecycleMgmtConfiguration.put(dumpPathKey, dumpPath);
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Coredump directory for NC is: " + value);
+ LOGGER.info("Coredump directory for NC is: " + dumpPath);
}
- LifeCycleComponentManager.INSTANCE.configure(lifecycleMgmtConfiguration);
+ ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
+ lccm.configure(lifecycleMgmtConfiguration);
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Configured:" + LifeCycleComponentManager.INSTANCE);
+ LOGGER.info("Configured:" + lccm);
}
-
- LifeCycleComponentManager.INSTANCE.startAll();
+ ncApplicationContext.setStateDumpHandler(new AsterixStateDumpHandler(ncApplicationContext.getNodeId(), lccm
+ .getDumpPath(), lccm));
+
+ lccm.startAll();
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
recoveryMgr.checkpoint(true);
+
+ if (isMetadataNode) {
+ IMetadataNode stub = null;
+ stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, 0);
+ proxy.setMetadataNode(stub);
+ }
// TODO
// reclaim storage for orphaned index artifacts in NCs.
}
- public void registerRemoteMetadataNode(IAsterixStateProxy proxy) throws RemoteException {
- IMetadataNode stub = null;
- MetadataNode.INSTANCE.initialize(runtimeContext);
- stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, 0);
- proxy.setMetadataNode(stub);
+ private void updateOnNodeJoin() {
+ AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
+ .getMetadataProperties();
+ if (!metadataProperties.getNodeNames().contains(nodeId)) {
+ metadataProperties.getNodeNames().add(nodeId);
+ Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
+ String asterixInstanceName = cluster.getInstanceName();
+ AsterixTransactionProperties txnProperties = ((IAsterixPropertiesProvider) runtimeContext)
+ .getTransactionProperties();
+ Node self = null;
+ for (Node node : cluster.getSubstituteNodes().getNode()) {
+ String ncId = asterixInstanceName + "_" + node.getId();
+ if (ncId.equalsIgnoreCase(nodeId)) {
+ String storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
+ metadataProperties.getStores().put(nodeId, storeDir.split(","));
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Metadata node bound");
- }
- }
+ String coredumpPath = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+ metadataProperties.getCoredumpPaths().put(nodeId, coredumpPath);
- /**
- * Shutdown hook that invokes {@link NCApplicationEntryPoint#stop() stop} method.
- */
- private static class JVMShutdownHook extends Thread {
+ String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+ txnProperties.getLogDirectories().put(nodeId, txnLogDir);
- private final NCApplicationEntryPoint ncAppEntryPoint;
-
- public JVMShutdownHook(NCApplicationEntryPoint ncAppEntryPoint) {
- this.ncAppEntryPoint = ncAppEntryPoint;
- }
-
- public void run() {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Shutdown hook in progress");
- }
- try {
- ncAppEntryPoint.stop();
- } catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("Exception in executing shutdown hook" + e);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Store set to : " + storeDir);
+ LOGGER.info("Coredump dir set to : " + coredumpPath);
+ LOGGER.info("Transaction log dir set to :" + txnLogDir);
+ }
+ self = node;
+ break;
}
}
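+ // Promote the node from the substitute list to the active node list;
+ // a joining node that is not a known substitute is an error.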
+ if (self != null) {
+ cluster.getSubstituteNodes().getNode().remove(self);
+ cluster.getNode().add(self);
+ } else {
+ throw new IllegalStateException("Unknown node joining the cluster");
+ }
}
}
-
-}
\ No newline at end of file
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
index 1900fa3..18ed62a 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
@@ -35,8 +35,8 @@
import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
public class ResultUtils {
- public static JSONArray getJSONFromBuffer(ByteBuffer buffer, IFrameTupleAccessor fta) throws HyracksDataException {
- JSONArray resultRecords = new JSONArray();
+ public static void getJSONFromBuffer(ByteBuffer buffer, IFrameTupleAccessor fta, JSONArray resultRecords)
+ throws HyracksDataException {
ByteBufferInputStream bbis = new ByteBufferInputStream();
try {
@@ -56,7 +56,6 @@
throw new HyracksDataException(e);
}
}
- return resultRecords;
}
public static JSONObject getErrorResponse(int errorCode, String errorMessage, String errorSummary,
@@ -206,7 +205,7 @@
/**
* Read the template file which is stored as a resource and return its content. If the file does not exist or is
* not readable return the default template string.
- *
+ *
* @param path
* The path to the resource template file
* @param defaultTemplate
diff --git a/asterix-app/src/main/resources/asterix-build-configuration.xml b/asterix-app/src/main/resources/asterix-build-configuration.xml
index 7a91205..d798cd5 100644
--- a/asterix-app/src/main/resources/asterix-build-configuration.xml
+++ b/asterix-app/src/main/resources/asterix-build-configuration.xml
@@ -25,6 +25,15 @@
<ncId>nc2</ncId>
<txnLogDirPath>target/txnLogDir/nc2</txnLogDirPath>
</transactionLogDir>
+
+ <property>
+ <name>max.wait.active.cluster</name>
+ <value>60</value>
+ <description>Maximum wait (in seconds) for a cluster to be ACTIVE (all nodes are available)
+ before a submitted query/statement can be executed. (Default = 60 seconds)
+ </description>
+ </property>
+
<property>
<name>log.level</name>
<value>WARNING</value>
diff --git a/asterix-app/src/main/resources/feed/dashboard.html b/asterix-app/src/main/resources/feed/dashboard.html
new file mode 100644
index 0000000..805f8ea
--- /dev/null
+++ b/asterix-app/src/main/resources/feed/dashboard.html
@@ -0,0 +1,127 @@
+<html>
+ <head>
+ <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery.min.js"></script>
+ <script type="text/javascript" src="/webui/static/js/smoothie.js"></script>
+ <script type="text/javascript">
+ $(document).ready(function() {
+ var feedSeries = new TimeSeries();
+ var state = "ACTIVE";
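+ // The "%s" placeholders below are filled in server-side before this page is served.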
+ var dataverse = "%s";
+ var dataset = "%s";
+ var feed = "%s";
+ var ingestLocations = "%s";
+ var computeLocations = "%s";
+ var storageLocations = "%s";
+ var ingestionPolicy = "%s";
+ var activeSince = "%s";
+ var targetUrl = "/feed/data?dataverse=" + dataverse + "&dataset=" + dataset + "&feed=" + feed;
+
+
+ var ingestionNodes = ingestLocations.split(",");
+ var numIngestionNodes = ingestionNodes.length;
+ var seriesOptions = { strokeStyle: 'rgba(0, 255, 0, 1)', fillStyle: 'rgba(0, 255, 0, 0.2)', lineWidth: 4 };
+ var ingestionTimeSeries = new Array();
+ var graphNames = new Array();
+
+ $.ajaxSetup({ cache: false });
+ setInterval(fetchFeedReport, 5000);
+ function fetchFeedReport() {
+ $.ajax({
+ url: '/feed/data?dataverse=' + dataverse + '&dataset=' + dataset + '&feed=' + feed,
+ method: 'GET',
+ dataType: 'json',
+ success: onFeedReportReceived
+ });
+ }
+
+
+ function onFeedReportReceived(data) {
+ var status = data["status"];
+ if(status == ("ended")){
+ ingestLocations = " ";
+ computeLocations = " ";
+ storageLocations = " ";
+ ingestionPolicy = " ";
+ state = "INACTIVE";
+ document.location.reload(true);
+ } else {
+ var type = data["type"];
+ if (type == ("reload")) {
+ ingestLocations = data["ingestLocations"];
+ computeLocations = data["computeLocations"];
+ storageLocations = data["storageLocations"];
+ document.location.reload(true);
+ } else {
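+ // The report value is a "|"-separated list of per-partition ingestion
+ // throughputs; append each to its partition series and accumulate the total.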
+ var report = data["value"];
+ var tputArray = report.split("|");
+ var covered = 0;
+ var totalTput = 0;
+ for( var i = 0; i < tputArray.length-1; i ++){
+ ingestionTimeSeries[i].append(data["time"], tputArray[i]);
+ covered++;
+ totalTput += parseInt(tputArray[i]);
+ }
+ for( var j = covered; j < numIngestionNodes; j++){
+ ingestionTimeSeries[j].append(data["time"], 0);
+ }
+ ingestionTimeSeries[numIngestionNodes].append(data["time"], totalTput);
+ }
+ }
+ }
+
+ function myYRangeFunction(range) {
+ var min = 0;
+ var max = 5000;
+ return {min: min, max: max};
+ }
+
+ function initTimeline(ingestLocations) {
+
+ document.write("<i>" + "Feed Ingestion" + " " + "<i>");
+ document.write("<br />" + "Ingestion Locations: " + ingestLocations.replace(",",", "));
+ document.write("<br />" + "Compute Locations: " + computeLocations.replace(",",", "));
+ document.write("<br />" + "Storage Locations: " + storageLocations.replace(",",", "));
+ document.write("<br />" + "Ingestion Policy: " + ingestionPolicy);
+ document.write("<br />" + "Status: " + state);
+ document.write("<br />");
+ document.write("<br />");
+
+ for( var i = 0; i < numIngestionNodes; i++){
+ graphNames[i] = "Partition " + i;
+ }
+
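+ // With more than one ingestion partition, add an extra chart for the
+ // aggregate ingestion throughput.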
+ if(numIngestionNodes > 1){
+ graphNames[numIngestionNodes] = "IngestionThroughput";
+ drawCanvas(graphNames[numIngestionNodes]);
+ ingestionTimeSeries[numIngestionNodes] = new TimeSeries();
+ drawChart(graphNames[numIngestionNodes], ingestionTimeSeries[numIngestionNodes]);
+ }
+
+ for( var j = 0; j < numIngestionNodes; j++){
+ drawCanvas(graphNames[j]);
+ ingestionTimeSeries[j] = new TimeSeries();
+ drawChart(graphNames[j], ingestionTimeSeries[j]);
+ }
+ }
+
+ function drawCanvas(chartName) {
+ document.write("<br />");
+ document.write("<br />");
+ document.write("<i>" + chartName + "</i>");
+ document.write("<br />");
+ document.write("<canvas id="+ "\"" + chartName + "\"" + " " + "width=\"500\" height=\"250\"></canvas>");
+ }
+
+ function drawChart(chartName, ingestionTimeSeries) {
+ var ingestionChart = new SmoothieChart({ timestampFormatter:SmoothieChart.timeFormatter, interpolation:'linear', minValue:0, millisPerPixel: 20, grid: { strokeStyle: '#555555', lineWidth: 1, millisPerLine: 1000, verticalSections: 10 }});
+ ingestionChart.addTimeSeries(ingestionTimeSeries, seriesOptions);
+ ingestionChart.streamTo(document.getElementById(chartName), 500);
+ }
+
+ initTimeline(ingestLocations);
+ });
+ </script>
+ </head>
+ <body></body>
+</html>
+
diff --git a/asterix-app/src/main/resources/feed/home.html b/asterix-app/src/main/resources/feed/home.html
new file mode 100644
index 0000000..5b1721b
--- /dev/null
+++ b/asterix-app/src/main/resources/feed/home.html
@@ -0,0 +1,82 @@
+<!--
+ ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License");
+ ! you may not use this file except in compliance with the License.
+ ! you may obtain a copy of the License from
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing, software
+ ! distributed under the License is distributed on an "AS IS" BASIS,
+ ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ! See the License for the specific language governing permissions and
+ ! limitations under the License.
+ !-->
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta name="description" content="ASTERIX WEB PAGE" />
+<meta name="viewport" content="width=device-width, initial-scale=1.0">
+<link href='http://fonts.googleapis.com/css?family=Bitter|PT+Sans+Caption|Open+Sans' rel='stylesheet' type='text/css'>
+<script src="/webui/static/js/jquery.min.js"></script>
+
+<link href="/webui/static/css/bootstrap.min.css" rel="stylesheet" type="text/css" />
+<link href="/webui/static/css/bootstrap-responsive.min.css" rel="stylesheet" type="text/css" />
+
+<script src="/webui/static/js/bootstrap.min.js"></script>
+
+<link href="/webui/static/css/style.css" rel="stylesheet" type="text/css" />
+
+
+<meta charset=utf-8 />
+<title>AsterixDB Web Interface</title>
+</head>
+
+<body>
+ <div class="navbar navbar-fixed-top">
+ <div class="navbar-inner">
+ <div class="container">
+ <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
+ <span class="icon-bar"></span>
+ <span class="icon-bar"></span>
+ <span class="icon-bar"></span>
+ </a>
+
+ <!-- Temporary logo placeholder -->
+ <a class="brand" href="#"><img src="/webui/static/img/finalasterixlogo.png"></a>
+
+ <div class="nav-collapse collapse">
+ <ul class="nav">
+ <li><a href="http://code.google.com/p/asterixdb/" target="_blank">
+ Open source<img class="extarget" src="/webui/static/img/targetlink.png"/></a></li>
+ <li><a href="http://code.google.com/p/asterixdb/issues/list" target="_blank">
+ File issues<img class="extarget" src="/webui/static/img/targetlink.png"/></a></li>
+ <li><a href="https://groups.google.com/forum/?fromgroups#!forum/asterixdb-users" target="_blank">
+ Contact<img class="extarget" src="/webui/static/img/targetlink.png"/></a></li>
+ </ul>
+ </div><!--/.nav-collapse -->
+ </div>
+ </div>
+ </div>
+
+ <div class="content">
+ <div class="container">
+ <div class="row-fluid">
+ <div class="span12">
+ %s
+ </div>
+ </div>
+ </div>
+ </div>
+ <div class="footer">
+ <section class="line"><hr></section>
+ <section class="content">
+ <section class="left">
+ </section>
+ <section class="right">
+ © Copyright 2013 University of California, Irvine
+ </section>
+ </section>
+ </div>
+</body>
+</html>
diff --git a/asterix-app/src/main/resources/feed/smoothie.js b/asterix-app/src/main/resources/feed/smoothie.js
new file mode 100644
index 0000000..4e46fa7
--- /dev/null
+++ b/asterix-app/src/main/resources/feed/smoothie.js
@@ -0,0 +1,660 @@
+// MIT License:
+//
+// Copyright (c) 2010-2013, Joe Walnes
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+/**
+ * Smoothie Charts - http://smoothiecharts.org/
+ * (c) 2010-2013, Joe Walnes
+ * 2013, Drew Noakes
+ *
+ * v1.0: Main charting library, by Joe Walnes
+ * v1.1: Auto scaling of axis, by Neil Dunn
+ * v1.2: fps (frames per second) option, by Mathias Petterson
+ * v1.3: Fix for divide by zero, by Paul Nikitochkin
+ * v1.4: Set minimum, top-scale padding, remove timeseries, add optional timer to reset bounds, by Kelley Reynolds
+ * v1.5: Set default frames per second to 50... smoother.
+ * .start(), .stop() methods for conserving CPU, by Dmitry Vyal
+ * options.interpolation = 'bezier' or 'line', by Dmitry Vyal
+ * options.maxValue to fix scale, by Dmitry Vyal
+ * v1.6: minValue/maxValue will always get converted to floats, by Przemek Matylla
+ * v1.7: options.grid.fillStyle may be a transparent color, by Dmitry A. Shashkin
+ * Smooth rescaling, by Kostas Michalopoulos
+ * v1.8: Set max length to customize number of live points in the dataset with options.maxDataSetLength, by Krishna Narni
+ * v1.9: Display timestamps along the bottom, by Nick and Stev-io
+ * (https://groups.google.com/forum/?fromgroups#!topic/smoothie-charts/-Ywse8FCpKI%5B1-25%5D)
+ * Refactored by Krishna Narni, to support timestamp formatting function
+ * v1.10: Switch to requestAnimationFrame, removed the now obsoleted options.fps, by Gergely Imreh
+ * v1.11: options.grid.sharpLines option added, by @drewnoakes
+ * Addressed warning seen in Firefox when seriesOption.fillStyle undefined, by @drewnoakes
+ * v1.12: Support for horizontalLines added, by @drewnoakes
+ * Support for yRangeFunction callback added, by @drewnoakes
+ * v1.13: Fixed typo (#32), by @alnikitich
+ * v1.14: Timer cleared when last TimeSeries removed (#23), by @davidgaleano
+ * Fixed diagonal line on chart at start/end of data stream, by @drewnoakes
+ * v1.15: Support for npm package (#18), by @dominictarr
+ * Fixed broken removeTimeSeries function (#24) by @davidgaleano
+ * Minor performance and tidying, by @drewnoakes
+ * v1.16: Bug fix introduced in v1.14 relating to timer creation/clearance (#23), by @drewnoakes
+ * TimeSeries.append now deals with out-of-order timestamps, and can merge duplicates, by @zacwitte (#12)
+ * Documentation and some local variable renaming for clarity, by @drewnoakes
+ * v1.17: Allow control over font size (#10), by @drewnoakes
+ * Timestamp text won't overlap, by @drewnoakes
+ * v1.18: Allow control of max/min label precision, by @drewnoakes
+ * Added 'borderVisible' chart option, by @drewnoakes
+ * Allow drawing series with fill but no stroke (line), by @drewnoakes
+ */
+
+;(function(exports) {
+
+ var Util = {
+ extend: function() {
+ arguments[0] = arguments[0] || {};
+ for (var i = 1; i < arguments.length; i++)
+ {
+ for (var key in arguments[i])
+ {
+ if (arguments[i].hasOwnProperty(key))
+ {
+ if (typeof(arguments[i][key]) === 'object') {
+ if (arguments[i][key] instanceof Array) {
+ arguments[0][key] = arguments[i][key];
+ } else {
+ arguments[0][key] = Util.extend(arguments[0][key], arguments[i][key]);
+ }
+ } else {
+ arguments[0][key] = arguments[i][key];
+ }
+ }
+ }
+ }
+ return arguments[0];
+ }
+ };
+
+ /**
+ * Initialises a new <code>TimeSeries</code> with optional data options.
+ *
+ * Options are of the form (defaults shown):
+ *
+ * <pre>
+ * {
+ * resetBounds: true, // enables/disables automatic scaling of the y-axis
+ * resetBoundsInterval: 3000 // the period between scaling calculations, in millis
+ * }
+ * </pre>
+ *
+ * Presentation options for TimeSeries are specified as an argument to <code>SmoothieChart.addTimeSeries</code>.
+ *
+ * @constructor
+ */
+ function TimeSeries(options) {
+ this.options = Util.extend({}, TimeSeries.defaultOptions, options);
+ this.data = [];
+ this.maxValue = Number.NaN; // The maximum value ever seen in this TimeSeries.
+ this.minValue = Number.NaN; // The minimum value ever seen in this TimeSeries.
+ }
+
+ TimeSeries.defaultOptions = {
+ resetBoundsInterval: 3000,
+ resetBounds: true
+ };
+
+ /**
+ * Recalculate the min/max values for this <code>TimeSeries</code> object.
+ *
+ * This causes the graph to scale itself in the y-axis.
+ */
+ TimeSeries.prototype.resetBounds = function() {
+ if (this.data.length) {
+ // Walk through all data points, finding the min/max value
+ this.maxValue = this.data[0][1];
+ this.minValue = this.data[0][1];
+ for (var i = 1; i < this.data.length; i++) {
+ var value = this.data[i][1];
+ if (value > this.maxValue) {
+ this.maxValue = value;
+ }
+ if (value < this.minValue) {
+ this.minValue = value;
+ }
+ }
+ } else {
+ // No data exists, so set min/max to NaN
+ this.maxValue = Number.NaN;
+ this.minValue = Number.NaN;
+ }
+ };
+
+ /**
+ * Adds a new data point to the <code>TimeSeries</code>, preserving chronological order.
+ *
+ * @param timestamp the position, in time, of this data point
+ * @param value the value of this data point
+ * @param sumRepeatedTimeStampValues if <code>timestamp</code> has an exact match in the series, this flag controls
+ * whether it is replaced, or the values summed (defaults to false.)
+ */
+ TimeSeries.prototype.append = function(timestamp, value, sumRepeatedTimeStampValues) {
+ // Rewind until we hit an older timestamp
+ var i = this.data.length - 1;
+ while (i > 0 && this.data[i][0] > timestamp) {
+ i--;
+ }
+
+ if (this.data.length > 0 && this.data[i][0] === timestamp) {
+ // Update existing values in the array
+ if (sumRepeatedTimeStampValues) {
+ // Sum this value into the existing 'bucket'
+ this.data[i][1] += value;
+ value = this.data[i][1];
+ } else {
+ // Replace the previous value
+ this.data[i][1] = value;
+ }
+ } else if (i < this.data.length - 1) {
+ // Splice into the correct position to keep timestamps in order
+ this.data.splice(i + 1, 0, [timestamp, value]);
+ } else {
+ // Add to the end of the array
+ this.data.push([timestamp, value]);
+ }
+
+ this.maxValue = isNaN(this.maxValue) ? value : Math.max(this.maxValue, value);
+ this.minValue = isNaN(this.minValue) ? value : Math.min(this.minValue, value);
+ };
+
+ TimeSeries.prototype.dropOldData = function(oldestValidTime, maxDataSetLength) {
+ // We must always keep one expired data point as we need this to draw the
+ // line that comes into the chart from the left, but any points prior to that can be removed.
+ var removeCount = 0;
+ while (this.data.length - removeCount >= maxDataSetLength && this.data[removeCount + 1][0] < oldestValidTime) {
+ removeCount++;
+ }
+ if (removeCount !== 0) {
+ this.data.splice(0, removeCount);
+ }
+ };
+
+ /**
+ * Initialises a new <code>SmoothieChart</code>.
+ *
+ * Options are optional, and should be of the form below. Just specify the values you
+ * need and the rest will be given sensible defaults as shown:
+ *
+ * <pre>
+ * {
+ * minValue: undefined, // specify to clamp the lower y-axis to a given value
+ * maxValue: undefined, // specify to clamp the upper y-axis to a given value
+ * maxValueScale: 1, // allows proportional padding to be added above the chart. for 10% padding, specify 1.1.
+ * yRangeFunction: undefined, // function({min: , max: }) { return {min: , max: }; }
+ * scaleSmoothing: 0.125, // controls the rate at which y-value zoom animation occurs
+ * millisPerPixel: 20, // sets the speed at which the chart pans by
+ * maxDataSetLength: 2,
+ * interpolation: 'bezier' // or 'linear'
+ * timestampFormatter: null, // Optional function to format time stamps for bottom of chart. You may use SmoothieChart.timeFormatter, or your own: function(date) { return ''; }
+ * horizontalLines: [], // [ { value: 0, color: '#ffffff', lineWidth: 1 } ],
+ * grid:
+ * {
+ * fillStyle: '#000000', // the background colour of the chart
+ * lineWidth: 1, // the pixel width of grid lines
+ * strokeStyle: '#777777', // colour of grid lines
+ * millisPerLine: 1000, // distance between vertical grid lines
+ * sharpLines: false, // controls whether grid lines are 1px sharp, or softened
+ * verticalSections: 2, // number of vertical sections marked out by horizontal grid lines
+ * borderVisible: true // whether the grid lines trace the border of the chart or not
+ * },
+ * labels
+ * {
+ * disabled: false, // enables/disables labels showing the min/max values
+ * fillStyle: '#ffffff', // colour for text of labels,
+ * fontSize: 15,
+ * fontFamily: 'sans-serif',
+ * precision: 2
+ * },
+ * }
+ * </pre>
+ *
+ * @constructor
+ */
+ function SmoothieChart(options) {
+ this.options = Util.extend({}, SmoothieChart.defaultChartOptions, options);
+ this.seriesSet = [];
+ this.currentValueRange = 1;
+ this.currentVisMinValue = 0;
+ }
+
+ SmoothieChart.defaultChartOptions = {
+ millisPerPixel: 20,
+ maxValueScale: 1,
+ interpolation: 'bezier',
+ scaleSmoothing: 0.125,
+ maxDataSetLength: 2,
+ grid: {
+ fillStyle: '#000000',
+ strokeStyle: '#777777',
+ lineWidth: 1,
+ sharpLines: false,
+ millisPerLine: 1000,
+ verticalSections: 2,
+ borderVisible: true
+ },
+ labels: {
+ fillStyle: '#ffffff',
+ disabled: false,
+ fontSize: 10,
+ fontFamily: 'monospace',
+ precision: 2
+ },
+ horizontalLines: []
+ };
+
+ // Based on http://inspirit.github.com/jsfeat/js/compatibility.js
+ SmoothieChart.AnimateCompatibility = (function() {
+ // TODO this global variable will cause bugs if more than one chart is used and the browser does not support *requestAnimationFrame natively
+ var lastTime = 0,
+ requestAnimationFrame = function(callback, element) {
+ var requestAnimationFrame =
+ window.requestAnimationFrame ||
+ window.webkitRequestAnimationFrame ||
+ window.mozRequestAnimationFrame ||
+ window.oRequestAnimationFrame ||
+ window.msRequestAnimationFrame ||
+ function(callback) {
+ var currTime = new Date().getTime(),
+ timeToCall = Math.max(0, 16 - (currTime - lastTime)),
+ id = window.setTimeout(function() {
+ callback(currTime + timeToCall);
+ }, timeToCall);
+ lastTime = currTime + timeToCall;
+ return id;
+ };
+ return requestAnimationFrame.call(window, callback, element);
+ },
+ cancelAnimationFrame = function(id) {
+ var cancelAnimationFrame =
+ window.cancelAnimationFrame ||
+ function(id) {
+ clearTimeout(id);
+ };
+ return cancelAnimationFrame.call(window, id);
+ };
+
+ return {
+ requestAnimationFrame: requestAnimationFrame,
+ cancelAnimationFrame: cancelAnimationFrame
+ };
+ })();
+
+ SmoothieChart.defaultSeriesPresentationOptions = {
+ lineWidth: 1,
+ strokeStyle: '#ffffff'
+ };
+
+ /**
+ * Adds a <code>TimeSeries</code> to this chart, with optional presentation options.
+ *
+ * Presentation options should be of the form (defaults shown):
+ *
+ * <pre>
+ * {
+ * lineWidth: 1,
+ * strokeStyle: '#ffffff',
+ * fillStyle: undefined
+ * }
+ * </pre>
+ */
+ SmoothieChart.prototype.addTimeSeries = function(timeSeries, options) {
+ this.seriesSet.push({timeSeries: timeSeries, options: Util.extend({}, SmoothieChart.defaultSeriesPresentationOptions, options)});
+ if (timeSeries.options.resetBounds && timeSeries.options.resetBoundsInterval > 0) {
+ timeSeries.resetBoundsTimerId = setInterval(
+ function() {
+ timeSeries.resetBounds();
+ },
+ timeSeries.options.resetBoundsInterval
+ );
+ }
+ };
+
+ /**
+ * Removes the specified <code>TimeSeries</code> from the chart.
+ */
+ SmoothieChart.prototype.removeTimeSeries = function(timeSeries) {
+ // Find the correct timeseries to remove, and remove it
+ var numSeries = this.seriesSet.length;
+ for (var i = 0; i < numSeries; i++) {
+ if (this.seriesSet[i].timeSeries === timeSeries) {
+ this.seriesSet.splice(i, 1);
+ break;
+ }
+ }
+ // If a timer was operating for that timeseries, remove it
+ if (timeSeries.resetBoundsTimerId) {
+ // Stop resetting the bounds, if we were
+ clearInterval(timeSeries.resetBoundsTimerId);
+ }
+ };
+
+ /**
+ * Instructs the <code>SmoothieChart</code> to start rendering to the provided canvas, with specified delay.
+ *
+ * @param canvas the target canvas element
+ * @param delayMillis an amount of time to wait before a data point is shown. This can prevent the end of the series
+ * from appearing on screen, with new values flashing into view, at the expense of some latency.
+ */
+ SmoothieChart.prototype.streamTo = function(canvas, delayMillis) {
+ this.canvas = canvas;
+ this.delay = delayMillis;
+ this.start();
+ };
+
+ /**
+ * Starts the animation of this chart.
+ */
+ SmoothieChart.prototype.start = function() {
+ if (this.frame) {
+ // We're already running, so just return
+ return;
+ }
+
+ // Renders a frame, and queues the next frame for later rendering
+ var animate = function() {
+ this.frame = SmoothieChart.AnimateCompatibility.requestAnimationFrame(function() {
+ this.render();
+ animate();
+ }.bind(this));
+ }.bind(this);
+
+ animate();
+ };
+
+ /**
+ * Stops the animation of this chart.
+ */
+ SmoothieChart.prototype.stop = function() {
+ if (this.frame) {
+ SmoothieChart.AnimateCompatibility.cancelAnimationFrame(this.frame);
+ delete this.frame;
+ }
+ };
+
+ SmoothieChart.prototype.updateValueRange = function() {
+ // Calculate the current scale of the chart, from all time series.
+ var chartOptions = this.options,
+ chartMaxValue = Number.NaN,
+ chartMinValue = Number.NaN;
+
+ for (var d = 0; d < this.seriesSet.length; d++) {
+ // TODO(ndunn): We could calculate / track these values as they stream in.
+ var timeSeries = this.seriesSet[d].timeSeries;
+ if (!isNaN(timeSeries.maxValue)) {
+ chartMaxValue = !isNaN(chartMaxValue) ? Math.max(chartMaxValue, timeSeries.maxValue) : timeSeries.maxValue;
+ }
+
+ if (!isNaN(timeSeries.minValue)) {
+ chartMinValue = !isNaN(chartMinValue) ? Math.min(chartMinValue, timeSeries.minValue) : timeSeries.minValue;
+ }
+ }
+
+ // Scale the chartMaxValue to add padding at the top if required
+ if (chartOptions.maxValue != null) {
+ chartMaxValue = chartOptions.maxValue;
+ } else {
+ chartMaxValue *= chartOptions.maxValueScale;
+ }
+
+ // Set the minimum if we've specified one
+ if (chartOptions.minValue != null) {
+ chartMinValue = chartOptions.minValue;
+ }
+
+ // If a custom range function is set, call it
+ if (this.options.yRangeFunction) {
+ var range = this.options.yRangeFunction({min: chartMinValue, max: chartMaxValue});
+ chartMinValue = range.min;
+ chartMaxValue = range.max;
+ }
+
+ if (!isNaN(chartMaxValue) && !isNaN(chartMinValue)) {
+ var targetValueRange = chartMaxValue - chartMinValue;
+ this.currentValueRange += chartOptions.scaleSmoothing * (targetValueRange - this.currentValueRange);
+ this.currentVisMinValue += chartOptions.scaleSmoothing * (chartMinValue - this.currentVisMinValue);
+ }
+
+ this.valueRange = { min: chartMinValue, max: chartMaxValue };
+ };
+
+ SmoothieChart.prototype.render = function(canvas, time) {
+ canvas = canvas || this.canvas;
+ time = time || new Date().getTime() - (this.delay || 0);
+
+ // TODO only render if the chart has moved at least 1px since the last rendered frame
+
+ // Round time down to pixel granularity, so motion appears smoother.
+ time -= time % this.options.millisPerPixel;
+
+ var context = canvas.getContext('2d'),
+ chartOptions = this.options,
+ dimensions = { top: 0, left: 0, width: canvas.clientWidth, height: canvas.clientHeight },
+ // Calculate the threshold time for the oldest data points.
+ oldestValidTime = time - (dimensions.width * chartOptions.millisPerPixel),
+ valueToYPixel = function(value) {
+ var offset = value - this.currentVisMinValue;
+ return this.currentValueRange === 0
+ ? dimensions.height
+ : dimensions.height - (Math.round((offset / this.currentValueRange) * dimensions.height));
+ }.bind(this),
+ timeToXPixel = function(t) {
+ return Math.round(dimensions.width - ((time - t) / chartOptions.millisPerPixel));
+ };
+
+ this.updateValueRange();
+
+ context.font = chartOptions.labels.fontSize + 'px ' + chartOptions.labels.fontFamily;
+
+ // Save the state of the canvas context, any transformations applied in this method
+ // will get removed from the stack at the end of this method when .restore() is called.
+ context.save();
+
+ // Move the origin.
+ context.translate(dimensions.left, dimensions.top);
+
+ // Create a clipped rectangle - anything we draw will be constrained to this rectangle.
+ // This prevents the occasional pixels from curves near the edges overrunning and creating
+ // screen cheese (that phrase should need no explanation).
+ context.beginPath();
+ context.rect(0, 0, dimensions.width, dimensions.height);
+ context.clip();
+
+ // Clear the working area.
+ context.save();
+ context.fillStyle = chartOptions.grid.fillStyle;
+ context.clearRect(0, 0, dimensions.width, dimensions.height);
+ context.fillRect(0, 0, dimensions.width, dimensions.height);
+ context.restore();
+
+ // Grid lines...
+ context.save();
+ context.lineWidth = chartOptions.grid.lineWidth;
+ context.strokeStyle = chartOptions.grid.strokeStyle;
+ // Vertical (time) dividers.
+ if (chartOptions.grid.millisPerLine > 0) {
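+ // Note: minValueString is var-hoisted from the label-drawing code below and
+ // is still undefined here, so this initial width is measured against the
+ // literal string "undefined".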
+ var textUntilX = dimensions.width - context.measureText(minValueString).width + 4;
+ for (var t = time - (time % chartOptions.grid.millisPerLine);
+ t >= oldestValidTime;
+ t -= chartOptions.grid.millisPerLine) {
+ var gx = timeToXPixel(t);
+ if (chartOptions.grid.sharpLines) {
+ gx -= 0.5;
+ }
+ context.beginPath();
+ context.moveTo(gx, 0);
+ context.lineTo(gx, dimensions.height);
+ context.stroke();
+ context.closePath();
+
+ // Display timestamp at bottom of this line if requested, and it won't overlap
+ if (chartOptions.timestampFormatter && gx < textUntilX) {
+ // Formats the timestamp based on user specified formatting function
+ // SmoothieChart.timeFormatter function above is one such formatting option
+ var tx = new Date(t),
+ ts = chartOptions.timestampFormatter(tx),
+ tsWidth = context.measureText(ts).width;
+ textUntilX = gx - tsWidth - 2;
+ context.fillStyle = chartOptions.labels.fillStyle;
+ context.fillText(ts, gx - tsWidth, dimensions.height - 2);
+ }
+ }
+ }
+
+ // Horizontal (value) dividers.
+ for (var v = 1; v < chartOptions.grid.verticalSections; v++) {
+ var gy = Math.round(v * dimensions.height / chartOptions.grid.verticalSections);
+ if (chartOptions.grid.sharpLines) {
+ gy -= 0.5;
+ }
+ context.beginPath();
+ context.moveTo(0, gy);
+ context.lineTo(dimensions.width, gy);
+ context.stroke();
+ context.closePath();
+ }
+ // Bounding rectangle.
+ if (chartOptions.grid.borderVisible) {
+ context.beginPath();
+ context.strokeRect(0, 0, dimensions.width, dimensions.height);
+ context.closePath();
+ }
+ context.restore();
+
+ // Draw any horizontal lines...
+ if (chartOptions.horizontalLines && chartOptions.horizontalLines.length) {
+ for (var hl = 0; hl < chartOptions.horizontalLines.length; hl++) {
+ var line = chartOptions.horizontalLines[hl],
+ hly = Math.round(valueToYPixel(line.value)) - 0.5;
+ context.strokeStyle = line.color || '#ffffff';
+ context.lineWidth = line.lineWidth || 1;
+ context.beginPath();
+ context.moveTo(0, hly);
+ context.lineTo(dimensions.width, hly);
+ context.stroke();
+ context.closePath();
+ }
+ }
+
+ // For each data set...
+ for (var d = 0; d < this.seriesSet.length; d++) {
+ context.save();
+ var timeSeries = this.seriesSet[d].timeSeries,
+ dataSet = timeSeries.data,
+ seriesOptions = this.seriesSet[d].options;
+
+ // Delete old data that's moved off the left of the chart.
+ timeSeries.dropOldData(oldestValidTime, chartOptions.maxDataSetLength);
+
+ // Set style for this dataSet.
+ context.lineWidth = seriesOptions.lineWidth;
+ context.strokeStyle = seriesOptions.strokeStyle;
+ // Draw the line...
+ context.beginPath();
+ // Retain lastX, lastY for calculating the control points of bezier curves.
+ var firstX = 0, lastX = 0, lastY = 0;
+ for (var i = 0; i < dataSet.length && dataSet.length !== 1; i++) {
+ var x = timeToXPixel(dataSet[i][0]),
+ y = valueToYPixel(dataSet[i][1]);
+
+ if (i === 0) {
+ firstX = x;
+ context.moveTo(x, y);
+ } else {
+ switch (chartOptions.interpolation) {
+ case "linear":
+ case "line": {
+ context.lineTo(x,y);
+ break;
+ }
+ case "bezier":
+ default: {
+ // Great explanation of Bezier curves: http://en.wikipedia.org/wiki/Bezier_curve#Quadratic_curves
+ //
+ // Assuming A was the last point in the line plotted and B is the new point,
+ // we draw a curve with control points P and Q as below.
+ //
+ // A---P
+ // |
+ // |
+ // |
+ // Q---B
+ //
+ // Importantly, A and P are at the same y coordinate, as are B and Q. This is
+ // so adjacent curves appear to flow as one.
+ //
+ context.bezierCurveTo( // startPoint (A) is implicit from last iteration of loop
+ Math.round((lastX + x) / 2), lastY, // controlPoint1 (P)
+ Math.round((lastX + x) / 2), y, // controlPoint2 (Q)
+ x, y); // endPoint (B)
+ break;
+ }
+ }
+ }
+
+ lastX = x; lastY = y;
+ }
+
+ if (dataSet.length > 1) {
+ if (seriesOptions.fillStyle) {
+ // Close up the fill region.
+ context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, lastY);
+ context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, dimensions.height + seriesOptions.lineWidth + 1);
+ context.lineTo(firstX, dimensions.height + seriesOptions.lineWidth);
+ context.fillStyle = seriesOptions.fillStyle;
+ context.fill();
+ }
+
+ if (seriesOptions.strokeStyle && seriesOptions.strokeStyle !== 'none') {
+ context.stroke();
+ }
+ context.closePath();
+ }
+ context.restore();
+ }
+
+ // Draw the axis values on the chart.
+ if (!chartOptions.labels.disabled && !isNaN(this.valueRange.min) && !isNaN(this.valueRange.max)) {
+ var maxValueString = parseFloat(this.valueRange.max).toFixed(chartOptions.labels.precision),
+ minValueString = parseFloat(this.valueRange.min).toFixed(chartOptions.labels.precision);
+ context.fillStyle = chartOptions.labels.fillStyle;
+ context.fillText(maxValueString, dimensions.width - context.measureText(maxValueString).width - 2, chartOptions.labels.fontSize);
+ context.fillText(minValueString, dimensions.width - context.measureText(minValueString).width - 2, dimensions.height - 2);
+ }
+
+ context.restore(); // See .save() above.
+ };
+
+ // Sample timestamp formatting function
+ SmoothieChart.timeFormatter = function(date) {
+ function pad2(number) { return (number < 10 ? '0' : '') + number }
+ return pad2(date.getHours()) + ':' + pad2(date.getMinutes()) + ':' + pad2(date.getSeconds());
+ };
+
+ exports.TimeSeries = TimeSeries;
+ exports.SmoothieChart = SmoothieChart;
+
+})(typeof exports === 'undefined' ? this : exports);
+
diff --git a/asterix-app/src/main/resources/test.properties b/asterix-app/src/main/resources/test.properties
old mode 100755
new mode 100644
diff --git a/asterix-app/src/main/resources/webui/static/js/jquery.autosize-min.js b/asterix-app/src/main/resources/webui/static/js/jquery.autosize-min.js
index b4303a6..2cff1f8 100644
--- a/asterix-app/src/main/resources/webui/static/js/jquery.autosize-min.js
+++ b/asterix-app/src/main/resources/webui/static/js/jquery.autosize-min.js
@@ -1,7 +1 @@
-/*!
- jQuery Autosize v1.16.7
- (c) 2013 Jack Moore - jacklmoore.com
- updated: 2013-03-20
- license: http://www.opensource.org/licenses/mit-license.php
-*/
-(function(e){var t,o,n={className:"autosizejs",append:"",callback:!1},i="hidden",s="border-box",a="lineHeight",l='<textarea tabindex="-1" style="position:absolute; top:-999px; left:0; right:auto; bottom:auto; border:0; -moz-box-sizing:content-box; -webkit-box-sizing:content-box; box-sizing:content-box; word-wrap:break-word; height:0 !important; min-height:0 !important; overflow:hidden;"/>',r=["fontFamily","fontSize","fontWeight","fontStyle","letterSpacing","textTransform","wordSpacing","textIndent"],c="oninput",h="onpropertychange",p=e(l).data("autosize",!0)[0];p.style.lineHeight="99px","99px"===e(p).css(a)&&r.push(a),p.style.lineHeight="",e.fn.autosize=function(a){return a=e.extend({},n,a||{}),p.parentNode!==document.body&&(e(document.body).append(p),p.value="\n\n\n",p.scrollTop=9e4,t=p.scrollHeight===p.scrollTop+p.clientHeight),this.each(function(){function n(){o=b,p.className=a.className,e.each(r,function(e,t){p.style[t]=f.css(t)})}function l(){var e,s,l;if(o!==b&&n(),!d){d=!0,p.value=b.value+a.append,p.style.overflowY=b.style.overflowY,l=parseInt(b.style.height,10),p.style.width=Math.max(f.width(),0)+"px",t?e=p.scrollHeight:(p.scrollTop=0,p.scrollTop=9e4,e=p.scrollTop);var r=parseInt(f.css("maxHeight"),10);r=r&&r>0?r:9e4,e>r?(e=r,s="scroll"):u>e&&(e=u),e+=x,b.style.overflowY=s||i,l!==e&&(b.style.height=e+"px",w&&a.callback.call(b)),setTimeout(function(){d=!1},1)}}var u,d,g,b=this,f=e(b),x=0,w=e.isFunction(a.callback);f.data("autosize")||((f.css("box-sizing")===s||f.css("-moz-box-sizing")===s||f.css("-webkit-box-sizing")===s)&&(x=f.outerHeight()-f.height()),u=Math.max(parseInt(f.css("minHeight"),10)-x,f.height()),g="none"===f.css("resize")||"vertical"===f.css("resize")?"none":"horizontal",f.css({overflow:i,overflowY:i,wordWrap:"break-word",resize:g}).data("autosize",!0),h in b?c in b?b[c]=b.onkeyup=l:b[h]=l:b[c]=l,e(window).on("resize",function(){d=!1,l()}),f.on("autosize",function(){d=!1,l()}),l())})}})(window.jQuery||window.Zepto);
\ No newline at end of file
+(function(e){var t,o,n={className:"autosizejs",append:"",callback:!1},i="hidden",s="border-box",a="lineHeight",l='<textarea tabindex="-1" style="position:absolute; top:-999px; left:0; right:auto; bottom:auto; border:0; -moz-box-sizing:content-box; -webkit-box-sizing:content-box; box-sizing:content-box; word-wrap:break-word; height:0 !important; min-height:0 !important; overflow:hidden;"/>',r=["fontFamily","fontSize","fontWeight","fontStyle","letterSpacing","textTransform","wordSpacing","textIndent"],c="oninput",h="onpropertychange",p=e(l).data("autosize",!0)[0];p.style.lineHeight="99px","99px"===e(p).css(a)&&r.push(a),p.style.lineHeight="",e.fn.autosize=function(a){return a=e.extend({},n,a||{}),p.parentNode!==document.body&&(e(document.body).append(p),p.value="\n\n\n",p.scrollTop=9e4,t=p.scrollHeight===p.scrollTop+p.clientHeight),this.each(function(){function n(){o=b,p.className=a.className,e.each(r,function(e,t){p.style[t]=f.css(t)})}function l(){var e,s,l;if(o!==b&&n(),!d){d=!0,p.value=b.value+a.append,p.style.overflowY=b.style.overflowY,l=parseInt(b.style.height,10),p.style.width=Math.max(f.width(),0)+"px",t?e=p.scrollHeight:(p.scrollTop=0,p.scrollTop=9e4,e=p.scrollTop);var r=parseInt(f.css("maxHeight"),10);r=r&&r>0?r:9e4,e>r?(e=r,s="scroll"):u>e&&(e=u),e+=x,b.style.overflowY=s||i,l!==e&&(b.style.height=e+"px",w&&a.callback.call(b)),setTimeout(function(){d=!1},1)}}var u,d,g,b=this,f=e(b),x=0,w=e.isFunction(a.callback);f.data("autosize")||((f.css("box-sizing")===s||f.css("-moz-box-sizing")===s||f.css("-webkit-box-sizing")===s)&&(x=f.outerHeight()-f.height()),u=Math.max(parseInt(f.css("minHeight"),10)-x,f.height()),g="none"===f.css("resize")||"vertical"===f.css("resize")?"none":"horizontal",f.css({overflow:i,overflowY:i,wordWrap:"break-word",resize:g}).data("autosize",!0),h in b?c in b?b[c]=b.onkeyup=l:b[h]=l:b[c]=l,e(window).on("resize",function(){d=!1,l()}),f.on("autosize",function(){d=!1,l()}),l())})}})(window.jQuery||window.Zepto);
diff --git a/asterix-app/src/main/resources/webui/static/js/jquery.min.js b/asterix-app/src/main/resources/webui/static/js/jquery.min.js
index 006e953..0292c87 100644
--- a/asterix-app/src/main/resources/webui/static/js/jquery.min.js
+++ b/asterix-app/src/main/resources/webui/static/js/jquery.min.js
@@ -1,5 +1,3 @@
-/*! jQuery v1.9.1 | (c) 2005, 2012 jQuery Foundation, Inc. | jquery.org/license
-//@ sourceMappingURL=jquery.min.map
-*/(function(e,t){var n,r,i=typeof t,o=e.document,a=e.location,s=e.jQuery,u=e.$,l={},c=[],p="1.9.1",f=c.concat,d=c.push,h=c.slice,g=c.indexOf,m=l.toString,y=l.hasOwnProperty,v=p.trim,b=function(e,t){return new b.fn.init(e,t,r)},x=/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/.source,w=/\S+/g,T=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,N=/^(?:(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,k=/^[\],:{}\s]*$/,E=/(?:^|:|,)(?:\s*\[)+/g,S=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,A=/"[^"\\\r\n]*"|true|false|null|-?(?:\d+\.|)\d+(?:[eE][+-]?\d+|)/g,j=/^-ms-/,D=/-([\da-z])/gi,L=function(e,t){return t.toUpperCase()},H=function(e){(o.addEventListener||"load"===e.type||"complete"===o.readyState)&&(q(),b.ready())},q=function(){o.addEventListener?(o.removeEventListener("DOMContentLoaded",H,!1),e.removeEventListener("load",H,!1)):(o.detachEvent("onreadystatechange",H),e.detachEvent("onload",H))};b.fn=b.prototype={jquery:p,constructor:b,init:function(e,n,r){var i,a;if(!e)return this;if("string"==typeof e){if(i="<"===e.charAt(0)&&">"===e.charAt(e.length-1)&&e.length>=3?[null,e,null]:N.exec(e),!i||!i[1]&&n)return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e);if(i[1]){if(n=n instanceof b?n[0]:n,b.merge(this,b.parseHTML(i[1],n&&n.nodeType?n.ownerDocument||n:o,!0)),C.test(i[1])&&b.isPlainObject(n))for(i in n)b.isFunction(this[i])?this[i](n[i]):this.attr(i,n[i]);return this}if(a=o.getElementById(i[2]),a&&a.parentNode){if(a.id!==i[2])return r.find(e);this.length=1,this[0]=a}return this.context=o,this.selector=e,this}return e.nodeType?(this.context=this[0]=e,this.length=1,this):b.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),b.makeArray(e,this))},selector:"",length:0,size:function(){return this.length},toArray:function(){return h.call(this)},get:function(e){return null==e?this.toArray():0>e?this[this.length+e]:this[e]},pushStack:function(e){var t=b.merge(this.constructor(),e);return t.prevObject=this,t.context=this.context,t},each:function(e,t){return b.each(this,e,t)},ready:function(e){return b.ready.promise().done(e),this},slice:function(){return this.pushStack(h.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(0>e?t:0);return this.pushStack(n>=0&&t>n?[this[n]]:[])},map:function(e){return this.pushStack(b.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:d,sort:[].sort,splice:[].splice},b.fn.init.prototype=b.fn,b.extend=b.fn.extend=function(){var e,n,r,i,o,a,s=arguments[0]||{},u=1,l=arguments.length,c=!1;for("boolean"==typeof s&&(c=s,s=arguments[1]||{},u=2),"object"==typeof s||b.isFunction(s)||(s={}),l===u&&(s=this,--u);l>u;u++)if(null!=(o=arguments[u]))for(i in o)e=s[i],r=o[i],s!==r&&(c&&r&&(b.isPlainObject(r)||(n=b.isArray(r)))?(n?(n=!1,a=e&&b.isArray(e)?e:[]):a=e&&b.isPlainObject(e)?e:{},s[i]=b.extend(c,a,r)):r!==t&&(s[i]=r));return s},b.extend({noConflict:function(t){return e.$===b&&(e.$=u),t&&e.jQuery===b&&(e.jQuery=s),b},isReady:!1,readyWait:1,holdReady:function(e){e?b.readyWait++:b.ready(!0)},ready:function(e){if(e===!0?!--b.readyWait:!b.isReady){if(!o.body)return setTimeout(b.ready);b.isReady=!0,e!==!0&&--b.readyWait>0||(n.resolveWith(o,[b]),b.fn.trigger&&b(o).trigger("ready").off("ready"))}},isFunction:function(e){return"function"===b.type(e)},isArray:Array.isArray||function(e){return"array"===b.type(e)},isWindow:function(e){return 
null!=e&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?l[m.call(e)]||"object":typeof e},isPlainObject:function(e){if(!e||"object"!==b.type(e)||e.nodeType||b.isWindow(e))return!1;try{if(e.constructor&&!y.call(e,"constructor")&&!y.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return r===t||y.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw Error(e)},parseHTML:function(e,t,n){if(!e||"string"!=typeof e)return null;"boolean"==typeof t&&(n=t,t=!1),t=t||o;var r=C.exec(e),i=!n&&[];return r?[t.createElement(r[1])]:(r=b.buildFragment([e],t,i),i&&b(i).remove(),b.merge([],r.childNodes))},parseJSON:function(n){return e.JSON&&e.JSON.parse?e.JSON.parse(n):null===n?n:"string"==typeof n&&(n=b.trim(n),n&&k.test(n.replace(S,"@").replace(A,"]").replace(E,"")))?Function("return "+n)():(b.error("Invalid JSON: "+n),t)},parseXML:function(n){var r,i;if(!n||"string"!=typeof n)return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(o){r=t}return r&&r.documentElement&&!r.getElementsByTagName("parsererror").length||b.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&b.trim(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(j,"ms-").replace(D,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,t,n){var r,i=0,o=e.length,a=M(e);if(n){if(a){for(;o>i;i++)if(r=t.apply(e[i],n),r===!1)break}else for(i in e)if(r=t.apply(e[i],n),r===!1)break}else if(a){for(;o>i;i++)if(r=t.call(e[i],i,e[i]),r===!1)break}else for(i in e)if(r=t.call(e[i],i,e[i]),r===!1)break;return e},trim:v&&!v.call("\ufeff\u00a0")?function(e){return null==e?"":v.call(e)}:function(e){return null==e?"":(e+"").replace(T,"")},makeArray:function(e,t){var n=t||[];return null!=e&&(M(Object(e))?b.merge(n,"string"==typeof e?[e]:e):d.call(n,e)),n},inArray:function(e,t,n){var r;if(t){if(g)return g.call(t,e,n);for(r=t.length,n=n?0>n?Math.max(0,r+n):n:0;r>n;n++)if(n in t&&t[n]===e)return n}return-1},merge:function(e,n){var r=n.length,i=e.length,o=0;if("number"==typeof r)for(;r>o;o++)e[i++]=n[o];else while(n[o]!==t)e[i++]=n[o++];return e.length=i,e},grep:function(e,t,n){var r,i=[],o=0,a=e.length;for(n=!!n;a>o;o++)r=!!t(e[o],o),n!==r&&i.push(e[o]);return i},map:function(e,t,n){var r,i=0,o=e.length,a=M(e),s=[];if(a)for(;o>i;i++)r=t(e[i],i,n),null!=r&&(s[s.length]=r);else for(i in e)r=t(e[i],i,n),null!=r&&(s[s.length]=r);return f.apply([],s)},guid:1,proxy:function(e,n){var r,i,o;return"string"==typeof n&&(o=e[n],n=e,e=o),b.isFunction(e)?(r=h.call(arguments,2),i=function(){return e.apply(n||this,r.concat(h.call(arguments)))},i.guid=e.guid=e.guid||b.guid++,i):t},access:function(e,n,r,i,o,a,s){var u=0,l=e.length,c=null==r;if("object"===b.type(r)){o=!0;for(u in r)b.access(e,n,u,r[u],!0,a,s)}else if(i!==t&&(o=!0,b.isFunction(i)||(s=!0),c&&(s?(n.call(e,i),n=null):(c=n,n=function(e,t,n){return c.call(b(e),n)})),n))for(;l>u;u++)n(e[u],r,s?i:i.call(e[u],u,n(e[u],r)));return o?e:c?n.call(e):l?n(e[0],r):a},now:function(){return(new Date).getTime()}}),b.ready.promise=function(t){if(!n)if(n=b.Deferred(),"complete"===o.readyState)setTimeout(b.ready);else 
if(o.addEventListener)o.addEventListener("DOMContentLoaded",H,!1),e.addEventListener("load",H,!1);else{o.attachEvent("onreadystatechange",H),e.attachEvent("onload",H);var r=!1;try{r=null==e.frameElement&&o.documentElement}catch(i){}r&&r.doScroll&&function a(){if(!b.isReady){try{r.doScroll("left")}catch(e){return setTimeout(a,50)}q(),b.ready()}}()}return n.promise(t)},b.each("Boolean Number String Function Array Date RegExp Object Error".split(" "),function(e,t){l["[object "+t+"]"]=t.toLowerCase()});function M(e){var t=e.length,n=b.type(e);return b.isWindow(e)?!1:1===e.nodeType&&t?!0:"array"===n||"function"!==n&&(0===t||"number"==typeof t&&t>0&&t-1 in e)}r=b(o);var _={};function F(e){var t=_[e]={};return b.each(e.match(w)||[],function(e,n){t[n]=!0}),t}b.Callbacks=function(e){e="string"==typeof e?_[e]||F(e):b.extend({},e);var n,r,i,o,a,s,u=[],l=!e.once&&[],c=function(t){for(r=e.memory&&t,i=!0,a=s||0,s=0,o=u.length,n=!0;u&&o>a;a++)if(u[a].apply(t[0],t[1])===!1&&e.stopOnFalse){r=!1;break}n=!1,u&&(l?l.length&&c(l.shift()):r?u=[]:p.disable())},p={add:function(){if(u){var t=u.length;(function i(t){b.each(t,function(t,n){var r=b.type(n);"function"===r?e.unique&&p.has(n)||u.push(n):n&&n.length&&"string"!==r&&i(n)})})(arguments),n?o=u.length:r&&(s=t,c(r))}return this},remove:function(){return u&&b.each(arguments,function(e,t){var r;while((r=b.inArray(t,u,r))>-1)u.splice(r,1),n&&(o>=r&&o--,a>=r&&a--)}),this},has:function(e){return e?b.inArray(e,u)>-1:!(!u||!u.length)},empty:function(){return u=[],this},disable:function(){return u=l=r=t,this},disabled:function(){return!u},lock:function(){return l=t,r||p.disable(),this},locked:function(){return!l},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],!u||i&&!l||(n?l.push(t):c(t)),this},fire:function(){return p.fireWith(this,arguments),this},fired:function(){return!!i}};return p},b.extend({Deferred:function(e){var t=[["resolve","done",b.Callbacks("once memory"),"resolved"],["reject","fail",b.Callbacks("once memory"),"rejected"],["notify","progress",b.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return b.Deferred(function(n){b.each(t,function(t,o){var a=o[0],s=b.isFunction(e[t])&&e[t];i[o[1]](function(){var e=s&&s.apply(this,arguments);e&&b.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[a+"With"](this===r?n.promise():this,s?[e]:arguments)})}),e=null}).promise()},promise:function(e){return null!=e?b.extend(e,r):r}},i={};return r.pipe=r.then,b.each(t,function(e,o){var a=o[2],s=o[3];r[o[1]]=a.add,s&&a.add(function(){n=s},t[1^e][2].disable,t[2][2].lock),i[o[0]]=function(){return i[o[0]+"With"](this===i?r:this,arguments),this},i[o[0]+"With"]=a.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var t=0,n=h.call(arguments),r=n.length,i=1!==r||e&&b.isFunction(e.promise)?r:0,o=1===i?e:b.Deferred(),a=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?h.call(arguments):r,n===s?o.notifyWith(t,n):--i||o.resolveWith(t,n)}},s,u,l;if(r>1)for(s=Array(r),u=Array(r),l=Array(r);r>t;t++)n[t]&&b.isFunction(n[t].promise)?n[t].promise().done(a(t,l,n)).fail(o.reject).progress(a(t,u,s)):--i;return i||o.resolveWith(l,n),o.promise()}}),b.support=function(){var t,n,r,a,s,u,l,c,p,f,d=o.createElement("div");if(d.setAttribute("className","t"),d.innerHTML=" <link/><table></table><a href='/a'>a</a><input 
type='checkbox'/>",n=d.getElementsByTagName("*"),r=d.getElementsByTagName("a")[0],!n||!r||!n.length)return{};s=o.createElement("select"),l=s.appendChild(o.createElement("option")),a=d.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={getSetAttribute:"t"!==d.className,leadingWhitespace:3===d.firstChild.nodeType,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:"/a"===r.getAttribute("href"),opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:!!a.value,optSelected:l.selected,enctype:!!o.createElement("form").enctype,html5Clone:"<:nav></:nav>"!==o.createElement("nav").cloneNode(!0).outerHTML,boxModel:"CSS1Compat"===o.compatMode,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},a.checked=!0,t.noCloneChecked=a.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!l.disabled;try{delete d.test}catch(h){t.deleteExpando=!1}a=o.createElement("input"),a.setAttribute("value",""),t.input=""===a.getAttribute("value"),a.value="t",a.setAttribute("type","radio"),t.radioValue="t"===a.value,a.setAttribute("checked","t"),a.setAttribute("name","t"),u=o.createDocumentFragment(),u.appendChild(a),t.appendChecked=a.checked,t.checkClone=u.cloneNode(!0).cloneNode(!0).lastChild.checked,d.attachEvent&&(d.attachEvent("onclick",function(){t.noCloneEvent=!1}),d.cloneNode(!0).click());for(f in{submit:!0,change:!0,focusin:!0})d.setAttribute(c="on"+f,"t"),t[f+"Bubbles"]=c in e||d.attributes[c].expando===!1;return d.style.backgroundClip="content-box",d.cloneNode(!0).style.backgroundClip="",t.clearCloneStyle="content-box"===d.style.backgroundClip,b(function(){var n,r,a,s="padding:0;margin:0;border:0;display:block;box-sizing:content-box;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;",u=o.getElementsByTagName("body")[0];u&&(n=o.createElement("div"),n.style.cssText="border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px",u.appendChild(n).appendChild(d),d.innerHTML="<table><tr><td></td><td>t</td></tr></table>",a=d.getElementsByTagName("td"),a[0].style.cssText="padding:0;margin:0;border:0;display:none",p=0===a[0].offsetHeight,a[0].style.display="",a[1].style.display="none",t.reliableHiddenOffsets=p&&0===a[0].offsetHeight,d.innerHTML="",d.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=4===d.offsetWidth,t.doesNotIncludeMarginInBodyOffset=1!==u.offsetTop,e.getComputedStyle&&(t.pixelPosition="1%"!==(e.getComputedStyle(d,null)||{}).top,t.boxSizingReliable="4px"===(e.getComputedStyle(d,null)||{width:"4px"}).width,r=d.appendChild(o.createElement("div")),r.style.cssText=d.style.cssText=s,r.style.marginRight=r.style.width="0",d.style.width="1px",t.reliableMarginRight=!parseFloat((e.getComputedStyle(r,null)||{}).marginRight)),typeof d.style.zoom!==i&&(d.innerHTML="",d.style.cssText=s+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=3===d.offsetWidth,d.style.display="block",d.innerHTML="<div></div>",d.firstChild.style.width="5px",t.shrinkWrapBlocks=3!==d.offsetWidth,t.inlineBlockNeedsLayout&&(u.style.zoom=1)),u.removeChild(n),n=d=a=r=null)}),n=s=u=l=r=a=null,t}();var O=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,B=/([A-Z])/g;function P(e,n,r,i){if(b.acceptData(e)){var o,a,s=b.expando,u="string"==typeof 
n,l=e.nodeType,p=l?b.cache:e,f=l?e[s]:e[s]&&s;if(f&&p[f]&&(i||p[f].data)||!u||r!==t)return f||(l?e[s]=f=c.pop()||b.guid++:f=s),p[f]||(p[f]={},l||(p[f].toJSON=b.noop)),("object"==typeof n||"function"==typeof n)&&(i?p[f]=b.extend(p[f],n):p[f].data=b.extend(p[f].data,n)),o=p[f],i||(o.data||(o.data={}),o=o.data),r!==t&&(o[b.camelCase(n)]=r),u?(a=o[n],null==a&&(a=o[b.camelCase(n)])):a=o,a}}function R(e,t,n){if(b.acceptData(e)){var r,i,o,a=e.nodeType,s=a?b.cache:e,u=a?e[b.expando]:b.expando;if(s[u]){if(t&&(o=n?s[u]:s[u].data)){b.isArray(t)?t=t.concat(b.map(t,b.camelCase)):t in o?t=[t]:(t=b.camelCase(t),t=t in o?[t]:t.split(" "));for(r=0,i=t.length;i>r;r++)delete o[t[r]];if(!(n?$:b.isEmptyObject)(o))return}(n||(delete s[u].data,$(s[u])))&&(a?b.cleanData([e],!0):b.support.deleteExpando||s!=s.window?delete s[u]:s[u]=null)}}}b.extend({cache:{},expando:"jQuery"+(p+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?b.cache[e[b.expando]]:e[b.expando],!!e&&!$(e)},data:function(e,t,n){return P(e,t,n)},removeData:function(e,t){return R(e,t)},_data:function(e,t,n){return P(e,t,n,!0)},_removeData:function(e,t){return R(e,t,!0)},acceptData:function(e){if(e.nodeType&&1!==e.nodeType&&9!==e.nodeType)return!1;var t=e.nodeName&&b.noData[e.nodeName.toLowerCase()];return!t||t!==!0&&e.getAttribute("classid")===t}}),b.fn.extend({data:function(e,n){var r,i,o=this[0],a=0,s=null;if(e===t){if(this.length&&(s=b.data(o),1===o.nodeType&&!b._data(o,"parsedAttrs"))){for(r=o.attributes;r.length>a;a++)i=r[a].name,i.indexOf("data-")||(i=b.camelCase(i.slice(5)),W(o,i,s[i]));b._data(o,"parsedAttrs",!0)}return s}return"object"==typeof e?this.each(function(){b.data(this,e)}):b.access(this,function(n){return n===t?o?W(o,e,b.data(o,e)):null:(this.each(function(){b.data(this,e,n)}),t)},null,n,arguments.length>1,null,!0)},removeData:function(e){return this.each(function(){b.removeData(this,e)})}});function W(e,n,r){if(r===t&&1===e.nodeType){var i="data-"+n.replace(B,"-$1").toLowerCase();if(r=e.getAttribute(i),"string"==typeof r){try{r="true"===r?!0:"false"===r?!1:"null"===r?null:+r+""===r?+r:O.test(r)?b.parseJSON(r):r}catch(o){}b.data(e,n,r)}else r=t}return r}function $(e){var t;for(t in e)if(("data"!==t||!b.isEmptyObject(e[t]))&&"toJSON"!==t)return!1;return!0}b.extend({queue:function(e,n,r){var i;return e?(n=(n||"fx")+"queue",i=b._data(e,n),r&&(!i||b.isArray(r)?i=b._data(e,n,b.makeArray(r)):i.push(r)),i||[]):t},dequeue:function(e,t){t=t||"fx";var n=b.queue(e,t),r=n.length,i=n.shift(),o=b._queueHooks(e,t),a=function(){b.dequeue(e,t)};"inprogress"===i&&(i=n.shift(),r--),o.cur=i,i&&("fx"===t&&n.unshift("inprogress"),delete o.stop,i.call(e,a,o)),!r&&o&&o.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return b._data(e,n)||b._data(e,n,{empty:b.Callbacks("once memory").add(function(){b._removeData(e,t+"queue"),b._removeData(e,n)})})}}),b.fn.extend({queue:function(e,n){var r=2;return"string"!=typeof e&&(n=e,e="fx",r--),r>arguments.length?b.queue(this[0],e):n===t?this:this.each(function(){var t=b.queue(this,e,n);b._queueHooks(this,e),"fx"===e&&"inprogress"!==t[0]&&b.dequeue(this,e)})},dequeue:function(e){return this.each(function(){b.dequeue(this,e)})},delay:function(e,t){return e=b.fx?b.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,n){var 
r,i=1,o=b.Deferred(),a=this,s=this.length,u=function(){--i||o.resolveWith(a,[a])};"string"!=typeof e&&(n=e,e=t),e=e||"fx";while(s--)r=b._data(a[s],e+"queueHooks"),r&&r.empty&&(i++,r.empty.add(u));return u(),o.promise(n)}});var I,z,X=/[\t\r\n]/g,U=/\r/g,V=/^(?:input|select|textarea|button|object)$/i,Y=/^(?:a|area)$/i,J=/^(?:checked|selected|autofocus|autoplay|async|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped)$/i,G=/^(?:checked|selected)$/i,Q=b.support.getSetAttribute,K=b.support.input;b.fn.extend({attr:function(e,t){return b.access(this,b.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){b.removeAttr(this,e)})},prop:function(e,t){return b.access(this,b.prop,e,t,arguments.length>1)},removeProp:function(e){return e=b.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,o,a=0,s=this.length,u="string"==typeof e&&e;if(b.isFunction(e))return this.each(function(t){b(this).addClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(X," "):" ")){o=0;while(i=t[o++])0>r.indexOf(" "+i+" ")&&(r+=i+" ");n.className=b.trim(r)}return this},removeClass:function(e){var t,n,r,i,o,a=0,s=this.length,u=0===arguments.length||"string"==typeof e&&e;if(b.isFunction(e))return this.each(function(t){b(this).removeClass(e.call(this,t,this.className))});if(u)for(t=(e||"").match(w)||[];s>a;a++)if(n=this[a],r=1===n.nodeType&&(n.className?(" "+n.className+" ").replace(X," "):"")){o=0;while(i=t[o++])while(r.indexOf(" "+i+" ")>=0)r=r.replace(" "+i+" "," ");n.className=e?b.trim(r):""}return this},toggleClass:function(e,t){var n=typeof e,r="boolean"==typeof t;return b.isFunction(e)?this.each(function(n){b(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if("string"===n){var o,a=0,s=b(this),u=t,l=e.match(w)||[];while(o=l[a++])u=r?u:!s.hasClass(o),s[u?"addClass":"removeClass"](o)}else(n===i||"boolean"===n)&&(this.className&&b._data(this,"__className__",this.className),this.className=this.className||e===!1?"":b._data(this,"__className__")||"")})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;r>n;n++)if(1===this[n].nodeType&&(" "+this[n].className+" ").replace(X," ").indexOf(t)>=0)return!0;return!1},val:function(e){var n,r,i,o=this[0];{if(arguments.length)return i=b.isFunction(e),this.each(function(n){var o,a=b(this);1===this.nodeType&&(o=i?e.call(this,n,a.val()):e,null==o?o="":"number"==typeof o?o+="":b.isArray(o)&&(o=b.map(o,function(e){return null==e?"":e+""})),r=b.valHooks[this.type]||b.valHooks[this.nodeName.toLowerCase()],r&&"set"in r&&r.set(this,o,"value")!==t||(this.value=o))});if(o)return r=b.valHooks[o.type]||b.valHooks[o.nodeName.toLowerCase()],r&&"get"in r&&(n=r.get(o,"value"))!==t?n:(n=o.value,"string"==typeof n?n.replace(U,""):null==n?"":n)}}}),b.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,o="select-one"===e.type||0>i,a=o?null:[],s=o?i+1:r.length,u=0>i?s:o?i:0;for(;s>u;u++)if(n=r[u],!(!n.selected&&u!==i||(b.support.optDisabled?n.disabled:null!==n.getAttribute("disabled"))||n.parentNode.disabled&&b.nodeName(n.parentNode,"optgroup"))){if(t=b(n).val(),o)return t;a.push(t)}return a},set:function(e,t){var n=b.makeArray(t);return 
b(e).find("option").each(function(){this.selected=b.inArray(b(this).val(),n)>=0}),n.length||(e.selectedIndex=-1),n}}},attr:function(e,n,r){var o,a,s,u=e.nodeType;if(e&&3!==u&&8!==u&&2!==u)return typeof e.getAttribute===i?b.prop(e,n,r):(a=1!==u||!b.isXMLDoc(e),a&&(n=n.toLowerCase(),o=b.attrHooks[n]||(J.test(n)?z:I)),r===t?o&&a&&"get"in o&&null!==(s=o.get(e,n))?s:(typeof e.getAttribute!==i&&(s=e.getAttribute(n)),null==s?t:s):null!==r?o&&a&&"set"in o&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r):(b.removeAttr(e,n),t))},removeAttr:function(e,t){var n,r,i=0,o=t&&t.match(w);if(o&&1===e.nodeType)while(n=o[i++])r=b.propFix[n]||n,J.test(n)?!Q&&G.test(n)?e[b.camelCase("default-"+n)]=e[r]=!1:e[r]=!1:b.attr(e,n,""),e.removeAttribute(Q?n:r)},attrHooks:{type:{set:function(e,t){if(!b.support.radioValue&&"radio"===t&&b.nodeName(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(e,n,r){var i,o,a,s=e.nodeType;if(e&&3!==s&&8!==s&&2!==s)return a=1!==s||!b.isXMLDoc(e),a&&(n=b.propFix[n]||n,o=b.propHooks[n]),r!==t?o&&"set"in o&&(i=o.set(e,r,n))!==t?i:e[n]=r:o&&"get"in o&&null!==(i=o.get(e,n))?i:e[n]},propHooks:{tabIndex:{get:function(e){var n=e.getAttributeNode("tabindex");return n&&n.specified?parseInt(n.value,10):V.test(e.nodeName)||Y.test(e.nodeName)&&e.href?0:t}}}}),z={get:function(e,n){var r=b.prop(e,n),i="boolean"==typeof r&&e.getAttribute(n),o="boolean"==typeof r?K&&Q?null!=i:G.test(n)?e[b.camelCase("default-"+n)]:!!i:e.getAttributeNode(n);return o&&o.value!==!1?n.toLowerCase():t},set:function(e,t,n){return t===!1?b.removeAttr(e,n):K&&Q||!G.test(n)?e.setAttribute(!Q&&b.propFix[n]||n,n):e[b.camelCase("default-"+n)]=e[n]=!0,n}},K&&Q||(b.attrHooks.value={get:function(e,n){var r=e.getAttributeNode(n);return b.nodeName(e,"input")?e.defaultValue:r&&r.specified?r.value:t},set:function(e,n,r){return b.nodeName(e,"input")?(e.defaultValue=n,t):I&&I.set(e,n,r)}}),Q||(I=b.valHooks.button={get:function(e,n){var r=e.getAttributeNode(n);return r&&("id"===n||"name"===n||"coords"===n?""!==r.value:r.specified)?r.value:t},set:function(e,n,r){var i=e.getAttributeNode(r);return i||e.setAttributeNode(i=e.ownerDocument.createAttribute(r)),i.value=n+="","value"===r||n===e.getAttribute(r)?n:t}},b.attrHooks.contenteditable={get:I.get,set:function(e,t,n){I.set(e,""===t?!1:t,n)}},b.each(["width","height"],function(e,n){b.attrHooks[n]=b.extend(b.attrHooks[n],{set:function(e,r){return""===r?(e.setAttribute(n,"auto"),r):t}})})),b.support.hrefNormalized||(b.each(["href","src","width","height"],function(e,n){b.attrHooks[n]=b.extend(b.attrHooks[n],{get:function(e){var r=e.getAttribute(n,2);return null==r?t:r}})}),b.each(["href","src"],function(e,t){b.propHooks[t]={get:function(e){return e.getAttribute(t,4)}}})),b.support.style||(b.attrHooks.style={get:function(e){return e.style.cssText||t},set:function(e,t){return e.style.cssText=t+""}}),b.support.optSelected||(b.propHooks.selected=b.extend(b.propHooks.selected,{get:function(e){var t=e.parentNode;return t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex),null}})),b.support.enctype||(b.propFix.enctype="encoding"),b.support.checkOn||b.each(["radio","checkbox"],function(){b.valHooks[this]={get:function(e){return 
null===e.getAttribute("value")?"on":e.value}}}),b.each(["radio","checkbox"],function(){b.valHooks[this]=b.extend(b.valHooks[this],{set:function(e,n){return b.isArray(n)?e.checked=b.inArray(b(e).val(),n)>=0:t}})});var Z=/^(?:input|select|textarea)$/i,et=/^key/,tt=/^(?:mouse|contextmenu)|click/,nt=/^(?:focusinfocus|focusoutblur)$/,rt=/^([^.]*)(?:\.(.+)|)$/;function it(){return!0}function ot(){return!1}b.event={global:{},add:function(e,n,r,o,a){var s,u,l,c,p,f,d,h,g,m,y,v=b._data(e);if(v){r.handler&&(c=r,r=c.handler,a=c.selector),r.guid||(r.guid=b.guid++),(u=v.events)||(u=v.events={}),(f=v.handle)||(f=v.handle=function(e){return typeof b===i||e&&b.event.triggered===e.type?t:b.event.dispatch.apply(f.elem,arguments)},f.elem=e),n=(n||"").match(w)||[""],l=n.length;while(l--)s=rt.exec(n[l])||[],g=y=s[1],m=(s[2]||"").split(".").sort(),p=b.event.special[g]||{},g=(a?p.delegateType:p.bindType)||g,p=b.event.special[g]||{},d=b.extend({type:g,origType:y,data:o,handler:r,guid:r.guid,selector:a,needsContext:a&&b.expr.match.needsContext.test(a),namespace:m.join(".")},c),(h=u[g])||(h=u[g]=[],h.delegateCount=0,p.setup&&p.setup.call(e,o,m,f)!==!1||(e.addEventListener?e.addEventListener(g,f,!1):e.attachEvent&&e.attachEvent("on"+g,f))),p.add&&(p.add.call(e,d),d.handler.guid||(d.handler.guid=r.guid)),a?h.splice(h.delegateCount++,0,d):h.push(d),b.event.global[g]=!0;e=null}},remove:function(e,t,n,r,i){var o,a,s,u,l,c,p,f,d,h,g,m=b.hasData(e)&&b._data(e);if(m&&(c=m.events)){t=(t||"").match(w)||[""],l=t.length;while(l--)if(s=rt.exec(t[l])||[],d=g=s[1],h=(s[2]||"").split(".").sort(),d){p=b.event.special[d]||{},d=(r?p.delegateType:p.bindType)||d,f=c[d]||[],s=s[2]&&RegExp("(^|\\.)"+h.join("\\.(?:.*\\.|)")+"(\\.|$)"),u=o=f.length;while(o--)a=f[o],!i&&g!==a.origType||n&&n.guid!==a.guid||s&&!s.test(a.namespace)||r&&r!==a.selector&&("**"!==r||!a.selector)||(f.splice(o,1),a.selector&&f.delegateCount--,p.remove&&p.remove.call(e,a));u&&!f.length&&(p.teardown&&p.teardown.call(e,h,m.handle)!==!1||b.removeEvent(e,d,m.handle),delete c[d])}else for(d in c)b.event.remove(e,d+t[l],n,r,!0);b.isEmptyObject(c)&&(delete m.handle,b._removeData(e,"events"))}},trigger:function(n,r,i,a){var s,u,l,c,p,f,d,h=[i||o],g=y.call(n,"type")?n.type:n,m=y.call(n,"namespace")?n.namespace.split("."):[];if(l=f=i=i||o,3!==i.nodeType&&8!==i.nodeType&&!nt.test(g+b.event.triggered)&&(g.indexOf(".")>=0&&(m=g.split("."),g=m.shift(),m.sort()),u=0>g.indexOf(":")&&"on"+g,n=n[b.expando]?n:new b.Event(g,"object"==typeof n&&n),n.isTrigger=!0,n.namespace=m.join("."),n.namespace_re=n.namespace?RegExp("(^|\\.)"+m.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,n.result=t,n.target||(n.target=i),r=null==r?[n]:b.makeArray(r,[n]),p=b.event.special[g]||{},a||!p.trigger||p.trigger.apply(i,r)!==!1)){if(!a&&!p.noBubble&&!b.isWindow(i)){for(c=p.delegateType||g,nt.test(c+g)||(l=l.parentNode);l;l=l.parentNode)h.push(l),f=l;f===(i.ownerDocument||o)&&h.push(f.defaultView||f.parentWindow||e)}d=0;while((l=h[d++])&&!n.isPropagationStopped())n.type=d>1?c:p.bindType||g,s=(b._data(l,"events")||{})[n.type]&&b._data(l,"handle"),s&&s.apply(l,r),s=u&&l[u],s&&b.acceptData(l)&&s.apply&&s.apply(l,r)===!1&&n.preventDefault();if(n.type=g,!(a||n.isDefaultPrevented()||p._default&&p._default.apply(i.ownerDocument,r)!==!1||"click"===g&&b.nodeName(i,"a")||!b.acceptData(i)||!u||!i[g]||b.isWindow(i))){f=i[u],f&&(i[u]=null),b.event.triggered=g;try{i[g]()}catch(v){}b.event.triggered=t,f&&(i[u]=f)}return n.result}},dispatch:function(e){e=b.event.fix(e);var 
n,r,i,o,a,s=[],u=h.call(arguments),l=(b._data(this,"events")||{})[e.type]||[],c=b.event.special[e.type]||{};if(u[0]=e,e.delegateTarget=this,!c.preDispatch||c.preDispatch.call(this,e)!==!1){s=b.event.handlers.call(this,e,l),n=0;while((o=s[n++])&&!e.isPropagationStopped()){e.currentTarget=o.elem,a=0;while((i=o.handlers[a++])&&!e.isImmediatePropagationStopped())(!e.namespace_re||e.namespace_re.test(i.namespace))&&(e.handleObj=i,e.data=i.data,r=((b.event.special[i.origType]||{}).handle||i.handler).apply(o.elem,u),r!==t&&(e.result=r)===!1&&(e.preventDefault(),e.stopPropagation()))}return c.postDispatch&&c.postDispatch.call(this,e),e.result}},handlers:function(e,n){var r,i,o,a,s=[],u=n.delegateCount,l=e.target;if(u&&l.nodeType&&(!e.button||"click"!==e.type))for(;l!=this;l=l.parentNode||this)if(1===l.nodeType&&(l.disabled!==!0||"click"!==e.type)){for(o=[],a=0;u>a;a++)i=n[a],r=i.selector+" ",o[r]===t&&(o[r]=i.needsContext?b(r,this).index(l)>=0:b.find(r,this,null,[l]).length),o[r]&&o.push(i);o.length&&s.push({elem:l,handlers:o})}return n.length>u&&s.push({elem:this,handlers:n.slice(u)}),s},fix:function(e){if(e[b.expando])return e;var t,n,r,i=e.type,a=e,s=this.fixHooks[i];s||(this.fixHooks[i]=s=tt.test(i)?this.mouseHooks:et.test(i)?this.keyHooks:{}),r=s.props?this.props.concat(s.props):this.props,e=new b.Event(a),t=r.length;while(t--)n=r[t],e[n]=a[n];return e.target||(e.target=a.srcElement||o),3===e.target.nodeType&&(e.target=e.target.parentNode),e.metaKey=!!e.metaKey,s.filter?s.filter(e,a):e},props:"altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return null==e.which&&(e.which=null!=t.charCode?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(e,n){var r,i,a,s=n.button,u=n.fromElement;return null==e.pageX&&null!=n.clientX&&(i=e.target.ownerDocument||o,a=i.documentElement,r=i.body,e.pageX=n.clientX+(a&&a.scrollLeft||r&&r.scrollLeft||0)-(a&&a.clientLeft||r&&r.clientLeft||0),e.pageY=n.clientY+(a&&a.scrollTop||r&&r.scrollTop||0)-(a&&a.clientTop||r&&r.clientTop||0)),!e.relatedTarget&&u&&(e.relatedTarget=u===e.target?n.toElement:u),e.which||s===t||(e.which=1&s?1:2&s?3:4&s?2:0),e}},special:{load:{noBubble:!0},click:{trigger:function(){return b.nodeName(this,"input")&&"checkbox"===this.type&&this.click?(this.click(),!1):t}},focus:{trigger:function(){if(this!==o.activeElement&&this.focus)try{return this.focus(),!1}catch(e){}},delegateType:"focusin"},blur:{trigger:function(){return this===o.activeElement&&this.blur?(this.blur(),!1):t},delegateType:"focusout"},beforeunload:{postDispatch:function(e){e.result!==t&&(e.originalEvent.returnValue=e.result)}}},simulate:function(e,t,n,r){var i=b.extend(new b.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?b.event.trigger(i,null,t):b.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},b.removeEvent=o.removeEventListener?function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)}:function(e,t,n){var r="on"+t;e.detachEvent&&(typeof e[r]===i&&(e[r]=null),e.detachEvent(r,n))},b.Event=function(e,n){return this instanceof 
b.Event?(e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.returnValue===!1||e.getPreventDefault&&e.getPreventDefault()?it:ot):this.type=e,n&&b.extend(this,n),this.timeStamp=e&&e.timeStamp||b.now(),this[b.expando]=!0,t):new b.Event(e,n)},b.Event.prototype={isDefaultPrevented:ot,isPropagationStopped:ot,isImmediatePropagationStopped:ot,preventDefault:function(){var e=this.originalEvent;this.isDefaultPrevented=it,e&&(e.preventDefault?e.preventDefault():e.returnValue=!1)},stopPropagation:function(){var e=this.originalEvent;this.isPropagationStopped=it,e&&(e.stopPropagation&&e.stopPropagation(),e.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=it,this.stopPropagation()}},b.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){b.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,o=e.handleObj;
return(!i||i!==r&&!b.contains(r,i))&&(e.type=o.origType,n=o.handler.apply(this,arguments),e.type=t),n}}}),b.support.submitBubbles||(b.event.special.submit={setup:function(){return b.nodeName(this,"form")?!1:(b.event.add(this,"click._submit keypress._submit",function(e){var n=e.target,r=b.nodeName(n,"input")||b.nodeName(n,"button")?n.form:t;r&&!b._data(r,"submitBubbles")&&(b.event.add(r,"submit._submit",function(e){e._submit_bubble=!0}),b._data(r,"submitBubbles",!0))}),t)},postDispatch:function(e){e._submit_bubble&&(delete e._submit_bubble,this.parentNode&&!e.isTrigger&&b.event.simulate("submit",this.parentNode,e,!0))},teardown:function(){return b.nodeName(this,"form")?!1:(b.event.remove(this,"._submit"),t)}}),b.support.changeBubbles||(b.event.special.change={setup:function(){return Z.test(this.nodeName)?(("checkbox"===this.type||"radio"===this.type)&&(b.event.add(this,"propertychange._change",function(e){"checked"===e.originalEvent.propertyName&&(this._just_changed=!0)}),b.event.add(this,"click._change",function(e){this._just_changed&&!e.isTrigger&&(this._just_changed=!1),b.event.simulate("change",this,e,!0)})),!1):(b.event.add(this,"beforeactivate._change",function(e){var t=e.target;Z.test(t.nodeName)&&!b._data(t,"changeBubbles")&&(b.event.add(t,"change._change",function(e){!this.parentNode||e.isSimulated||e.isTrigger||b.event.simulate("change",this.parentNode,e,!0)}),b._data(t,"changeBubbles",!0))}),t)},handle:function(e){var n=e.target;return this!==n||e.isSimulated||e.isTrigger||"radio"!==n.type&&"checkbox"!==n.type?e.handleObj.handler.apply(this,arguments):t},teardown:function(){return b.event.remove(this,"._change"),!Z.test(this.nodeName)}}),b.support.focusinBubbles||b.each({focus:"focusin",blur:"focusout"},function(e,t){var n=0,r=function(e){b.event.simulate(t,e.target,b.event.fix(e),!0)};b.event.special[t]={setup:function(){0===n++&&o.addEventListener(e,r,!0)},teardown:function(){0===--n&&o.removeEventListener(e,r,!0)}}}),b.fn.extend({on:function(e,n,r,i,o){var a,s;if("object"==typeof e){"string"!=typeof n&&(r=r||n,n=t);for(a in e)this.on(a,n,r,e[a],o);return this}if(null==r&&null==i?(i=n,r=n=t):null==i&&("string"==typeof n?(i=r,r=t):(i=r,r=n,n=t)),i===!1)i=ot;else if(!i)return this;return 1===o&&(s=i,i=function(e){return b().off(e),s.apply(this,arguments)},i.guid=s.guid||(s.guid=b.guid++)),this.each(function(){b.event.add(this,e,i,r,n)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,n,r){var i,o;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,b(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if("object"==typeof e){for(o in e)this.off(o,n,e[o]);return this}return(n===!1||"function"==typeof n)&&(r=n,n=t),r===!1&&(r=ot),this.each(function(){b.event.remove(this,e,r,n)})},bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},trigger:function(e,t){return this.each(function(){b.event.trigger(e,t,this)})},triggerHandler:function(e,n){var r=this[0];return r?b.event.trigger(e,n,r,!0):t}}),function(e,t){var n,r,i,o,a,s,u,l,c,p,f,d,h,g,m,y,v,x="sizzle"+-new Date,w=e.document,T={},N=0,C=0,k=it(),E=it(),S=it(),A=typeof t,j=1<<31,D=[],L=D.pop,H=D.push,q=D.slice,M=D.indexOf||function(e){var t=0,n=this.length;for(;n>t;t++)if(this[t]===e)return 
t;return-1},_="[\\x20\\t\\r\\n\\f]",F="(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",O=F.replace("w","w#"),B="([*^$|!~]?=)",P="\\["+_+"*("+F+")"+_+"*(?:"+B+_+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+O+")|)|)"+_+"*\\]",R=":("+F+")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|"+P.replace(3,8)+")*)|.*)\\)|)",W=RegExp("^"+_+"+|((?:^|[^\\\\])(?:\\\\.)*)"+_+"+$","g"),$=RegExp("^"+_+"*,"+_+"*"),I=RegExp("^"+_+"*([\\x20\\t\\r\\n\\f>+~])"+_+"*"),z=RegExp(R),X=RegExp("^"+O+"$"),U={ID:RegExp("^#("+F+")"),CLASS:RegExp("^\\.("+F+")"),NAME:RegExp("^\\[name=['\"]?("+F+")['\"]?\\]"),TAG:RegExp("^("+F.replace("w","w*")+")"),ATTR:RegExp("^"+P),PSEUDO:RegExp("^"+R),CHILD:RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+_+"*(even|odd|(([+-]|)(\\d*)n|)"+_+"*(?:([+-]|)"+_+"*(\\d+)|))"+_+"*\\)|)","i"),needsContext:RegExp("^"+_+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+_+"*((?:-\\d)?\\d*)"+_+"*\\)|)(?=[^-]|$)","i")},V=/[\x20\t\r\n\f]*[+~]/,Y=/^[^{]+\{\s*\[native code/,J=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,G=/^(?:input|select|textarea|button)$/i,Q=/^h\d$/i,K=/'|\\/g,Z=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,et=/\\([\da-fA-F]{1,6}[\x20\t\r\n\f]?|.)/g,tt=function(e,t){var n="0x"+t-65536;return n!==n?t:0>n?String.fromCharCode(n+65536):String.fromCharCode(55296|n>>10,56320|1023&n)};try{q.call(w.documentElement.childNodes,0)[0].nodeType}catch(nt){q=function(e){var t,n=[];while(t=this[e++])n.push(t);return n}}function rt(e){return Y.test(e+"")}function it(){var e,t=[];return e=function(n,r){return t.push(n+=" ")>i.cacheLength&&delete e[t.shift()],e[n]=r}}function ot(e){return e[x]=!0,e}function at(e){var t=p.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}}function st(e,t,n,r){var i,o,a,s,u,l,f,g,m,v;if((t?t.ownerDocument||t:w)!==p&&c(t),t=t||p,n=n||[],!e||"string"!=typeof e)return n;if(1!==(s=t.nodeType)&&9!==s)return[];if(!d&&!r){if(i=J.exec(e))if(a=i[1]){if(9===s){if(o=t.getElementById(a),!o||!o.parentNode)return n;if(o.id===a)return n.push(o),n}else if(t.ownerDocument&&(o=t.ownerDocument.getElementById(a))&&y(t,o)&&o.id===a)return n.push(o),n}else{if(i[2])return H.apply(n,q.call(t.getElementsByTagName(e),0)),n;if((a=i[3])&&T.getByClassName&&t.getElementsByClassName)return H.apply(n,q.call(t.getElementsByClassName(a),0)),n}if(T.qsa&&!h.test(e)){if(f=!0,g=x,m=t,v=9===s&&e,1===s&&"object"!==t.nodeName.toLowerCase()){l=ft(e),(f=t.getAttribute("id"))?g=f.replace(K,"\\$&"):t.setAttribute("id",g),g="[id='"+g+"'] ",u=l.length;while(u--)l[u]=g+dt(l[u]);m=V.test(e)&&t.parentNode||t,v=l.join(",")}if(v)try{return H.apply(n,q.call(m.querySelectorAll(v),0)),n}catch(b){}finally{f||t.removeAttribute("id")}}}return wt(e.replace(W,"$1"),t,n,r)}a=st.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?"HTML"!==t.nodeName:!1},c=st.setDocument=function(e){var n=e?e.ownerDocument||e:w;return n!==p&&9===n.nodeType&&n.documentElement?(p=n,f=n.documentElement,d=a(n),T.tagNameNoComments=at(function(e){return e.appendChild(n.createComment("")),!e.getElementsByTagName("*").length}),T.attributes=at(function(e){e.innerHTML="<select></select>";var t=typeof e.lastChild.getAttribute("multiple");return"boolean"!==t&&"string"!==t}),T.getByClassName=at(function(e){return e.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",e.getElementsByClassName&&e.getElementsByClassName("e").length?(e.lastChild.className="e",2===e.getElementsByClassName("e").length):!1}),T.getByName=at(function(e){e.id=x+0,e.innerHTML="<a name='"+x+"'></a><div 
name='"+x+"'></div>",f.insertBefore(e,f.firstChild);var t=n.getElementsByName&&n.getElementsByName(x).length===2+n.getElementsByName(x+0).length;return T.getIdNotName=!n.getElementById(x),f.removeChild(e),t}),i.attrHandle=at(function(e){return e.innerHTML="<a href='#'></a>",e.firstChild&&typeof e.firstChild.getAttribute!==A&&"#"===e.firstChild.getAttribute("href")})?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},T.getIdNotName?(i.find.ID=function(e,t){if(typeof t.getElementById!==A&&!d){var n=t.getElementById(e);return n&&n.parentNode?[n]:[]}},i.filter.ID=function(e){var t=e.replace(et,tt);return function(e){return e.getAttribute("id")===t}}):(i.find.ID=function(e,n){if(typeof n.getElementById!==A&&!d){var r=n.getElementById(e);return r?r.id===e||typeof r.getAttributeNode!==A&&r.getAttributeNode("id").value===e?[r]:t:[]}},i.filter.ID=function(e){var t=e.replace(et,tt);return function(e){var n=typeof e.getAttributeNode!==A&&e.getAttributeNode("id");return n&&n.value===t}}),i.find.TAG=T.tagNameNoComments?function(e,n){return typeof n.getElementsByTagName!==A?n.getElementsByTagName(e):t}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},i.find.NAME=T.getByName&&function(e,n){return typeof n.getElementsByName!==A?n.getElementsByName(name):t},i.find.CLASS=T.getByClassName&&function(e,n){return typeof n.getElementsByClassName===A||d?t:n.getElementsByClassName(e)},g=[],h=[":focus"],(T.qsa=rt(n.querySelectorAll))&&(at(function(e){e.innerHTML="<select><option selected=''></option></select>",e.querySelectorAll("[selected]").length||h.push("\\["+_+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||h.push(":checked")}),at(function(e){e.innerHTML="<input type='hidden' i=''/>",e.querySelectorAll("[i^='']").length&&h.push("[*^$]="+_+"*(?:\"\"|'')"),e.querySelectorAll(":enabled").length||h.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),h.push(",.*:")})),(T.matchesSelector=rt(m=f.matchesSelector||f.mozMatchesSelector||f.webkitMatchesSelector||f.oMatchesSelector||f.msMatchesSelector))&&at(function(e){T.disconnectedMatch=m.call(e,"div"),m.call(e,"[s!='']:x"),g.push("!=",R)}),h=RegExp(h.join("|")),g=RegExp(g.join("|")),y=rt(f.contains)||f.compareDocumentPosition?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},v=f.compareDocumentPosition?function(e,t){var r;return e===t?(u=!0,0):(r=t.compareDocumentPosition&&e.compareDocumentPosition&&e.compareDocumentPosition(t))?1&r||e.parentNode&&11===e.parentNode.nodeType?e===n||y(w,e)?-1:t===n||y(w,t)?1:0:4&r?-1:1:e.compareDocumentPosition?-1:1}:function(e,t){var r,i=0,o=e.parentNode,a=t.parentNode,s=[e],l=[t];if(e===t)return u=!0,0;if(!o||!a)return e===n?-1:t===n?1:o?-1:a?1:0;if(o===a)return ut(e,t);r=e;while(r=r.parentNode)s.unshift(r);r=t;while(r=r.parentNode)l.unshift(r);while(s[i]===l[i])i++;return i?ut(s[i],l[i]):s[i]===w?-1:l[i]===w?1:0},u=!1,[0,0].sort(v),T.detectDuplicates=u,p):p},st.matches=function(e,t){return st(e,null,null,t)},st.matchesSelector=function(e,t){if((e.ownerDocument||e)!==p&&c(e),t=t.replace(Z,"='$1']"),!(!T.matchesSelector||d||g&&g.test(t)||h.test(t)))try{var 
n=m.call(e,t);if(n||T.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(r){}return st(t,p,null,[e]).length>0},st.contains=function(e,t){return(e.ownerDocument||e)!==p&&c(e),y(e,t)},st.attr=function(e,t){var n;return(e.ownerDocument||e)!==p&&c(e),d||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):d||T.attributes?e.getAttribute(t):((n=e.getAttributeNode(t))||e.getAttribute(t))&&e[t]===!0?t:n&&n.specified?n.value:null},st.error=function(e){throw Error("Syntax error, unrecognized expression: "+e)},st.uniqueSort=function(e){var t,n=[],r=1,i=0;if(u=!T.detectDuplicates,e.sort(v),u){for(;t=e[r];r++)t===e[r-1]&&(i=n.push(r));while(i--)e.splice(n[i],1)}return e};function ut(e,t){var n=t&&e,r=n&&(~t.sourceIndex||j)-(~e.sourceIndex||j);if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function lt(e){return function(t){var n=t.nodeName.toLowerCase();return"input"===n&&t.type===e}}function ct(e){return function(t){var n=t.nodeName.toLowerCase();return("input"===n||"button"===n)&&t.type===e}}function pt(e){return ot(function(t){return t=+t,ot(function(n,r){var i,o=e([],n.length,t),a=o.length;while(a--)n[i=o[a]]&&(n[i]=!(r[i]=n[i]))})})}o=st.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(1===i||9===i||11===i){if("string"==typeof e.textContent)return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=o(e)}else if(3===i||4===i)return e.nodeValue}else for(;t=e[r];r++)n+=o(t);return n},i=st.selectors={cacheLength:50,createPseudo:ot,match:U,find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(et,tt),e[3]=(e[4]||e[5]||"").replace(et,tt),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||st.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&st.error(e[0]),e},PSEUDO:function(e){var t,n=!e[5]&&e[2];return U.CHILD.test(e[0])?null:(e[4]?e[2]=e[4]:n&&z.test(n)&&(t=ft(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){return"*"===e?function(){return!0}:(e=e.replace(et,tt).toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[e+" "];return t||(t=RegExp("(^|"+_+")"+e+"("+_+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==A&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r){var i=st.attr(r,e);return null==i?"!="===t:t?(i+="","="===t?i===n:"!="===t?i!==n:"^="===t?n&&0===i.indexOf(n):"*="===t?n&&i.indexOf(n)>-1:"$="===t?n&&i.slice(-n.length)===n:"~="===t?(" "+i+" ").indexOf(n)>-1:"|="===t?i===n||i.slice(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r,i){var o="nth"!==e.slice(0,3),a="last"!==e.slice(-4),s="of-type"===t;return 1===r&&0===i?function(e){return!!e.parentNode}:function(t,n,u){var l,c,p,f,d,h,g=o!==a?"nextSibling":"previousSibling",m=t.parentNode,y=s&&t.nodeName.toLowerCase(),v=!u&&!s;if(m){if(o){while(g){p=t;while(p=p[g])if(s?p.nodeName.toLowerCase()===y:1===p.nodeType)return!1;h=g="only"===e&&!h&&"nextSibling"}return!0}if(h=[a?m.firstChild:m.lastChild],a&&v){c=m[x]||(m[x]={}),l=c[e]||[],d=l[0]===N&&l[1],f=l[0]===N&&l[2],p=d&&m.childNodes[d];while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if(1===p.nodeType&&++f&&p===t){c[e]=[N,d,f];break}}else if(v&&(l=(t[x]||(t[x]={}))[e])&&l[0]===N)f=l[1];else 
while(p=++d&&p&&p[g]||(f=d=0)||h.pop())if((s?p.nodeName.toLowerCase()===y:1===p.nodeType)&&++f&&(v&&((p[x]||(p[x]={}))[e]=[N,f]),p===t))break;return f-=i,f===r||0===f%r&&f/r>=0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||st.error("unsupported pseudo: "+e);return r[x]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?ot(function(e,n){var i,o=r(e,t),a=o.length;while(a--)i=M.call(e,o[a]),e[i]=!(n[i]=o[a])}):function(e){return r(e,0,n)}):r}},pseudos:{not:ot(function(e){var t=[],n=[],r=s(e.replace(W,"$1"));return r[x]?ot(function(e,t,n,i){var o,a=r(e,null,i,[]),s=e.length;while(s--)(o=a[s])&&(e[s]=!(t[s]=o))}):function(e,i,o){return t[0]=e,r(t,null,o,n),!n.pop()}}),has:ot(function(e){return function(t){return st(e,t).length>0}}),contains:ot(function(e){return function(t){return(t.textContent||t.innerText||o(t)).indexOf(e)>-1}}),lang:ot(function(e){return X.test(e||"")||st.error("unsupported lang: "+e),e=e.replace(et,tt).toLowerCase(),function(t){var n;do if(n=d?t.getAttribute("xml:lang")||t.getAttribute("lang"):t.lang)return n=n.toLowerCase(),n===e||0===n.indexOf(e+"-");while((t=t.parentNode)&&1===t.nodeType);return!1}}),target:function(t){var n=e.location&&e.location.hash;return n&&n.slice(1)===t.id},root:function(e){return e===f},focus:function(e){return e===p.activeElement&&(!p.hasFocus||p.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&!!e.checked||"option"===t&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},empty:function(e){for(e=e.firstChild;e;e=e.nextSibling)if(e.nodeName>"@"||3===e.nodeType||4===e.nodeType)return!1;return!0},parent:function(e){return!i.pseudos.empty(e)},header:function(e){return Q.test(e.nodeName)},input:function(e){return G.test(e.nodeName)},button:function(e){var t=e.nodeName.toLowerCase();return"input"===t&&"button"===e.type||"button"===t},text:function(e){var t;return"input"===e.nodeName.toLowerCase()&&"text"===e.type&&(null==(t=e.getAttribute("type"))||t.toLowerCase()===e.type)},first:pt(function(){return[0]}),last:pt(function(e,t){return[t-1]}),eq:pt(function(e,t,n){return[0>n?n+t:n]}),even:pt(function(e,t){var n=0;for(;t>n;n+=2)e.push(n);return e}),odd:pt(function(e,t){var n=1;for(;t>n;n+=2)e.push(n);return e}),lt:pt(function(e,t,n){var r=0>n?n+t:n;for(;--r>=0;)e.push(r);return e}),gt:pt(function(e,t,n){var r=0>n?n+t:n;for(;t>++r;)e.push(r);return e})}};for(n in{radio:!0,checkbox:!0,file:!0,password:!0,image:!0})i.pseudos[n]=lt(n);for(n in{submit:!0,reset:!0})i.pseudos[n]=ct(n);function ft(e,t){var n,r,o,a,s,u,l,c=E[e+" "];if(c)return t?0:c.slice(0);s=e,u=[],l=i.preFilter;while(s){(!n||(r=$.exec(s)))&&(r&&(s=s.slice(r[0].length)||s),u.push(o=[])),n=!1,(r=I.exec(s))&&(n=r.shift(),o.push({value:n,type:r[0].replace(W," ")}),s=s.slice(n.length));for(a in i.filter)!(r=U[a].exec(s))||l[a]&&!(r=l[a](r))||(n=r.shift(),o.push({value:n,type:a,matches:r}),s=s.slice(n.length));if(!n)break}return t?s.length:s?st.error(e):E(e,u).slice(0)}function dt(e){var t=0,n=e.length,r="";for(;n>t;t++)r+=e[t].value;return r}function ht(e,t,n){var i=t.dir,o=n&&"parentNode"===i,a=C++;return t.first?function(t,n,r){while(t=t[i])if(1===t.nodeType||o)return e(t,n,r)}:function(t,n,s){var u,l,c,p=N+" "+a;if(s){while(t=t[i])if((1===t.nodeType||o)&&e(t,n,s))return!0}else 
while(t=t[i])if(1===t.nodeType||o)if(c=t[x]||(t[x]={}),(l=c[i])&&l[0]===p){if((u=l[1])===!0||u===r)return u===!0}else if(l=c[i]=[p],l[1]=e(t,n,s)||r,l[1]===!0)return!0}}function gt(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function mt(e,t,n,r,i){var o,a=[],s=0,u=e.length,l=null!=t;for(;u>s;s++)(o=e[s])&&(!n||n(o,r,i))&&(a.push(o),l&&t.push(s));return a}function yt(e,t,n,r,i,o){return r&&!r[x]&&(r=yt(r)),i&&!i[x]&&(i=yt(i,o)),ot(function(o,a,s,u){var l,c,p,f=[],d=[],h=a.length,g=o||xt(t||"*",s.nodeType?[s]:s,[]),m=!e||!o&&t?g:mt(g,f,e,s,u),y=n?i||(o?e:h||r)?[]:a:m;if(n&&n(m,y,s,u),r){l=mt(y,d),r(l,[],s,u),c=l.length;while(c--)(p=l[c])&&(y[d[c]]=!(m[d[c]]=p))}if(o){if(i||e){if(i){l=[],c=y.length;while(c--)(p=y[c])&&l.push(m[c]=p);i(null,y=[],l,u)}c=y.length;while(c--)(p=y[c])&&(l=i?M.call(o,p):f[c])>-1&&(o[l]=!(a[l]=p))}}else y=mt(y===a?y.splice(h,y.length):y),i?i(null,a,y,u):H.apply(a,y)})}function vt(e){var t,n,r,o=e.length,a=i.relative[e[0].type],s=a||i.relative[" "],u=a?1:0,c=ht(function(e){return e===t},s,!0),p=ht(function(e){return M.call(t,e)>-1},s,!0),f=[function(e,n,r){return!a&&(r||n!==l)||((t=n).nodeType?c(e,n,r):p(e,n,r))}];for(;o>u;u++)if(n=i.relative[e[u].type])f=[ht(gt(f),n)];else{if(n=i.filter[e[u].type].apply(null,e[u].matches),n[x]){for(r=++u;o>r;r++)if(i.relative[e[r].type])break;return yt(u>1&>(f),u>1&&dt(e.slice(0,u-1)).replace(W,"$1"),n,r>u&&vt(e.slice(u,r)),o>r&&vt(e=e.slice(r)),o>r&&dt(e))}f.push(n)}return gt(f)}function bt(e,t){var n=0,o=t.length>0,a=e.length>0,s=function(s,u,c,f,d){var h,g,m,y=[],v=0,b="0",x=s&&[],w=null!=d,T=l,C=s||a&&i.find.TAG("*",d&&u.parentNode||u),k=N+=null==T?1:Math.random()||.1;for(w&&(l=u!==p&&u,r=n);null!=(h=C[b]);b++){if(a&&h){g=0;while(m=e[g++])if(m(h,u,c)){f.push(h);break}w&&(N=k,r=++n)}o&&((h=!m&&h)&&v--,s&&x.push(h))}if(v+=b,o&&b!==v){g=0;while(m=t[g++])m(x,y,u,c);if(s){if(v>0)while(b--)x[b]||y[b]||(y[b]=L.call(f));y=mt(y)}H.apply(f,y),w&&!s&&y.length>0&&v+t.length>1&&st.uniqueSort(f)}return w&&(N=k,l=T),x};return o?ot(s):s}s=st.compile=function(e,t){var n,r=[],i=[],o=S[e+" "];if(!o){t||(t=ft(e)),n=t.length;while(n--)o=vt(t[n]),o[x]?r.push(o):i.push(o);o=S(e,bt(i,r))}return o};function xt(e,t,n){var r=0,i=t.length;for(;i>r;r++)st(e,t[r],n);return n}function wt(e,t,n,r){var o,a,u,l,c,p=ft(e);if(!r&&1===p.length){if(a=p[0]=p[0].slice(0),a.length>2&&"ID"===(u=a[0]).type&&9===t.nodeType&&!d&&i.relative[a[1].type]){if(t=i.find.ID(u.matches[0].replace(et,tt),t)[0],!t)return n;e=e.slice(a.shift().value.length)}o=U.needsContext.test(e)?0:a.length;while(o--){if(u=a[o],i.relative[l=u.type])break;if((c=i.find[l])&&(r=c(u.matches[0].replace(et,tt),V.test(a[0].type)&&t.parentNode||t))){if(a.splice(o,1),e=r.length&&dt(a),!e)return H.apply(n,q.call(r,0)),n;break}}}return s(e,p)(r,t,d,n,V.test(e)),n}i.pseudos.nth=i.pseudos.eq;function Tt(){}i.filters=Tt.prototype=i.pseudos,i.setFilters=new Tt,c(),st.attr=b.attr,b.find=st,b.expr=st.selectors,b.expr[":"]=b.expr.pseudos,b.unique=st.uniqueSort,b.text=st.getText,b.isXMLDoc=st.isXML,b.contains=st.contains}(e);var at=/Until$/,st=/^(?:parents|prev(?:Until|All))/,ut=/^.[^:#\[\.,]*$/,lt=b.expr.match.needsContext,ct={children:!0,contents:!0,next:!0,prev:!0};b.fn.extend({find:function(e){var t,n,r,i=this.length;if("string"!=typeof e)return r=this,this.pushStack(b(e).filter(function(){for(t=0;i>t;t++)if(b.contains(r[t],this))return!0}));for(n=[],t=0;i>t;t++)b.find(e,this[t],n);return 
n=this.pushStack(i>1?b.unique(n):n),n.selector=(this.selector?this.selector+" ":"")+e,n},has:function(e){var t,n=b(e,this),r=n.length;return this.filter(function(){for(t=0;r>t;t++)if(b.contains(this,n[t]))return!0})},not:function(e){return this.pushStack(ft(this,e,!1))},filter:function(e){return this.pushStack(ft(this,e,!0))},is:function(e){return!!e&&("string"==typeof e?lt.test(e)?b(e,this.context).index(this[0])>=0:b.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,o=[],a=lt.test(e)||"string"!=typeof e?b(e,t||this.context):0;for(;i>r;r++){n=this[r];while(n&&n.ownerDocument&&n!==t&&11!==n.nodeType){if(a?a.index(n)>-1:b.find.matchesSelector(n,e)){o.push(n);break}n=n.parentNode}}return this.pushStack(o.length>1?b.unique(o):o)},index:function(e){return e?"string"==typeof e?b.inArray(this[0],b(e)):b.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(e,t){var n="string"==typeof e?b(e,t):b.makeArray(e&&e.nodeType?[e]:e),r=b.merge(this.get(),n);return this.pushStack(b.unique(r))},addBack:function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}}),b.fn.andSelf=b.fn.addBack;function pt(e,t){do e=e[t];while(e&&1!==e.nodeType);return e}b.each({parent:function(e){var t=e.parentNode;return t&&11!==t.nodeType?t:null},parents:function(e){return b.dir(e,"parentNode")},parentsUntil:function(e,t,n){return b.dir(e,"parentNode",n)},next:function(e){return pt(e,"nextSibling")},prev:function(e){return pt(e,"previousSibling")},nextAll:function(e){return b.dir(e,"nextSibling")},prevAll:function(e){return b.dir(e,"previousSibling")},nextUntil:function(e,t,n){return b.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return b.dir(e,"previousSibling",n)},siblings:function(e){return b.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return b.sibling(e.firstChild)},contents:function(e){return b.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:b.merge([],e.childNodes)}},function(e,t){b.fn[e]=function(n,r){var i=b.map(this,t,n);return at.test(e)||(r=n),r&&"string"==typeof r&&(i=b.filter(r,i)),i=this.length>1&&!ct[e]?b.unique(i):i,this.length>1&&st.test(e)&&(i=i.reverse()),this.pushStack(i)}}),b.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),1===t.length?b.find.matchesSelector(t[0],e)?[t[0]]:[]:b.find.matches(e,t)},dir:function(e,n,r){var i=[],o=e[n];while(o&&9!==o.nodeType&&(r===t||1!==o.nodeType||!b(o).is(r)))1===o.nodeType&&i.push(o),o=o[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)1===e.nodeType&&e!==t&&n.push(e);return n}});function ft(e,t,n){if(t=t||0,b.isFunction(t))return b.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return b.grep(e,function(e){return e===t===n});if("string"==typeof t){var r=b.grep(e,function(e){return 1===e.nodeType});if(ut.test(t))return b.filter(t,r,!n);t=b.filter(t,r)}return b.grep(e,function(e){return b.inArray(e,t)>=0===n})}function dt(e){var t=ht.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}var ht="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",gt=/ 
jQuery\d+="(?:null|\d+)"/g,mt=RegExp("<(?:"+ht+")[\\s/>]","i"),yt=/^\s+/,vt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,bt=/<([\w:]+)/,xt=/<tbody/i,wt=/<|&#?\w+;/,Tt=/<(?:script|style|link)/i,Nt=/^(?:checkbox|radio)$/i,Ct=/checked\s*(?:[^=]|=\s*.checked.)/i,kt=/^$|\/(?:java|ecma)script/i,Et=/^true\/(.*)/,St=/^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,At={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],area:[1,"<map>","</map>"],param:[1,"<object>","</object>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:b.support.htmlSerialize?[0,"",""]:[1,"X<div>","</div>"]},jt=dt(o),Dt=jt.appendChild(o.createElement("div"));At.optgroup=At.option,At.tbody=At.tfoot=At.colgroup=At.caption=At.thead,At.th=At.td,b.fn.extend({text:function(e){return b.access(this,function(e){return e===t?b.text(this):this.empty().append((this[0]&&this[0].ownerDocument||o).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(b.isFunction(e))return this.each(function(t){b(this).wrapAll(e.call(this,t))});if(this[0]){var t=b(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&1===e.firstChild.nodeType)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return b.isFunction(e)?this.each(function(t){b(this).wrapInner(e.call(this,t))}):this.each(function(){var t=b(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=b.isFunction(e);return this.each(function(n){b(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){b.nodeName(this,"body")||b(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(1===this.nodeType||11===this.nodeType||9===this.nodeType)&&this.insertBefore(e,this.firstChild)})},before:function(){return this.domManip(arguments,!1,function(e){this.parentNode&&this.parentNode.insertBefore(e,this)})},after:function(){return this.domManip(arguments,!1,function(e){this.parentNode&&this.parentNode.insertBefore(e,this.nextSibling)})},remove:function(e,t){var n,r=0;for(;null!=(n=this[r]);r++)(!e||b.filter(e,[n]).length>0)&&(t||1!==n.nodeType||b.cleanData(Ot(n)),n.parentNode&&(t&&b.contains(n.ownerDocument,n)&&Mt(Ot(n,"script")),n.parentNode.removeChild(n)));return this},empty:function(){var e,t=0;for(;null!=(e=this[t]);t++){1===e.nodeType&&b.cleanData(Ot(e,!1));while(e.firstChild)e.removeChild(e.firstChild);e.options&&b.nodeName(e,"select")&&(e.options.length=0)}return this},clone:function(e,t){return e=null==e?!1:e,t=null==t?e:t,this.map(function(){return b.clone(this,e,t)})},html:function(e){return b.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return 1===n.nodeType?n.innerHTML.replace(gt,""):t;if(!("string"!=typeof e||Tt.test(e)||!b.support.htmlSerialize&&mt.test(e)||!b.support.leadingWhitespace&&yt.test(e)||At[(bt.exec(e)||["",""])[1].toLowerCase()])){e=e.replace(vt,"<$1></$2>");try{for(;i>r;r++)n=this[r]||{},1===n.nodeType&&(b.cleanData(Ot(n,!1)),n.innerHTML=e);n=0}catch(o){}}n&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(e){var t=b.isFunction(e);return 
t||"string"==typeof e||(e=b(e).not(this).detach()),this.domManip([e],!0,function(e){var t=this.nextSibling,n=this.parentNode;n&&(b(this).remove(),n.insertBefore(e,t))})},detach:function(e){return this.remove(e,!0)},domManip:function(e,n,r){e=f.apply([],e);var i,o,a,s,u,l,c=0,p=this.length,d=this,h=p-1,g=e[0],m=b.isFunction(g);if(m||!(1>=p||"string"!=typeof g||b.support.checkClone)&&Ct.test(g))return this.each(function(i){var o=d.eq(i);m&&(e[0]=g.call(this,i,n?o.html():t)),o.domManip(e,n,r)});if(p&&(l=b.buildFragment(e,this[0].ownerDocument,!1,this),i=l.firstChild,1===l.childNodes.length&&(l=i),i)){for(n=n&&b.nodeName(i,"tr"),s=b.map(Ot(l,"script"),Ht),a=s.length;p>c;c++)o=l,c!==h&&(o=b.clone(o,!0,!0),a&&b.merge(s,Ot(o,"script"))),r.call(n&&b.nodeName(this[c],"table")?Lt(this[c],"tbody"):this[c],o,c);if(a)for(u=s[s.length-1].ownerDocument,b.map(s,qt),c=0;a>c;c++)o=s[c],kt.test(o.type||"")&&!b._data(o,"globalEval")&&b.contains(u,o)&&(o.src?b.ajax({url:o.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):b.globalEval((o.text||o.textContent||o.innerHTML||"").replace(St,"")));l=i=null}return this}});function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function Ht(e){var t=e.getAttributeNode("type");return e.type=(t&&t.specified)+"/"+e.type,e}function qt(e){var t=Et.exec(e.type);return t?e.type=t[1]:e.removeAttribute("type"),e}function Mt(e,t){var n,r=0;for(;null!=(n=e[r]);r++)b._data(n,"globalEval",!t||b._data(t[r],"globalEval"))}function _t(e,t){if(1===t.nodeType&&b.hasData(e)){var n,r,i,o=b._data(e),a=b._data(t,o),s=o.events;if(s){delete a.handle,a.events={};for(n in s)for(r=0,i=s[n].length;i>r;r++)b.event.add(t,n,s[n][r])}a.data&&(a.data=b.extend({},a.data))}}function Ft(e,t){var n,r,i;if(1===t.nodeType){if(n=t.nodeName.toLowerCase(),!b.support.noCloneEvent&&t[b.expando]){i=b._data(t);for(r in i.events)b.removeEvent(t,r,i.handle);t.removeAttribute(b.expando)}"script"===n&&t.text!==e.text?(Ht(t).text=e.text,qt(t)):"object"===n?(t.parentNode&&(t.outerHTML=e.outerHTML),b.support.html5Clone&&e.innerHTML&&!b.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):"input"===n&&Nt.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):"option"===n?t.defaultSelected=t.selected=e.defaultSelected:("input"===n||"textarea"===n)&&(t.defaultValue=e.defaultValue)}}b.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){b.fn[e]=function(e){var n,r=0,i=[],o=b(e),a=o.length-1;for(;a>=r;r++)n=r===a?this:this.clone(!0),b(o[r])[t](n),d.apply(i,n.get());return this.pushStack(i)}});function Ot(e,n){var r,o,a=0,s=typeof e.getElementsByTagName!==i?e.getElementsByTagName(n||"*"):typeof e.querySelectorAll!==i?e.querySelectorAll(n||"*"):t;if(!s)for(s=[],r=e.childNodes||e;null!=(o=r[a]);a++)!n||b.nodeName(o,n)?s.push(o):b.merge(s,Ot(o,n));return n===t||n&&b.nodeName(e,n)?b.merge([e],s):s}function Bt(e){Nt.test(e.type)&&(e.defaultChecked=e.checked)}b.extend({clone:function(e,t,n){var r,i,o,a,s,u=b.contains(e.ownerDocument,e);if(b.support.html5Clone||b.isXMLDoc(e)||!mt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(Dt.innerHTML=e.outerHTML,Dt.removeChild(o=Dt.firstChild)),!(b.support.noCloneEvent&&b.support.noCloneChecked||1!==e.nodeType&&11!==e.nodeType||b.isXMLDoc(e)))for(r=Ot(o),s=Ot(e),a=0;null!=(i=s[a]);++a)r[a]&&Ft(i,r[a]);if(t)if(n)for(s=s||Ot(e),r=r||Ot(o),a=0;null!=(i=s[a]);a++)_t(i,r[a]);else _t(e,o);return 
r=Ot(o,"script"),r.length>0&&Mt(r,!u&&Ot(e,"script")),r=s=i=null,o},buildFragment:function(e,t,n,r){var i,o,a,s,u,l,c,p=e.length,f=dt(t),d=[],h=0;for(;p>h;h++)if(o=e[h],o||0===o)if("object"===b.type(o))b.merge(d,o.nodeType?[o]:o);else if(wt.test(o)){s=s||f.appendChild(t.createElement("div")),u=(bt.exec(o)||["",""])[1].toLowerCase(),c=At[u]||At._default,s.innerHTML=c[1]+o.replace(vt,"<$1></$2>")+c[2],i=c[0];while(i--)s=s.lastChild;if(!b.support.leadingWhitespace&&yt.test(o)&&d.push(t.createTextNode(yt.exec(o)[0])),!b.support.tbody){o="table"!==u||xt.test(o)?"<table>"!==c[1]||xt.test(o)?0:s:s.firstChild,i=o&&o.childNodes.length;while(i--)b.nodeName(l=o.childNodes[i],"tbody")&&!l.childNodes.length&&o.removeChild(l)
-}b.merge(d,s.childNodes),s.textContent="";while(s.firstChild)s.removeChild(s.firstChild);s=f.lastChild}else d.push(t.createTextNode(o));s&&f.removeChild(s),b.support.appendChecked||b.grep(Ot(d,"input"),Bt),h=0;while(o=d[h++])if((!r||-1===b.inArray(o,r))&&(a=b.contains(o.ownerDocument,o),s=Ot(f.appendChild(o),"script"),a&&Mt(s),n)){i=0;while(o=s[i++])kt.test(o.type||"")&&n.push(o)}return s=null,f},cleanData:function(e,t){var n,r,o,a,s=0,u=b.expando,l=b.cache,p=b.support.deleteExpando,f=b.event.special;for(;null!=(n=e[s]);s++)if((t||b.acceptData(n))&&(o=n[u],a=o&&l[o])){if(a.events)for(r in a.events)f[r]?b.event.remove(n,r):b.removeEvent(n,r,a.handle);l[o]&&(delete l[o],p?delete n[u]:typeof n.removeAttribute!==i?n.removeAttribute(u):n[u]=null,c.push(o))}}});var Pt,Rt,Wt,$t=/alpha\([^)]*\)/i,It=/opacity\s*=\s*([^)]*)/,zt=/^(top|right|bottom|left)$/,Xt=/^(none|table(?!-c[ea]).+)/,Ut=/^margin/,Vt=RegExp("^("+x+")(.*)$","i"),Yt=RegExp("^("+x+")(?!px)[a-z%]+$","i"),Jt=RegExp("^([+-])=("+x+")","i"),Gt={BODY:"block"},Qt={position:"absolute",visibility:"hidden",display:"block"},Kt={letterSpacing:0,fontWeight:400},Zt=["Top","Right","Bottom","Left"],en=["Webkit","O","Moz","ms"];function tn(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=en.length;while(i--)if(t=en[i]+n,t in e)return t;return r}function nn(e,t){return e=t||e,"none"===b.css(e,"display")||!b.contains(e.ownerDocument,e)}function rn(e,t){var n,r,i,o=[],a=0,s=e.length;for(;s>a;a++)r=e[a],r.style&&(o[a]=b._data(r,"olddisplay"),n=r.style.display,t?(o[a]||"none"!==n||(r.style.display=""),""===r.style.display&&nn(r)&&(o[a]=b._data(r,"olddisplay",un(r.nodeName)))):o[a]||(i=nn(r),(n&&"none"!==n||!i)&&b._data(r,"olddisplay",i?n:b.css(r,"display"))));for(a=0;s>a;a++)r=e[a],r.style&&(t&&"none"!==r.style.display&&""!==r.style.display||(r.style.display=t?o[a]||"":"none"));return e}b.fn.extend({css:function(e,n){return b.access(this,function(e,n,r){var i,o,a={},s=0;if(b.isArray(n)){for(o=Rt(e),i=n.length;i>s;s++)a[n[s]]=b.css(e,n[s],!1,o);return a}return r!==t?b.style(e,n,r):b.css(e,n)},e,n,arguments.length>1)},show:function(){return rn(this,!0)},hide:function(){return rn(this)},toggle:function(e){var t="boolean"==typeof e;return this.each(function(){(t?e:nn(this))?b(this).show():b(this).hide()})}}),b.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Wt(e,"opacity");return""===n?"1":n}}}},cssNumber:{columnCount:!0,fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":b.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(e&&3!==e.nodeType&&8!==e.nodeType&&e.style){var o,a,s,u=b.camelCase(n),l=e.style;if(n=b.cssProps[u]||(b.cssProps[u]=tn(l,u)),s=b.cssHooks[n]||b.cssHooks[u],r===t)return s&&"get"in s&&(o=s.get(e,!1,i))!==t?o:l[n];if(a=typeof r,"string"===a&&(o=Jt.exec(r))&&(r=(o[1]+1)*o[2]+parseFloat(b.css(e,n)),a="number"),!(null==r||"number"===a&&isNaN(r)||("number"!==a||b.cssNumber[u]||(r+="px"),b.support.clearCloneStyle||""!==r||0!==n.indexOf("background")||(l[n]="inherit"),s&&"set"in s&&(r=s.set(e,r,i))===t)))try{l[n]=r}catch(c){}}},css:function(e,n,r,i){var o,a,s,u=b.camelCase(n);return n=b.cssProps[u]||(b.cssProps[u]=tn(e.style,u)),s=b.cssHooks[n]||b.cssHooks[u],s&&"get"in s&&(a=s.get(e,!0,r)),a===t&&(a=Wt(e,n,i)),"normal"===a&&n in Kt&&(a=Kt[n]),""===r||r?(o=parseFloat(a),r===!0||b.isNumeric(o)?o||0:a):a},swap:function(e,t,n,r){var i,o,a={};for(o in t)a[o]=e.style[o],e.style[o]=t[o];i=n.apply(e,r||[]);for(o in t)e.style[o]=a[o];return 
i}}),e.getComputedStyle?(Rt=function(t){return e.getComputedStyle(t,null)},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),u=s?s.getPropertyValue(n)||s[n]:t,l=e.style;return s&&(""!==u||b.contains(e.ownerDocument,e)||(u=b.style(e,n)),Yt.test(u)&&Ut.test(n)&&(i=l.width,o=l.minWidth,a=l.maxWidth,l.minWidth=l.maxWidth=l.width=u,u=s.width,l.width=i,l.minWidth=o,l.maxWidth=a)),u}):o.documentElement.currentStyle&&(Rt=function(e){return e.currentStyle},Wt=function(e,n,r){var i,o,a,s=r||Rt(e),u=s?s[n]:t,l=e.style;return null==u&&l&&l[n]&&(u=l[n]),Yt.test(u)&&!zt.test(n)&&(i=l.left,o=e.runtimeStyle,a=o&&o.left,a&&(o.left=e.currentStyle.left),l.left="fontSize"===n?"1em":u,u=l.pixelLeft+"px",l.left=i,a&&(o.left=a)),""===u?"auto":u});function on(e,t,n){var r=Vt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function an(e,t,n,r,i){var o=n===(r?"border":"content")?4:"width"===t?1:0,a=0;for(;4>o;o+=2)"margin"===n&&(a+=b.css(e,n+Zt[o],!0,i)),r?("content"===n&&(a-=b.css(e,"padding"+Zt[o],!0,i)),"margin"!==n&&(a-=b.css(e,"border"+Zt[o]+"Width",!0,i))):(a+=b.css(e,"padding"+Zt[o],!0,i),"padding"!==n&&(a+=b.css(e,"border"+Zt[o]+"Width",!0,i)));return a}function sn(e,t,n){var r=!0,i="width"===t?e.offsetWidth:e.offsetHeight,o=Rt(e),a=b.support.boxSizing&&"border-box"===b.css(e,"boxSizing",!1,o);if(0>=i||null==i){if(i=Wt(e,t,o),(0>i||null==i)&&(i=e.style[t]),Yt.test(i))return i;r=a&&(b.support.boxSizingReliable||i===e.style[t]),i=parseFloat(i)||0}return i+an(e,t,n||(a?"border":"content"),r,o)+"px"}function un(e){var t=o,n=Gt[e];return n||(n=ln(e,t),"none"!==n&&n||(Pt=(Pt||b("<iframe frameborder='0' width='0' height='0'/>").css("cssText","display:block !important")).appendTo(t.documentElement),t=(Pt[0].contentWindow||Pt[0].contentDocument).document,t.write("<!doctype html><html><body>"),t.close(),n=ln(e,t),Pt.detach()),Gt[e]=n),n}function ln(e,t){var n=b(t.createElement(e)).appendTo(t.body),r=b.css(n[0],"display");return n.remove(),r}b.each(["height","width"],function(e,n){b.cssHooks[n]={get:function(e,r,i){return r?0===e.offsetWidth&&Xt.test(b.css(e,"display"))?b.swap(e,Qt,function(){return sn(e,n,i)}):sn(e,n,i):t},set:function(e,t,r){var i=r&&Rt(e);return on(e,t,r?an(e,n,r,b.support.boxSizing&&"border-box"===b.css(e,"boxSizing",!1,i),i):0)}}}),b.support.opacity||(b.cssHooks.opacity={get:function(e,t){return It.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var n=e.style,r=e.currentStyle,i=b.isNumeric(t)?"alpha(opacity="+100*t+")":"",o=r&&r.filter||n.filter||"";n.zoom=1,(t>=1||""===t)&&""===b.trim(o.replace($t,""))&&n.removeAttribute&&(n.removeAttribute("filter"),""===t||r&&!r.filter)||(n.filter=$t.test(o)?o.replace($t,i):o+" "+i)}}),b(function(){b.support.reliableMarginRight||(b.cssHooks.marginRight={get:function(e,n){return n?b.swap(e,{display:"inline-block"},Wt,[e,"marginRight"]):t}}),!b.support.pixelPosition&&b.fn.position&&b.each(["top","left"],function(e,n){b.cssHooks[n]={get:function(e,r){return r?(r=Wt(e,n),Yt.test(r)?b(e).position()[n]+"px":r):t}}})}),b.expr&&b.expr.filters&&(b.expr.filters.hidden=function(e){return 0>=e.offsetWidth&&0>=e.offsetHeight||!b.support.reliableHiddenOffsets&&"none"===(e.style&&e.style.display||b.css(e,"display"))},b.expr.filters.visible=function(e){return!b.expr.filters.hidden(e)}),b.each({margin:"",padding:"",border:"Width"},function(e,t){b.cssHooks[e+t]={expand:function(n){var r=0,i={},o="string"==typeof n?n.split(" "):[n];for(;4>r;r++)i[e+Zt[r]+t]=o[r]||o[r-2]||o[0];return 
i}},Ut.test(e)||(b.cssHooks[e+t].set=on)});var cn=/%20/g,pn=/\[\]$/,fn=/\r?\n/g,dn=/^(?:submit|button|image|reset|file)$/i,hn=/^(?:input|select|textarea|keygen)/i;b.fn.extend({serialize:function(){return b.param(this.serializeArray())},serializeArray:function(){return this.map(function(){var e=b.prop(this,"elements");return e?b.makeArray(e):this}).filter(function(){var e=this.type;return this.name&&!b(this).is(":disabled")&&hn.test(this.nodeName)&&!dn.test(e)&&(this.checked||!Nt.test(e))}).map(function(e,t){var n=b(this).val();return null==n?null:b.isArray(n)?b.map(n,function(e){return{name:t.name,value:e.replace(fn,"\r\n")}}):{name:t.name,value:n.replace(fn,"\r\n")}}).get()}}),b.param=function(e,n){var r,i=[],o=function(e,t){t=b.isFunction(t)?t():null==t?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};if(n===t&&(n=b.ajaxSettings&&b.ajaxSettings.traditional),b.isArray(e)||e.jquery&&!b.isPlainObject(e))b.each(e,function(){o(this.name,this.value)});else for(r in e)gn(r,e[r],n,o);return i.join("&").replace(cn,"+")};function gn(e,t,n,r){var i;if(b.isArray(t))b.each(t,function(t,i){n||pn.test(e)?r(e,i):gn(e+"["+("object"==typeof i?t:"")+"]",i,n,r)});else if(n||"object"!==b.type(t))r(e,t);else for(i in t)gn(e+"["+i+"]",t[i],n,r)}b.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(e,t){b.fn[t]=function(e,n){return arguments.length>0?this.on(t,null,e,n):this.trigger(t)}}),b.fn.hover=function(e,t){return this.mouseenter(e).mouseleave(t||e)};var mn,yn,vn=b.now(),bn=/\?/,xn=/#.*$/,wn=/([?&])_=[^&]*/,Tn=/^(.*?):[ \t]*([^\r\n]*)\r?$/gm,Nn=/^(?:about|app|app-storage|.+-extension|file|res|widget):$/,Cn=/^(?:GET|HEAD)$/,kn=/^\/\//,En=/^([\w.+-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,Sn=b.fn.load,An={},jn={},Dn="*/".concat("*");try{yn=a.href}catch(Ln){yn=o.createElement("a"),yn.href="",yn=yn.href}mn=En.exec(yn.toLowerCase())||[];function Hn(e){return function(t,n){"string"!=typeof t&&(n=t,t="*");var r,i=0,o=t.toLowerCase().match(w)||[];if(b.isFunction(n))while(r=o[i++])"+"===r[0]?(r=r.slice(1)||"*",(e[r]=e[r]||[]).unshift(n)):(e[r]=e[r]||[]).push(n)}}function qn(e,n,r,i){var o={},a=e===jn;function s(u){var l;return o[u]=!0,b.each(e[u]||[],function(e,u){var c=u(n,r,i);return"string"!=typeof c||a||o[c]?a?!(l=c):t:(n.dataTypes.unshift(c),s(c),!1)}),l}return s(n.dataTypes[0])||!o["*"]&&s("*")}function Mn(e,n){var r,i,o=b.ajaxSettings.flatOptions||{};for(i in n)n[i]!==t&&((o[i]?e:r||(r={}))[i]=n[i]);return r&&b.extend(!0,e,r),e}b.fn.load=function(e,n,r){if("string"!=typeof e&&Sn)return Sn.apply(this,arguments);var i,o,a,s=this,u=e.indexOf(" ");return u>=0&&(i=e.slice(u,e.length),e=e.slice(0,u)),b.isFunction(n)?(r=n,n=t):n&&"object"==typeof n&&(a="POST"),s.length>0&&b.ajax({url:e,type:a,dataType:"html",data:n}).done(function(e){o=arguments,s.html(i?b("<div>").append(b.parseHTML(e)).find(i):e)}).complete(r&&function(e,t){s.each(r,o||[e.responseText,t,e])}),this},b.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){b.fn[t]=function(e){return this.on(t,e)}}),b.each(["get","post"],function(e,n){b[n]=function(e,r,i,o){return 
b.isFunction(r)&&(o=o||i,i=r,r=t),b.ajax({url:e,type:n,dataType:o,data:r,success:i})}}),b.extend({active:0,lastModified:{},etag:{},ajaxSettings:{url:yn,type:"GET",isLocal:Nn.test(mn[1]),global:!0,processData:!0,async:!0,contentType:"application/x-www-form-urlencoded; charset=UTF-8",accepts:{"*":Dn,text:"text/plain",html:"text/html",xml:"application/xml, text/xml",json:"application/json, text/javascript"},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":b.parseJSON,"text xml":b.parseXML},flatOptions:{url:!0,context:!0}},ajaxSetup:function(e,t){return t?Mn(Mn(e,b.ajaxSettings),t):Mn(b.ajaxSettings,e)},ajaxPrefilter:Hn(An),ajaxTransport:Hn(jn),ajax:function(e,n){"object"==typeof e&&(n=e,e=t),n=n||{};var r,i,o,a,s,u,l,c,p=b.ajaxSetup({},n),f=p.context||p,d=p.context&&(f.nodeType||f.jquery)?b(f):b.event,h=b.Deferred(),g=b.Callbacks("once memory"),m=p.statusCode||{},y={},v={},x=0,T="canceled",N={readyState:0,getResponseHeader:function(e){var t;if(2===x){if(!c){c={};while(t=Tn.exec(a))c[t[1].toLowerCase()]=t[2]}t=c[e.toLowerCase()]}return null==t?null:t},getAllResponseHeaders:function(){return 2===x?a:null},setRequestHeader:function(e,t){var n=e.toLowerCase();return x||(e=v[n]=v[n]||e,y[e]=t),this},overrideMimeType:function(e){return x||(p.mimeType=e),this},statusCode:function(e){var t;if(e)if(2>x)for(t in e)m[t]=[m[t],e[t]];else N.always(e[N.status]);return this},abort:function(e){var t=e||T;return l&&l.abort(t),k(0,t),this}};if(h.promise(N).complete=g.add,N.success=N.done,N.error=N.fail,p.url=((e||p.url||yn)+"").replace(xn,"").replace(kn,mn[1]+"//"),p.type=n.method||n.type||p.method||p.type,p.dataTypes=b.trim(p.dataType||"*").toLowerCase().match(w)||[""],null==p.crossDomain&&(r=En.exec(p.url.toLowerCase()),p.crossDomain=!(!r||r[1]===mn[1]&&r[2]===mn[2]&&(r[3]||("http:"===r[1]?80:443))==(mn[3]||("http:"===mn[1]?80:443)))),p.data&&p.processData&&"string"!=typeof p.data&&(p.data=b.param(p.data,p.traditional)),qn(An,p,n,N),2===x)return N;u=p.global,u&&0===b.active++&&b.event.trigger("ajaxStart"),p.type=p.type.toUpperCase(),p.hasContent=!Cn.test(p.type),o=p.url,p.hasContent||(p.data&&(o=p.url+=(bn.test(o)?"&":"?")+p.data,delete p.data),p.cache===!1&&(p.url=wn.test(o)?o.replace(wn,"$1_="+vn++):o+(bn.test(o)?"&":"?")+"_="+vn++)),p.ifModified&&(b.lastModified[o]&&N.setRequestHeader("If-Modified-Since",b.lastModified[o]),b.etag[o]&&N.setRequestHeader("If-None-Match",b.etag[o])),(p.data&&p.hasContent&&p.contentType!==!1||n.contentType)&&N.setRequestHeader("Content-Type",p.contentType),N.setRequestHeader("Accept",p.dataTypes[0]&&p.accepts[p.dataTypes[0]]?p.accepts[p.dataTypes[0]]+("*"!==p.dataTypes[0]?", "+Dn+"; q=0.01":""):p.accepts["*"]);for(i in p.headers)N.setRequestHeader(i,p.headers[i]);if(p.beforeSend&&(p.beforeSend.call(f,N,p)===!1||2===x))return N.abort();T="abort";for(i in{success:1,error:1,complete:1})N[i](p[i]);if(l=qn(jn,p,n,N)){N.readyState=1,u&&d.trigger("ajaxSend",[N,p]),p.async&&p.timeout>0&&(s=setTimeout(function(){N.abort("timeout")},p.timeout));try{x=1,l.send(y,k)}catch(C){if(!(2>x))throw C;k(-1,C)}}else k(-1,"No Transport");function k(e,n,r,i){var 
c,y,v,w,T,C=n;2!==x&&(x=2,s&&clearTimeout(s),l=t,a=i||"",N.readyState=e>0?4:0,r&&(w=_n(p,N,r)),e>=200&&300>e||304===e?(p.ifModified&&(T=N.getResponseHeader("Last-Modified"),T&&(b.lastModified[o]=T),T=N.getResponseHeader("etag"),T&&(b.etag[o]=T)),204===e?(c=!0,C="nocontent"):304===e?(c=!0,C="notmodified"):(c=Fn(p,w),C=c.state,y=c.data,v=c.error,c=!v)):(v=C,(e||!C)&&(C="error",0>e&&(e=0))),N.status=e,N.statusText=(n||C)+"",c?h.resolveWith(f,[y,C,N]):h.rejectWith(f,[N,C,v]),N.statusCode(m),m=t,u&&d.trigger(c?"ajaxSuccess":"ajaxError",[N,p,c?y:v]),g.fireWith(f,[N,C]),u&&(d.trigger("ajaxComplete",[N,p]),--b.active||b.event.trigger("ajaxStop")))}return N},getScript:function(e,n){return b.get(e,t,n,"script")},getJSON:function(e,t,n){return b.get(e,t,n,"json")}});function _n(e,n,r){var i,o,a,s,u=e.contents,l=e.dataTypes,c=e.responseFields;for(s in c)s in r&&(n[c[s]]=r[s]);while("*"===l[0])l.shift(),o===t&&(o=e.mimeType||n.getResponseHeader("Content-Type"));if(o)for(s in u)if(u[s]&&u[s].test(o)){l.unshift(s);break}if(l[0]in r)a=l[0];else{for(s in r){if(!l[0]||e.converters[s+" "+l[0]]){a=s;break}i||(i=s)}a=a||i}return a?(a!==l[0]&&l.unshift(a),r[a]):t}function Fn(e,t){var n,r,i,o,a={},s=0,u=e.dataTypes.slice(),l=u[0];if(e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u[1])for(i in e.converters)a[i.toLowerCase()]=e.converters[i];for(;r=u[++s];)if("*"!==r){if("*"!==l&&l!==r){if(i=a[l+" "+r]||a["* "+r],!i)for(n in a)if(o=n.split(" "),o[1]===r&&(i=a[l+" "+o[0]]||a["* "+o[0]])){i===!0?i=a[n]:a[n]!==!0&&(r=o[0],u.splice(s--,0,r));break}if(i!==!0)if(i&&e["throws"])t=i(t);else try{t=i(t)}catch(c){return{state:"parsererror",error:i?c:"No conversion from "+l+" to "+r}}}l=r}return{state:"success",data:t}}b.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(e){return b.globalEval(e),e}}}),b.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),b.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=o.head||b("head")[0]||o.documentElement;return{send:function(t,i){n=o.createElement("script"),n.async=!0,e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,t){(t||!n.readyState||/loaded|complete/.test(n.readyState))&&(n.onload=n.onreadystatechange=null,n.parentNode&&n.parentNode.removeChild(n),n=null,t||i(200,"success"))},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(t,!0)}}}});var On=[],Bn=/(=)\?(?=&|$)|\?\?/;b.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=On.pop()||b.expando+"_"+vn++;return this[e]=!0,e}}),b.ajaxPrefilter("json jsonp",function(n,r,i){var o,a,s,u=n.jsonp!==!1&&(Bn.test(n.url)?"url":"string"==typeof n.data&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Bn.test(n.data)&&"data");return u||"jsonp"===n.dataTypes[0]?(o=n.jsonpCallback=b.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,u?n[u]=n[u].replace(Bn,"$1"+o):n.jsonp!==!1&&(n.url+=(bn.test(n.url)?"&":"?")+n.jsonp+"="+o),n.converters["script json"]=function(){return s||b.error(o+" was not called"),s[0]},n.dataTypes[0]="json",a=e[o],e[o]=function(){s=arguments},i.always(function(){e[o]=a,n[o]&&(n.jsonpCallback=r.jsonpCallback,On.push(o)),s&&b.isFunction(a)&&a(s[0]),s=a=t}),"script"):t});var Pn,Rn,Wn=0,$n=e.ActiveXObject&&function(){var e;for(e in Pn)Pn[e](t,!0)};function In(){try{return new e.XMLHttpRequest}catch(t){}}function zn(){try{return new 
e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}b.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&In()||zn()}:In,Rn=b.ajaxSettings.xhr(),b.support.cors=!!Rn&&"withCredentials"in Rn,Rn=b.support.ajax=!!Rn,Rn&&b.ajaxTransport(function(n){if(!n.crossDomain||b.support.cors){var r;return{send:function(i,o){var a,s,u=n.xhr();if(n.username?u.open(n.type,n.url,n.async,n.username,n.password):u.open(n.type,n.url,n.async),n.xhrFields)for(s in n.xhrFields)u[s]=n.xhrFields[s];n.mimeType&&u.overrideMimeType&&u.overrideMimeType(n.mimeType),n.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");try{for(s in i)u.setRequestHeader(s,i[s])}catch(l){}u.send(n.hasContent&&n.data||null),r=function(e,i){var s,l,c,p;try{if(r&&(i||4===u.readyState))if(r=t,a&&(u.onreadystatechange=b.noop,$n&&delete Pn[a]),i)4!==u.readyState&&u.abort();else{p={},s=u.status,l=u.getAllResponseHeaders(),"string"==typeof u.responseText&&(p.text=u.responseText);try{c=u.statusText}catch(f){c=""}s||!n.isLocal||n.crossDomain?1223===s&&(s=204):s=p.text?200:404}}catch(d){i||o(-1,d)}p&&o(s,c,p,l)},n.async?4===u.readyState?setTimeout(r):(a=++Wn,$n&&(Pn||(Pn={},b(e).unload($n)),Pn[a]=r),u.onreadystatechange=r):r()},abort:function(){r&&r(t,!0)}}}});var Xn,Un,Vn=/^(?:toggle|show|hide)$/,Yn=RegExp("^(?:([+-])=|)("+x+")([a-z%]*)$","i"),Jn=/queueHooks$/,Gn=[nr],Qn={"*":[function(e,t){var n,r,i=this.createTween(e,t),o=Yn.exec(t),a=i.cur(),s=+a||0,u=1,l=20;if(o){if(n=+o[2],r=o[3]||(b.cssNumber[e]?"":"px"),"px"!==r&&s){s=b.css(i.elem,e,!0)||n||1;do u=u||".5",s/=u,b.style(i.elem,e,s+r);while(u!==(u=i.cur()/a)&&1!==u&&--l)}i.unit=r,i.start=s,i.end=o[1]?s+(o[1]+1)*n:n}return i}]};function Kn(){return setTimeout(function(){Xn=t}),Xn=b.now()}function Zn(e,t){b.each(t,function(t,n){var r=(Qn[t]||[]).concat(Qn["*"]),i=0,o=r.length;for(;o>i;i++)if(r[i].call(e,t,n))return})}function er(e,t,n){var r,i,o=0,a=Gn.length,s=b.Deferred().always(function(){delete u.elem}),u=function(){if(i)return!1;var t=Xn||Kn(),n=Math.max(0,l.startTime+l.duration-t),r=n/l.duration||0,o=1-r,a=0,u=l.tweens.length;for(;u>a;a++)l.tweens[a].run(o);return s.notifyWith(e,[l,o,n]),1>o&&u?n:(s.resolveWith(e,[l]),!1)},l=s.promise({elem:e,props:b.extend({},t),opts:b.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:Xn||Kn(),duration:n.duration,tweens:[],createTween:function(t,n){var r=b.Tween(e,l.opts,t,n,l.opts.specialEasing[t]||l.opts.easing);return l.tweens.push(r),r},stop:function(t){var n=0,r=t?l.tweens.length:0;if(i)return this;for(i=!0;r>n;n++)l.tweens[n].run(1);return t?s.resolveWith(e,[l,t]):s.rejectWith(e,[l,t]),this}}),c=l.props;for(tr(c,l.opts.specialEasing);a>o;o++)if(r=Gn[o].call(l,e,c,l.opts))return r;return Zn(l,c),b.isFunction(l.opts.start)&&l.opts.start.call(e,l),b.fx.timer(b.extend(u,{elem:e,anim:l,queue:l.opts.queue})),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always)}function tr(e,t){var n,r,i,o,a;for(i in e)if(r=b.camelCase(i),o=t[r],n=e[i],b.isArray(n)&&(o=n[1],n=e[i]=n[0]),i!==r&&(e[r]=n,delete e[i]),a=b.cssHooks[r],a&&"expand"in a){n=a.expand(n),delete e[r];for(i in n)i in e||(e[i]=n[i],t[i]=o)}else t[r]=o}b.Animation=b.extend(er,{tweener:function(e,t){b.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;i>r;r++)n=e[r],Qn[n]=Qn[n]||[],Qn[n].unshift(t)},prefilter:function(e,t){t?Gn.unshift(e):Gn.push(e)}});function nr(e,t,n){var 
r,i,o,a,s,u,l,c,p,f=this,d=e.style,h={},g=[],m=e.nodeType&&nn(e);n.queue||(c=b._queueHooks(e,"fx"),null==c.unqueued&&(c.unqueued=0,p=c.empty.fire,c.empty.fire=function(){c.unqueued||p()}),c.unqueued++,f.always(function(){f.always(function(){c.unqueued--,b.queue(e,"fx").length||c.empty.fire()})})),1===e.nodeType&&("height"in t||"width"in t)&&(n.overflow=[d.overflow,d.overflowX,d.overflowY],"inline"===b.css(e,"display")&&"none"===b.css(e,"float")&&(b.support.inlineBlockNeedsLayout&&"inline"!==un(e.nodeName)?d.zoom=1:d.display="inline-block")),n.overflow&&(d.overflow="hidden",b.support.shrinkWrapBlocks||f.always(function(){d.overflow=n.overflow[0],d.overflowX=n.overflow[1],d.overflowY=n.overflow[2]}));for(i in t)if(a=t[i],Vn.exec(a)){if(delete t[i],u=u||"toggle"===a,a===(m?"hide":"show"))continue;g.push(i)}if(o=g.length){s=b._data(e,"fxshow")||b._data(e,"fxshow",{}),"hidden"in s&&(m=s.hidden),u&&(s.hidden=!m),m?b(e).show():f.done(function(){b(e).hide()}),f.done(function(){var t;b._removeData(e,"fxshow");for(t in h)b.style(e,t,h[t])});for(i=0;o>i;i++)r=g[i],l=f.createTween(r,m?s[r]:0),h[r]=s[r]||b.style(e,r),r in s||(s[r]=l.start,m&&(l.end=l.start,l.start="width"===r||"height"===r?1:0))}}function rr(e,t,n,r,i){return new rr.prototype.init(e,t,n,r,i)}b.Tween=rr,rr.prototype={constructor:rr,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(b.cssNumber[n]?"":"px")},cur:function(){var e=rr.propHooks[this.prop];return e&&e.get?e.get(this):rr.propHooks._default.get(this)},run:function(e){var t,n=rr.propHooks[this.prop];return this.pos=t=this.options.duration?b.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):rr.propHooks._default.set(this),this}},rr.prototype.init.prototype=rr.prototype,rr.propHooks={_default:{get:function(e){var t;return null==e.elem[e.prop]||e.elem.style&&null!=e.elem.style[e.prop]?(t=b.css(e.elem,e.prop,""),t&&"auto"!==t?t:0):e.elem[e.prop]},set:function(e){b.fx.step[e.prop]?b.fx.step[e.prop](e):e.elem.style&&(null!=e.elem.style[b.cssProps[e.prop]]||b.cssHooks[e.prop])?b.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},rr.propHooks.scrollTop=rr.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},b.each(["toggle","show","hide"],function(e,t){var n=b.fn[t];b.fn[t]=function(e,r,i){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(ir(t,!0),e,r,i)}}),b.fn.extend({fadeTo:function(e,t,n,r){return this.filter(nn).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=b.isEmptyObject(e),o=b.speed(t,n,r),a=function(){var t=er(this,b.extend({},e),o);a.finish=function(){t.stop(!0)},(i||b._data(this,"finish"))&&t.stop(!0)};return a.finish=a,i||o.queue===!1?this.each(a):this.queue(o.queue,a)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return"string"!=typeof e&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=null!=e&&e+"queueHooks",o=b.timers,a=b._data(this);if(n)a[n]&&a[n].stop&&i(a[n]);else for(n in a)a[n]&&a[n].stop&&Jn.test(n)&&i(a[n]);for(n=o.length;n--;)o[n].elem!==this||null!=e&&o[n].queue!==e||(o[n].anim.stop(r),t=!1,o.splice(n,1));(t||!r)&&b.dequeue(this,e)})},finish:function(e){return e!==!1&&(e=e||"fx"),this.each(function(){var 
t,n=b._data(this),r=n[e+"queue"],i=n[e+"queueHooks"],o=b.timers,a=r?r.length:0;for(n.finish=!0,b.queue(this,e,[]),i&&i.cur&&i.cur.finish&&i.cur.finish.call(this),t=o.length;t--;)o[t].elem===this&&o[t].queue===e&&(o[t].anim.stop(!0),o.splice(t,1));for(t=0;a>t;t++)r[t]&&r[t].finish&&r[t].finish.call(this);delete n.finish})}});function ir(e,t){var n,r={height:e},i=0;for(t=t?1:0;4>i;i+=2-t)n=Zt[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}b.each({slideDown:ir("show"),slideUp:ir("hide"),slideToggle:ir("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){b.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),b.speed=function(e,t,n){var r=e&&"object"==typeof e?b.extend({},e):{complete:n||!n&&t||b.isFunction(e)&&e,duration:e,easing:n&&t||t&&!b.isFunction(t)&&t};return r.duration=b.fx.off?0:"number"==typeof r.duration?r.duration:r.duration in b.fx.speeds?b.fx.speeds[r.duration]:b.fx.speeds._default,(null==r.queue||r.queue===!0)&&(r.queue="fx"),r.old=r.complete,r.complete=function(){b.isFunction(r.old)&&r.old.call(this),r.queue&&b.dequeue(this,r.queue)},r},b.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},b.timers=[],b.fx=rr.prototype.init,b.fx.tick=function(){var e,n=b.timers,r=0;for(Xn=b.now();n.length>r;r++)e=n[r],e()||n[r]!==e||n.splice(r--,1);n.length||b.fx.stop(),Xn=t},b.fx.timer=function(e){e()&&b.timers.push(e)&&b.fx.start()},b.fx.interval=13,b.fx.start=function(){Un||(Un=setInterval(b.fx.tick,b.fx.interval))},b.fx.stop=function(){clearInterval(Un),Un=null},b.fx.speeds={slow:600,fast:200,_default:400},b.fx.step={},b.expr&&b.expr.filters&&(b.expr.filters.animated=function(e){return b.grep(b.timers,function(t){return e===t.elem}).length}),b.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){b.offset.setOffset(this,e,t)});var n,r,o={top:0,left:0},a=this[0],s=a&&a.ownerDocument;if(s)return n=s.documentElement,b.contains(n,a)?(typeof a.getBoundingClientRect!==i&&(o=a.getBoundingClientRect()),r=or(s),{top:o.top+(r.pageYOffset||n.scrollTop)-(n.clientTop||0),left:o.left+(r.pageXOffset||n.scrollLeft)-(n.clientLeft||0)}):o},b.offset={setOffset:function(e,t,n){var r=b.css(e,"position");"static"===r&&(e.style.position="relative");var i=b(e),o=i.offset(),a=b.css(e,"top"),s=b.css(e,"left"),u=("absolute"===r||"fixed"===r)&&b.inArray("auto",[a,s])>-1,l={},c={},p,f;u?(c=i.position(),p=c.top,f=c.left):(p=parseFloat(a)||0,f=parseFloat(s)||0),b.isFunction(t)&&(t=t.call(e,n,o)),null!=t.top&&(l.top=t.top-o.top+p),null!=t.left&&(l.left=t.left-o.left+f),"using"in t?t.using.call(e,l):i.css(l)}},b.fn.extend({position:function(){if(this[0]){var e,t,n={top:0,left:0},r=this[0];return"fixed"===b.css(r,"position")?t=r.getBoundingClientRect():(e=this.offsetParent(),t=this.offset(),b.nodeName(e[0],"html")||(n=e.offset()),n.top+=b.css(e[0],"borderTopWidth",!0),n.left+=b.css(e[0],"borderLeftWidth",!0)),{top:t.top-n.top-b.css(r,"marginTop",!0),left:t.left-n.left-b.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||o.documentElement;while(e&&!b.nodeName(e,"html")&&"static"===b.css(e,"position"))e=e.offsetParent;return e||o.documentElement})}}),b.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);b.fn[e]=function(i){return b.access(this,function(e,i,o){var a=or(e);return o===t?a?n in 
a?a[n]:a.document.documentElement[i]:e[i]:(a?a.scrollTo(r?b(a).scrollLeft():o,r?o:b(a).scrollTop()):e[i]=o,t)},e,i,arguments.length,null)}});function or(e){return b.isWindow(e)?e:9===e.nodeType?e.defaultView||e.parentWindow:!1}b.each({Height:"height",Width:"width"},function(e,n){b.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){b.fn[i]=function(i,o){var a=arguments.length&&(r||"boolean"!=typeof i),s=r||(i===!0||o===!0?"margin":"border");return b.access(this,function(n,r,i){var o;return b.isWindow(n)?n.document.documentElement["client"+e]:9===n.nodeType?(o=n.documentElement,Math.max(n.body["scroll"+e],o["scroll"+e],n.body["offset"+e],o["offset"+e],o["client"+e])):i===t?b.css(n,r,s):b.style(n,r,i,s)},n,a?i:t,a,null)}})}),e.jQuery=e.$=b,"function"==typeof define&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return b})})(window);
\ No newline at end of file
c,y,v,w,T,C=n;2!==x&&(x=2,s&&clearTimeout(s),l=t,a=i||"",N.readyState=e>0?4:0,r&&(w=_n(p,N,r)),e>=200&&300>e||304===e?(p.ifModified&&(T=N.getResponseHeader("Last-Modified"),T&&(b.lastModified[o]=T),T=N.getResponseHeader("etag"),T&&(b.etag[o]=T)),204===e?(c=!0,C="nocontent"):304===e?(c=!0,C="notmodified"):(c=Fn(p,w),C=c.state,y=c.data,v=c.error,c=!v)):(v=C,(e||!C)&&(C="error",0>e&&(e=0))),N.status=e,N.statusText=(n||C)+"",c?h.resolveWith(f,[y,C,N]):h.rejectWith(f,[N,C,v]),N.statusCode(m),m=t,u&&d.trigger(c?"ajaxSuccess":"ajaxError",[N,p,c?y:v]),g.fireWith(f,[N,C]),u&&(d.trigger("ajaxComplete",[N,p]),--b.active||b.event.trigger("ajaxStop")))}return N},getScript:function(e,n){return b.get(e,t,n,"script")},getJSON:function(e,t,n){return b.get(e,t,n,"json")}});function _n(e,n,r){var i,o,a,s,u=e.contents,l=e.dataTypes,c=e.responseFields;for(s in c)s in r&&(n[c[s]]=r[s]);while("*"===l[0])l.shift(),o===t&&(o=e.mimeType||n.getResponseHeader("Content-Type"));if(o)for(s in u)if(u[s]&&u[s].test(o)){l.unshift(s);break}if(l[0]in r)a=l[0];else{for(s in r){if(!l[0]||e.converters[s+" "+l[0]]){a=s;break}i||(i=s)}a=a||i}return a?(a!==l[0]&&l.unshift(a),r[a]):t}function Fn(e,t){var n,r,i,o,a={},s=0,u=e.dataTypes.slice(),l=u[0];if(e.dataFilter&&(t=e.dataFilter(t,e.dataType)),u[1])for(i in e.converters)a[i.toLowerCase()]=e.converters[i];for(;r=u[++s];)if("*"!==r){if("*"!==l&&l!==r){if(i=a[l+" "+r]||a["* "+r],!i)for(n in a)if(o=n.split(" "),o[1]===r&&(i=a[l+" "+o[0]]||a["* "+o[0]])){i===!0?i=a[n]:a[n]!==!0&&(r=o[0],u.splice(s--,0,r));break}if(i!==!0)if(i&&e["throws"])t=i(t);else try{t=i(t)}catch(c){return{state:"parsererror",error:i?c:"No conversion from "+l+" to "+r}}}l=r}return{state:"success",data:t}}b.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/(?:java|ecma)script/},converters:{"text script":function(e){return b.globalEval(e),e}}}),b.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),b.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=o.head||b("head")[0]||o.documentElement;return{send:function(t,i){n=o.createElement("script"),n.async=!0,e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,t){(t||!n.readyState||/loaded|complete/.test(n.readyState))&&(n.onload=n.onreadystatechange=null,n.parentNode&&n.parentNode.removeChild(n),n=null,t||i(200,"success"))},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(t,!0)}}}});var On=[],Bn=/(=)\?(?=&|$)|\?\?/;b.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=On.pop()||b.expando+"_"+vn++;return this[e]=!0,e}}),b.ajaxPrefilter("json jsonp",function(n,r,i){var o,a,s,u=n.jsonp!==!1&&(Bn.test(n.url)?"url":"string"==typeof n.data&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Bn.test(n.data)&&"data");return u||"jsonp"===n.dataTypes[0]?(o=n.jsonpCallback=b.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,u?n[u]=n[u].replace(Bn,"$1"+o):n.jsonp!==!1&&(n.url+=(bn.test(n.url)?"&":"?")+n.jsonp+"="+o),n.converters["script json"]=function(){return s||b.error(o+" was not called"),s[0]},n.dataTypes[0]="json",a=e[o],e[o]=function(){s=arguments},i.always(function(){e[o]=a,n[o]&&(n.jsonpCallback=r.jsonpCallback,On.push(o)),s&&b.isFunction(a)&&a(s[0]),s=a=t}),"script"):t});var Pn,Rn,Wn=0,$n=e.ActiveXObject&&function(){var e;for(e in Pn)Pn[e](t,!0)};function In(){try{return new e.XMLHttpRequest}catch(t){}}function zn(){try{return new 
e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}b.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&In()||zn()}:In,Rn=b.ajaxSettings.xhr(),b.support.cors=!!Rn&&"withCredentials"in Rn,Rn=b.support.ajax=!!Rn,Rn&&b.ajaxTransport(function(n){if(!n.crossDomain||b.support.cors){var r;return{send:function(i,o){var a,s,u=n.xhr();if(n.username?u.open(n.type,n.url,n.async,n.username,n.password):u.open(n.type,n.url,n.async),n.xhrFields)for(s in n.xhrFields)u[s]=n.xhrFields[s];n.mimeType&&u.overrideMimeType&&u.overrideMimeType(n.mimeType),n.crossDomain||i["X-Requested-With"]||(i["X-Requested-With"]="XMLHttpRequest");try{for(s in i)u.setRequestHeader(s,i[s])}catch(l){}u.send(n.hasContent&&n.data||null),r=function(e,i){var s,l,c,p;try{if(r&&(i||4===u.readyState))if(r=t,a&&(u.onreadystatechange=b.noop,$n&&delete Pn[a]),i)4!==u.readyState&&u.abort();else{p={},s=u.status,l=u.getAllResponseHeaders(),"string"==typeof u.responseText&&(p.text=u.responseText);try{c=u.statusText}catch(f){c=""}s||!n.isLocal||n.crossDomain?1223===s&&(s=204):s=p.text?200:404}}catch(d){i||o(-1,d)}p&&o(s,c,p,l)},n.async?4===u.readyState?setTimeout(r):(a=++Wn,$n&&(Pn||(Pn={},b(e).unload($n)),Pn[a]=r),u.onreadystatechange=r):r()},abort:function(){r&&r(t,!0)}}}});var Xn,Un,Vn=/^(?:toggle|show|hide)$/,Yn=RegExp("^(?:([+-])=|)("+x+")([a-z%]*)$","i"),Jn=/queueHooks$/,Gn=[nr],Qn={"*":[function(e,t){var n,r,i=this.createTween(e,t),o=Yn.exec(t),a=i.cur(),s=+a||0,u=1,l=20;if(o){if(n=+o[2],r=o[3]||(b.cssNumber[e]?"":"px"),"px"!==r&&s){s=b.css(i.elem,e,!0)||n||1;do u=u||".5",s/=u,b.style(i.elem,e,s+r);while(u!==(u=i.cur()/a)&&1!==u&&--l)}i.unit=r,i.start=s,i.end=o[1]?s+(o[1]+1)*n:n}return i}]};function Kn(){return setTimeout(function(){Xn=t}),Xn=b.now()}function Zn(e,t){b.each(t,function(t,n){var r=(Qn[t]||[]).concat(Qn["*"]),i=0,o=r.length;for(;o>i;i++)if(r[i].call(e,t,n))return})}function er(e,t,n){var r,i,o=0,a=Gn.length,s=b.Deferred().always(function(){delete u.elem}),u=function(){if(i)return!1;var t=Xn||Kn(),n=Math.max(0,l.startTime+l.duration-t),r=n/l.duration||0,o=1-r,a=0,u=l.tweens.length;for(;u>a;a++)l.tweens[a].run(o);return s.notifyWith(e,[l,o,n]),1>o&&u?n:(s.resolveWith(e,[l]),!1)},l=s.promise({elem:e,props:b.extend({},t),opts:b.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:Xn||Kn(),duration:n.duration,tweens:[],createTween:function(t,n){var r=b.Tween(e,l.opts,t,n,l.opts.specialEasing[t]||l.opts.easing);return l.tweens.push(r),r},stop:function(t){var n=0,r=t?l.tweens.length:0;if(i)return this;for(i=!0;r>n;n++)l.tweens[n].run(1);return t?s.resolveWith(e,[l,t]):s.rejectWith(e,[l,t]),this}}),c=l.props;for(tr(c,l.opts.specialEasing);a>o;o++)if(r=Gn[o].call(l,e,c,l.opts))return r;return Zn(l,c),b.isFunction(l.opts.start)&&l.opts.start.call(e,l),b.fx.timer(b.extend(u,{elem:e,anim:l,queue:l.opts.queue})),l.progress(l.opts.progress).done(l.opts.done,l.opts.complete).fail(l.opts.fail).always(l.opts.always)}function tr(e,t){var n,r,i,o,a;for(i in e)if(r=b.camelCase(i),o=t[r],n=e[i],b.isArray(n)&&(o=n[1],n=e[i]=n[0]),i!==r&&(e[r]=n,delete e[i]),a=b.cssHooks[r],a&&"expand"in a){n=a.expand(n),delete e[r];for(i in n)i in e||(e[i]=n[i],t[i]=o)}else t[r]=o}b.Animation=b.extend(er,{tweener:function(e,t){b.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;i>r;r++)n=e[r],Qn[n]=Qn[n]||[],Qn[n].unshift(t)},prefilter:function(e,t){t?Gn.unshift(e):Gn.push(e)}});function nr(e,t,n){var 
r,i,o,a,s,u,l,c,p,f=this,d=e.style,h={},g=[],m=e.nodeType&&nn(e);n.queue||(c=b._queueHooks(e,"fx"),null==c.unqueued&&(c.unqueued=0,p=c.empty.fire,c.empty.fire=function(){c.unqueued||p()}),c.unqueued++,f.always(function(){f.always(function(){c.unqueued--,b.queue(e,"fx").length||c.empty.fire()})})),1===e.nodeType&&("height"in t||"width"in t)&&(n.overflow=[d.overflow,d.overflowX,d.overflowY],"inline"===b.css(e,"display")&&"none"===b.css(e,"float")&&(b.support.inlineBlockNeedsLayout&&"inline"!==un(e.nodeName)?d.zoom=1:d.display="inline-block")),n.overflow&&(d.overflow="hidden",b.support.shrinkWrapBlocks||f.always(function(){d.overflow=n.overflow[0],d.overflowX=n.overflow[1],d.overflowY=n.overflow[2]}));for(i in t)if(a=t[i],Vn.exec(a)){if(delete t[i],u=u||"toggle"===a,a===(m?"hide":"show"))continue;g.push(i)}if(o=g.length){s=b._data(e,"fxshow")||b._data(e,"fxshow",{}),"hidden"in s&&(m=s.hidden),u&&(s.hidden=!m),m?b(e).show():f.done(function(){b(e).hide()}),f.done(function(){var t;b._removeData(e,"fxshow");for(t in h)b.style(e,t,h[t])});for(i=0;o>i;i++)r=g[i],l=f.createTween(r,m?s[r]:0),h[r]=s[r]||b.style(e,r),r in s||(s[r]=l.start,m&&(l.end=l.start,l.start="width"===r||"height"===r?1:0))}}function rr(e,t,n,r,i){return new rr.prototype.init(e,t,n,r,i)}b.Tween=rr,rr.prototype={constructor:rr,init:function(e,t,n,r,i,o){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=o||(b.cssNumber[n]?"":"px")},cur:function(){var e=rr.propHooks[this.prop];return e&&e.get?e.get(this):rr.propHooks._default.get(this)},run:function(e){var t,n=rr.propHooks[this.prop];return this.pos=t=this.options.duration?b.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):rr.propHooks._default.set(this),this}},rr.prototype.init.prototype=rr.prototype,rr.propHooks={_default:{get:function(e){var t;return null==e.elem[e.prop]||e.elem.style&&null!=e.elem.style[e.prop]?(t=b.css(e.elem,e.prop,""),t&&"auto"!==t?t:0):e.elem[e.prop]},set:function(e){b.fx.step[e.prop]?b.fx.step[e.prop](e):e.elem.style&&(null!=e.elem.style[b.cssProps[e.prop]]||b.cssHooks[e.prop])?b.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},rr.propHooks.scrollTop=rr.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},b.each(["toggle","show","hide"],function(e,t){var n=b.fn[t];b.fn[t]=function(e,r,i){return null==e||"boolean"==typeof e?n.apply(this,arguments):this.animate(ir(t,!0),e,r,i)}}),b.fn.extend({fadeTo:function(e,t,n,r){return this.filter(nn).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=b.isEmptyObject(e),o=b.speed(t,n,r),a=function(){var t=er(this,b.extend({},e),o);a.finish=function(){t.stop(!0)},(i||b._data(this,"finish"))&&t.stop(!0)};return a.finish=a,i||o.queue===!1?this.each(a):this.queue(o.queue,a)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return"string"!=typeof e&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=null!=e&&e+"queueHooks",o=b.timers,a=b._data(this);if(n)a[n]&&a[n].stop&&i(a[n]);else for(n in a)a[n]&&a[n].stop&&Jn.test(n)&&i(a[n]);for(n=o.length;n--;)o[n].elem!==this||null!=e&&o[n].queue!==e||(o[n].anim.stop(r),t=!1,o.splice(n,1));(t||!r)&&b.dequeue(this,e)})},finish:function(e){return e!==!1&&(e=e||"fx"),this.each(function(){var 
t,n=b._data(this),r=n[e+"queue"],i=n[e+"queueHooks"],o=b.timers,a=r?r.length:0;for(n.finish=!0,b.queue(this,e,[]),i&&i.cur&&i.cur.finish&&i.cur.finish.call(this),t=o.length;t--;)o[t].elem===this&&o[t].queue===e&&(o[t].anim.stop(!0),o.splice(t,1));for(t=0;a>t;t++)r[t]&&r[t].finish&&r[t].finish.call(this);delete n.finish})}});function ir(e,t){var n,r={height:e},i=0;for(t=t?1:0;4>i;i+=2-t)n=Zt[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}b.each({slideDown:ir("show"),slideUp:ir("hide"),slideToggle:ir("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){b.fn[e]=function(e,n,r){return this.animate(t,e,n,r)}}),b.speed=function(e,t,n){var r=e&&"object"==typeof e?b.extend({},e):{complete:n||!n&&t||b.isFunction(e)&&e,duration:e,easing:n&&t||t&&!b.isFunction(t)&&t};return r.duration=b.fx.off?0:"number"==typeof r.duration?r.duration:r.duration in b.fx.speeds?b.fx.speeds[r.duration]:b.fx.speeds._default,(null==r.queue||r.queue===!0)&&(r.queue="fx"),r.old=r.complete,r.complete=function(){b.isFunction(r.old)&&r.old.call(this),r.queue&&b.dequeue(this,r.queue)},r},b.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},b.timers=[],b.fx=rr.prototype.init,b.fx.tick=function(){var e,n=b.timers,r=0;for(Xn=b.now();n.length>r;r++)e=n[r],e()||n[r]!==e||n.splice(r--,1);n.length||b.fx.stop(),Xn=t},b.fx.timer=function(e){e()&&b.timers.push(e)&&b.fx.start()},b.fx.interval=13,b.fx.start=function(){Un||(Un=setInterval(b.fx.tick,b.fx.interval))},b.fx.stop=function(){clearInterval(Un),Un=null},b.fx.speeds={slow:600,fast:200,_default:400},b.fx.step={},b.expr&&b.expr.filters&&(b.expr.filters.animated=function(e){return b.grep(b.timers,function(t){return e===t.elem}).length}),b.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){b.offset.setOffset(this,e,t)});var n,r,o={top:0,left:0},a=this[0],s=a&&a.ownerDocument;if(s)return n=s.documentElement,b.contains(n,a)?(typeof a.getBoundingClientRect!==i&&(o=a.getBoundingClientRect()),r=or(s),{top:o.top+(r.pageYOffset||n.scrollTop)-(n.clientTop||0),left:o.left+(r.pageXOffset||n.scrollLeft)-(n.clientLeft||0)}):o},b.offset={setOffset:function(e,t,n){var r=b.css(e,"position");"static"===r&&(e.style.position="relative");var i=b(e),o=i.offset(),a=b.css(e,"top"),s=b.css(e,"left"),u=("absolute"===r||"fixed"===r)&&b.inArray("auto",[a,s])>-1,l={},c={},p,f;u?(c=i.position(),p=c.top,f=c.left):(p=parseFloat(a)||0,f=parseFloat(s)||0),b.isFunction(t)&&(t=t.call(e,n,o)),null!=t.top&&(l.top=t.top-o.top+p),null!=t.left&&(l.left=t.left-o.left+f),"using"in t?t.using.call(e,l):i.css(l)}},b.fn.extend({position:function(){if(this[0]){var e,t,n={top:0,left:0},r=this[0];return"fixed"===b.css(r,"position")?t=r.getBoundingClientRect():(e=this.offsetParent(),t=this.offset(),b.nodeName(e[0],"html")||(n=e.offset()),n.top+=b.css(e[0],"borderTopWidth",!0),n.left+=b.css(e[0],"borderLeftWidth",!0)),{top:t.top-n.top-b.css(r,"marginTop",!0),left:t.left-n.left-b.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||o.documentElement;while(e&&!b.nodeName(e,"html")&&"static"===b.css(e,"position"))e=e.offsetParent;return e||o.documentElement})}}),b.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);b.fn[e]=function(i){return b.access(this,function(e,i,o){var a=or(e);return o===t?a?n in 
a?a[n]:a.document.documentElement[i]:e[i]:(a?a.scrollTo(r?b(a).scrollLeft():o,r?o:b(a).scrollTop()):e[i]=o,t)},e,i,arguments.length,null)}});function or(e){return b.isWindow(e)?e:9===e.nodeType?e.defaultView||e.parentWindow:!1}b.each({Height:"height",Width:"width"},function(e,n){b.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){b.fn[i]=function(i,o){var a=arguments.length&&(r||"boolean"!=typeof i),s=r||(i===!0||o===!0?"margin":"border");return b.access(this,function(n,r,i){var o;return b.isWindow(n)?n.document.documentElement["client"+e]:9===n.nodeType?(o=n.documentElement,Math.max(n.body["scroll"+e],o["scroll"+e],n.body["offset"+e],o["offset"+e],o["client"+e])):i===t?b.css(n,r,s):b.style(n,r,i,s)},n,a?i:t,a,null)}})}),e.jQuery=e.$=b,"function"==typeof define&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return b})})(window);
diff --git a/asterix-app/src/main/resources/webui/static/js/smoothie.js b/asterix-app/src/main/resources/webui/static/js/smoothie.js
new file mode 100644
index 0000000..60c6624
--- /dev/null
+++ b/asterix-app/src/main/resources/webui/static/js/smoothie.js
@@ -0,0 +1,558 @@
+
+;(function(exports) {
+
+ var Util = {
+ extend: function() {
+ arguments[0] = arguments[0] || {};
+ for (var i = 1; i < arguments.length; i++)
+ {
+ for (var key in arguments[i])
+ {
+ if (arguments[i].hasOwnProperty(key))
+ {
+ if (typeof(arguments[i][key]) === 'object') {
+ if (arguments[i][key] instanceof Array) {
+ arguments[0][key] = arguments[i][key];
+ } else {
+ arguments[0][key] = Util.extend(arguments[0][key], arguments[i][key]);
+ }
+ } else {
+ arguments[0][key] = arguments[i][key];
+ }
+ }
+ }
+ }
+ return arguments[0];
+ }
+ };
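+
+ /*
+ * Usage sketch (illustrative only, not part of the library): Util.extend
+ * deep-merges plain objects left-to-right, while arrays are copied by
+ * reference rather than merged element-wise.
+ *
+ * <pre>
+ * var opts = Util.extend({}, {grid: {lineWidth: 1, millisPerLine: 1000}},
+ * {grid: {lineWidth: 2}});
+ * // opts.grid.lineWidth === 2; opts.grid.millisPerLine === 1000
+ * </pre>
+ */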
+
+ /**
+ * Initialises a new <code>TimeSeries</code> with optional data options.
+ *
+ * Options are of the form (defaults shown):
+ *
+ * <pre>
+ * {
+ * resetBounds: false,
+ * resetBoundsInterval: 3000
+ * }
+ * </pre>
+ *
+ * Presentation options for TimeSeries are specified as an argument to <code>SmoothieChart.addTimeSeries</code>.
+ *
+ * @constructor
+ */
+ function TimeSeries(options) {
+ this.options = Util.extend({}, TimeSeries.defaultOptions, options);
+ this.data = [];
+ this.maxValue = Number.NaN;
+ this.minValue = Number.NaN;
+ }
+
+ TimeSeries.defaultOptions = {
+ resetBoundsInterval: 3000,
+ resetBounds: false
+ };
+
+ /**
+ * Recalculate the min/max values for this <code>TimeSeries</code> object.
+ *
+ * This causes the graph to scale itself in the y-axis.
+ */
+ TimeSeries.prototype.resetBounds = function() {
+ if (this.data.length) {
+
+ this.maxValue = this.data[0][1];
+ this.minValue = this.data[0][1];
+ for (var i = 1; i < this.data.length; i++) {
+ var value = this.data[i][1];
+ if (value > this.maxValue) {
+ this.maxValue = value;
+ }
+ if (value < this.minValue) {
+ this.minValue = value;
+ }
+ }
+ } else {
+
+ this.maxValue = Number.NaN;
+ this.minValue = Number.NaN;
+ }
+ };
+
+ /**
+ * Adds a new data point to the <code>TimeSeries</code>, preserving chronological order.
+ *
+ * @param timestamp the position, in time, of this data point
+ * @param value the value of this data point
+ * @param sumRepeatedTimeStampValues if <code>timestamp</code> has an exact match in the series, this flag controls
+ * whether the existing value is replaced or the two values are summed (defaults to false).
+ */
+ TimeSeries.prototype.append = function(timestamp, value, sumRepeatedTimeStampValues) {
+ // Rewind past any newer timestamps, so out-of-order appends still
+ // land in chronological position.
+ var i = this.data.length - 1;
+ while (i > 0 && this.data[i][0] > timestamp) {
+ i--;
+ }
+
+ if (this.data.length > 0 && this.data[i][0] === timestamp) {
+ // Exact timestamp match: sum with, or replace, the existing value.
+ if (sumRepeatedTimeStampValues) {
+ this.data[i][1] += value;
+ value = this.data[i][1];
+ } else {
+ this.data[i][1] = value;
+ }
+ } else if (i < this.data.length - 1) {
+ // Splice into the middle of the series.
+ this.data.splice(i + 1, 0, [timestamp, value]);
+ } else {
+ // Common case: append to the end.
+ this.data.push([timestamp, value]);
+ }
+
+ this.maxValue = isNaN(this.maxValue) ? value : Math.max(this.maxValue, value);
+ this.minValue = isNaN(this.minValue) ? value : Math.min(this.minValue, value);
+ };
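+
+ /*
+ * Usage sketch (illustrative only; <code>series</code> is a hypothetical name):
+ *
+ * <pre>
+ * var series = new TimeSeries();
+ * series.append(1000, 10);
+ * series.append(1000, 5, true); // exact timestamp match, summed: value is 15
+ * series.append(1000, 5); // exact timestamp match, replaced: value is 5
+ * </pre>
+ */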
+
+ TimeSeries.prototype.dropOldData = function(oldestValidTime, maxDataSetLength) {
+ // Remove points that have scrolled off-screen, but always keep one
+ // expired point (so the line still enters the chart from the left
+ // edge) and at least maxDataSetLength points in total.
+ var removeCount = 0;
+ while (this.data.length - removeCount >= maxDataSetLength && this.data[removeCount + 1][0] < oldestValidTime) {
+ removeCount++;
+ }
+ if (removeCount !== 0) {
+ this.data.splice(0, removeCount);
+ }
+ };
+
+ /**
+ * Initialises a new <code>SmoothieChart</code>.
+ *
+ * Options are optional, and should be of the form below. Just specify the values you
+ * need and the rest will be given sensible defaults as shown:
+ *
+ * <pre>
+ * {
+ * minValue: undefined,
+ * maxValue: undefined,
+ * maxValueScale: 1,
+ * yRangeFunction: undefined,
+ * scaleSmoothing: 0.125,
+ * millisPerPixel: 20,
+ * maxDataSetLength: 2,
+ * interpolation: 'bezier',
+ * timestampFormatter: null,
+ * horizontalLines: [],
+ * grid:
+ * {
+ * fillStyle: '#000000',
+ * lineWidth: 1,
+ * strokeStyle: '#777777',
+ * millisPerLine: 1000,
+ * sharpLines: false,
+ * verticalSections: 2,
+ * borderVisible: true
+ * },
+ * labels:
+ * {
+ * disabled: false,
+ * fillStyle: '#ffffff',
+ * fontSize: 10,
+ * fontFamily: 'monospace',
+ * precision: 2
+ * },
+ * }
+ * </pre>
+ *
+ * @constructor
+ */
+ function SmoothieChart(options) {
+ this.options = Util.extend({}, SmoothieChart.defaultChartOptions, options);
+ this.seriesSet = [];
+ this.currentValueRange = 1;
+ this.currentVisMinValue = 0;
+ }
+
+ SmoothieChart.defaultChartOptions = {
+ millisPerPixel: 20,
+ maxValueScale: 1,
+ interpolation: 'bezier',
+ scaleSmoothing: 0.125,
+ maxDataSetLength: 2,
+ grid: {
+ fillStyle: '#000000',
+ strokeStyle: '#777777',
+ lineWidth: 1,
+ sharpLines: false,
+ millisPerLine: 1000,
+ verticalSections: 2,
+ borderVisible: true
+ },
+ labels: {
+ fillStyle: '#ffffff',
+ disabled: false,
+ fontSize: 10,
+ fontFamily: 'monospace',
+ precision: 2
+ },
+ horizontalLines: []
+ };
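+
+ /*
+ * Construction sketch (illustrative only): options passed to the constructor
+ * are deep-merged over the defaults above, so partial overrides are safe.
+ *
+ * <pre>
+ * var chart = new SmoothieChart({millisPerPixel: 50, grid: {millisPerLine: 5000}});
+ * // chart.options.grid.strokeStyle is still the default '#777777'
+ * </pre>
+ */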
+
+ SmoothieChart.AnimateCompatibility = (function() {
+ var lastTime = 0,
+ requestAnimationFrame = function(callback, element) {
+ var requestAnimationFrame =
+ window.requestAnimationFrame ||
+ window.webkitRequestAnimationFrame ||
+ window.mozRequestAnimationFrame ||
+ window.oRequestAnimationFrame ||
+ window.msRequestAnimationFrame ||
+ function(callback) {
+ var currTime = new Date().getTime(),
+ timeToCall = Math.max(0, 16 - (currTime - lastTime)),
+ id = window.setTimeout(function() {
+ callback(currTime + timeToCall);
+ }, timeToCall);
+ lastTime = currTime + timeToCall;
+ return id;
+ };
+ return requestAnimationFrame.call(window, callback, element);
+ },
+ cancelAnimationFrame = function(id) {
+ var cancelAnimationFrame =
+ window.cancelAnimationFrame ||
+ function(id) {
+ clearTimeout(id);
+ };
+ return cancelAnimationFrame.call(window, id);
+ };
+
+ return {
+ requestAnimationFrame: requestAnimationFrame,
+ cancelAnimationFrame: cancelAnimationFrame
+ };
+ })();
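+
+ // Illustrative note: AnimateCompatibility wraps the vendor-prefixed
+ // requestAnimationFrame implementations and falls back to a setTimeout
+ // that targets roughly 60 frames per second (16ms per frame), e.g.:
+ //
+ // var id = SmoothieChart.AnimateCompatibility.requestAnimationFrame(draw);
+ // SmoothieChart.AnimateCompatibility.cancelAnimationFrame(id);
+ //
+ // where draw is a hypothetical callback taking a timestamp argument.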
+
+ SmoothieChart.defaultSeriesPresentationOptions = {
+ lineWidth: 1,
+ strokeStyle: '#ffffff'
+ };
+
+ /**
+ * Adds a <code>TimeSeries</code> to this chart, with optional presentation options.
+ *
+ * Presentation options should be of the form (defaults shown):
+ *
+ * <pre>
+ * {
+ * lineWidth: 1,
+ * strokeStyle: '#ffffff',
+ * fillStyle: undefined
+ * }
+ * </pre>
+ */
+ SmoothieChart.prototype.addTimeSeries = function(timeSeries, options) {
+ this.seriesSet.push({timeSeries: timeSeries, options: Util.extend({}, SmoothieChart.defaultSeriesPresentationOptions, options)});
+ if (timeSeries.options.resetBounds && timeSeries.options.resetBoundsInterval > 0) {
+ timeSeries.resetBoundsTimerId = setInterval(
+ function() {
+ timeSeries.resetBounds();
+ },
+ timeSeries.options.resetBoundsInterval
+ );
+ }
+ };
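+
+ /*
+ * Usage sketch (illustrative only; <code>chart</code> and <code>series</code>
+ * are assumed to exist):
+ *
+ * <pre>
+ * chart.addTimeSeries(series,
+ * {strokeStyle: '#00ff00', fillStyle: 'rgba(0, 255, 0, 0.3)', lineWidth: 2});
+ * </pre>
+ */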
+
+ /**
+ * Removes the specified <code>TimeSeries</code> from the chart.
+ */
+ SmoothieChart.prototype.removeTimeSeries = function(timeSeries) {
+ var numSeries = this.seriesSet.length;
+ for (var i = 0; i < numSeries; i++) {
+ if (this.seriesSet[i].timeSeries === timeSeries) {
+ this.seriesSet.splice(i, 1);
+ break;
+ }
+ }
+ if (timeSeries.resetBoundsTimerId) {
+ clearInterval(timeSeries.resetBoundsTimerId);
+ }
+ };
+
+ /**
+ * Instructs the <code>SmoothieChart</code> to start rendering to the provided canvas, with specified delay.
+ *
+ * @param canvas the target canvas element
+ * @param delayMillis an amount of time to wait before a data point is shown. This can prevent the end of the series
+ * from appearing on screen, with new values flashing into view, at the expense of some latency.
+ */
+ SmoothieChart.prototype.streamTo = function(canvas, delayMillis) {
+ this.canvas = canvas;
+ this.delay = delayMillis;
+ this.start();
+ };
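+
+ /*
+ * End-to-end sketch (illustrative only; assumes a <canvas id="chart"> element
+ * exists and that new data arrives once per second):
+ *
+ * <pre>
+ * var series = new TimeSeries();
+ * var chart = new SmoothieChart();
+ * chart.addTimeSeries(series, {strokeStyle: '#00ff00'});
+ * chart.streamTo(document.getElementById('chart'), 1000); // 1s delay
+ * setInterval(function() {
+ * series.append(new Date().getTime(), Math.random());
+ * }, 1000);
+ * </pre>
+ */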
+
+ /**
+ * Starts the animation of this chart.
+ */
+ SmoothieChart.prototype.start = function() {
+ if (this.frame) {
+ return;
+ }
+
+ var animate = function() {
+ this.frame = SmoothieChart.AnimateCompatibility.requestAnimationFrame(function() {
+ this.render();
+ animate();
+ }.bind(this));
+ }.bind(this);
+
+ animate();
+ };
+
+ /**
+ * Stops the animation of this chart.
+ */
+ SmoothieChart.prototype.stop = function() {
+ if (this.frame) {
+ SmoothieChart.AnimateCompatibility.cancelAnimationFrame(this.frame);
+ delete this.frame;
+ }
+ };
+
+ SmoothieChart.prototype.updateValueRange = function() {
+ var chartOptions = this.options,
+ chartMaxValue = Number.NaN,
+ chartMinValue = Number.NaN;
+
+ for (var d = 0; d < this.seriesSet.length; d++) {
+ var timeSeries = this.seriesSet[d].timeSeries;
+ if (!isNaN(timeSeries.maxValue)) {
+ chartMaxValue = !isNaN(chartMaxValue) ? Math.max(chartMaxValue, timeSeries.maxValue) : timeSeries.maxValue;
+ }
+
+ if (!isNaN(timeSeries.minValue)) {
+ chartMinValue = !isNaN(chartMinValue) ? Math.min(chartMinValue, timeSeries.minValue) : timeSeries.minValue;
+ }
+ }
+
+ if (chartOptions.maxValue != null) {
+ chartMaxValue = chartOptions.maxValue;
+ } else {
+ chartMaxValue *= chartOptions.maxValueScale;
+ }
+
+ if (chartOptions.minValue != null) {
+ chartMinValue = chartOptions.minValue;
+ }
+
+ if (this.options.yRangeFunction) {
+ var range = this.options.yRangeFunction({min: chartMinValue, max: chartMaxValue});
+ chartMinValue = range.min;
+ chartMaxValue = range.max;
+ }
+
+ if (!isNaN(chartMaxValue) && !isNaN(chartMinValue)) {
+ var targetValueRange = chartMaxValue - chartMinValue;
+ this.currentValueRange += chartOptions.scaleSmoothing * (targetValueRange - this.currentValueRange);
+ this.currentVisMinValue += chartOptions.scaleSmoothing * (chartMinValue - this.currentVisMinValue);
+ }
+
+ this.valueRange = { min: chartMinValue, max: chartMaxValue };
+ };
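+
+ // Illustrative note: the yRangeFunction option lets callers override the
+ // computed bounds before the scaleSmoothing easing is applied, e.g. to pin
+ // the minimum of the y-axis at zero:
+ //
+ // new SmoothieChart({yRangeFunction: function(range) {
+ // return {min: 0, max: isNaN(range.max) ? 1 : range.max};
+ // }});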
+
+ SmoothieChart.prototype.render = function(canvas, time) {
+ canvas = canvas || this.canvas;
+ time = time || new Date().getTime() - (this.delay || 0);
+ // Round time down to pixel granularity for smoother motion.
+ time -= time % this.options.millisPerPixel;
+
+ var context = canvas.getContext('2d'),
+ chartOptions = this.options,
+ dimensions = { top: 0, left: 0, width: canvas.clientWidth, height: canvas.clientHeight },
+ oldestValidTime = time - (dimensions.width * chartOptions.millisPerPixel),
+ valueToYPixel = function(value) {
+ var offset = value - this.currentVisMinValue;
+ return this.currentValueRange === 0
+ ? dimensions.height
+ : dimensions.height - (Math.round((offset / this.currentValueRange) * dimensions.height));
+ }.bind(this),
+ timeToXPixel = function(t) {
+ return Math.round(dimensions.width - ((time - t) / chartOptions.millisPerPixel));
+ };
+
+ this.updateValueRange();
+
+ context.font = chartOptions.labels.fontSize + 'px ' + chartOptions.labels.fontFamily;
+
+ context.save();
+
+ context.translate(dimensions.left, dimensions.top);
+
+ context.beginPath();
+ context.rect(0, 0, dimensions.width, dimensions.height);
+ context.clip();
+
+ context.save();
+ context.fillStyle = chartOptions.grid.fillStyle;
+ context.clearRect(0, 0, dimensions.width, dimensions.height);
+ context.fillRect(0, 0, dimensions.width, dimensions.height);
+ context.restore();
+
+ context.save();
+ context.lineWidth = chartOptions.grid.lineWidth;
+ context.strokeStyle = chartOptions.grid.strokeStyle;
+ if (chartOptions.grid.millisPerLine > 0) {
+ // minValueString is not computed until the labels are drawn below, so
+ // measure the min-value label directly from the current value range here.
+ var minLabel = parseFloat(this.valueRange.min).toFixed(chartOptions.labels.precision),
+ textUntilX = dimensions.width - context.measureText(minLabel).width + 4;
+ for (var t = time - (time % chartOptions.grid.millisPerLine);
+ t >= oldestValidTime;
+ t -= chartOptions.grid.millisPerLine) {
+ var gx = timeToXPixel(t);
+ if (chartOptions.grid.sharpLines) {
+ gx -= 0.5;
+ }
+ context.beginPath();
+ context.moveTo(gx, 0);
+ context.lineTo(gx, dimensions.height);
+ context.stroke();
+ context.closePath();
+
+ if (chartOptions.timestampFormatter && gx < textUntilX) {
+
+ var tx = new Date(t),
+ ts = chartOptions.timestampFormatter(tx),
+ tsWidth = context.measureText(ts).width;
+ textUntilX = gx - tsWidth - 2;
+ context.fillStyle = chartOptions.labels.fillStyle;
+ context.fillText(ts, gx - tsWidth, dimensions.height - 2);
+ }
+ }
+ }
+
+ for (var v = 1; v < chartOptions.grid.verticalSections; v++) {
+ var gy = Math.round(v * dimensions.height / chartOptions.grid.verticalSections);
+ if (chartOptions.grid.sharpLines) {
+ gy -= 0.5;
+ }
+ context.beginPath();
+ context.moveTo(0, gy);
+ context.lineTo(dimensions.width, gy);
+ context.stroke();
+ context.closePath();
+ }
+ if (chartOptions.grid.borderVisible) {
+ context.beginPath();
+ context.strokeRect(0, 0, dimensions.width, dimensions.height);
+ context.closePath();
+ }
+ context.restore();
+
+ if (chartOptions.horizontalLines && chartOptions.horizontalLines.length) {
+ for (var hl = 0; hl < chartOptions.horizontalLines.length; hl++) {
+ var line = chartOptions.horizontalLines[hl],
+ hly = Math.round(valueToYPixel(line.value)) - 0.5;
+ context.strokeStyle = line.color || '#ffffff';
+ context.lineWidth = line.lineWidth || 1;
+ context.beginPath();
+ context.moveTo(0, hly);
+ context.lineTo(dimensions.width, hly);
+ context.stroke();
+ context.closePath();
+ }
+ }
+
+ for (var d = 0; d < this.seriesSet.length; d++) {
+ context.save();
+ var timeSeries = this.seriesSet[d].timeSeries,
+ dataSet = timeSeries.data,
+ seriesOptions = this.seriesSet[d].options;
+
+ timeSeries.dropOldData(oldestValidTime, chartOptions.maxDataSetLength);
+
+ context.lineWidth = seriesOptions.lineWidth;
+ context.strokeStyle = seriesOptions.strokeStyle;
+ context.beginPath();
+ var firstX = 0, lastX = 0, lastY = 0;
+ for (var i = 0; i < dataSet.length && dataSet.length !== 1; i++) {
+ var x = timeToXPixel(dataSet[i][0]),
+ y = valueToYPixel(dataSet[i][1]);
+
+ if (i === 0) {
+ firstX = x;
+ context.moveTo(x, y);
+ } else {
+ switch (chartOptions.interpolation) {
+ case "linear":
+ case "line": {
+ context.lineTo(x,y);
+ break;
+ }
+ case "bezier":
+ default: {
+ // Draw a cubic bezier from the previous point (lastX, lastY) to the
+ // new point (x, y). Both control points sit at the horizontal
+ // midpoint: the first keeps the previous y, the second the new y, so
+ // adjacent curve segments flow into one another smoothly.
+ context.bezierCurveTo(
+ Math.round((lastX + x) / 2), lastY,
+ Math.round((lastX + x) / 2), y,
+ x, y);
+ break;
+ }
+ }
+ }
+
+ lastX = x; lastY = y;
+ }
+
+ if (dataSet.length > 1) {
+ if (seriesOptions.fillStyle) {
+
+ context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, lastY);
+ context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, dimensions.height + seriesOptions.lineWidth + 1);
+ context.lineTo(firstX, dimensions.height + seriesOptions.lineWidth);
+ context.fillStyle = seriesOptions.fillStyle;
+ context.fill();
+ }
+
+ if (seriesOptions.strokeStyle && seriesOptions.strokeStyle !== 'none') {
+ context.stroke();
+ }
+ context.closePath();
+ }
+ context.restore();
+ }
+
+ // Draw the max/min value labels in the top-right and bottom-right corners.
+ if (!chartOptions.labels.disabled && !isNaN(this.valueRange.min) && !isNaN(this.valueRange.max)) {
+ var maxValueString = parseFloat(this.valueRange.max).toFixed(chartOptions.labels.precision),
+ minValueString = parseFloat(this.valueRange.min).toFixed(chartOptions.labels.precision);
+ context.fillStyle = chartOptions.labels.fillStyle;
+ context.fillText(maxValueString, dimensions.width - context.measureText(maxValueString).width - 2, chartOptions.labels.fontSize);
+ context.fillText(minValueString, dimensions.width - context.measureText(minValueString).width - 2, dimensions.height - 2);
+ }
+
+ context.restore();
+ };
+
+
+ SmoothieChart.timeFormatter = function(date) {
+ function pad2(number) { return (number < 10 ? '0' : '') + number; }
+ return pad2(date.getHours()) + ':' + pad2(date.getMinutes()) + ':' + pad2(date.getSeconds());
+ };
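+
+ // Usage sketch (illustrative only): pass the formatter as a chart option to
+ // draw HH:MM:SS labels beside the vertical grid lines:
+ //
+ // new SmoothieChart({timestampFormatter: SmoothieChart.timeFormatter});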
+
+ exports.TimeSeries = TimeSeries;
+ exports.SmoothieChart = SmoothieChart;
+
+})(typeof exports === 'undefined' ? this : exports);
+
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
index 777d90b..f65870d 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
@@ -20,10 +20,14 @@
import junit.framework.Test;
import junit.framework.TestSuite;
+
+import org.apache.commons.lang3.StringUtils;
+
import edu.uci.ics.asterix.aql.parser.ParseException;
public class AQLTestSuite extends TestSuite {
- private static String AQLTS_PATH = "src/test/resources/AQLTS/queries/";
+ private static String AQLTS_PATH = StringUtils.join(new String[] {"src", "test",
+ "resources", "AQLTS", "queries" + File.separator}, File.separator);
public static Test suite() throws ParseException, UnsupportedEncodingException, FileNotFoundException {
File testData = new File(AQLTS_PATH);
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
index c781562..33e1f88 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
@@ -30,7 +30,7 @@
public class DmlTest {
private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
- private static final String PATH_ACTUAL = "dmltest/";
+ private static final String PATH_ACTUAL = "dmltest" + File.separator;
private static final String SEPARATOR = File.separator;
private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "dmlts"
+ SEPARATOR;
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 98cc330..771b87f 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -19,6 +19,7 @@
import java.util.Collection;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -41,8 +42,9 @@
private TestCaseContext tcCtx;
- private static final String PATH_ACTUAL = "mdtest/";
- private static final String PATH_BASE = "src/test/resources/metadata/";
+ private static final String PATH_ACTUAL = "mdtest" + File.separator;
+ private static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources",
+ "metadata" + File.separator }, File.separator);
private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
private static final String WEB_SERVER_PORT = "19002";
@@ -76,8 +78,6 @@
for (String d : AsterixHyracksIntegrationUtil.ASTERIX_DATA_DIRS) {
TestsUtils.deleteRec(new File(d));
}
-
- deleteTransactionLogs();
}
private static void deleteTransactionLogs() throws Exception {
@@ -105,7 +105,7 @@
@Test
public void test() throws Exception {
- TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null);
+ TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
}
-}
\ No newline at end of file
+}
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
index 53b7995..189d113 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
@@ -106,8 +106,7 @@
if (files == null || files.length == 0) {
outdir.delete();
}
-
- deleteTransactionLogs();
+ AsterixHyracksIntegrationUtil.deinit();
}
private static void suiteBuild(File dir, Collection<Object[]> testArgs, String path) {
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index eb256c6..193cf06 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -17,8 +17,11 @@
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -40,8 +43,12 @@
*/
@RunWith(Parameterized.class)
public class ExecutionTest {
- private static final String PATH_ACTUAL = "rttest/";
- private static final String PATH_BASE = "src/test/resources/runtimets/";
+
+ private static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
+
+ private static final String PATH_ACTUAL = "rttest" + File.separator;
+ private static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
+ File.separator);
private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
@@ -50,6 +57,10 @@
@BeforeClass
public static void setUp() throws Exception {
+ System.out.println("Starting setup");
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting setup");
+ }
System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
File outdir = new File(PATH_ACTUAL);
@@ -60,16 +71,22 @@
deleteTransactionLogs();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("initializing pseudo cluster");
+ }
AsterixHyracksIntegrationUtil.init();
- // TODO: Uncomment when hadoop version is upgraded and adapters are
- // ported.
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("initializing HDFS");
+ }
+
HDFSCluster.getInstance().setup();
// Set the node resolver to be the identity resolver that expects node names
// to be node controller ids; a valid assumption in test environment.
System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
IdentitiyResolverFactory.class.getName());
+
}
@AfterClass
@@ -84,8 +101,6 @@
for (String d : ASTERIX_DATA_DIRS) {
TestsUtils.deleteRec(new File(d));
}
-
- deleteTransactionLogs();
HDFSCluster.getInstance().cleanup();
}
@@ -116,6 +131,6 @@
@Test
public void test() throws Exception {
- TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null);
+ TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
}
}
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
index d6a813c..2318a04 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
@@ -88,8 +88,10 @@
}
public void cleanup() throws Exception {
- dfsCluster.shutdown();
- cleanupLocal();
+ if (dfsCluster != null) {
+ dfsCluster.shutdown();
+ cleanupLocal();
+ }
}
public static void main(String[] args) throws Exception {
diff --git a/asterix-app/src/test/resources/hadoop/conf/core-site.xml b/asterix-app/src/test/resources/hadoop/conf/core-site.xml
index 5b1023c..433325a 100644
--- a/asterix-app/src/test/resources/hadoop/conf/core-site.xml
+++ b/asterix-app/src/test/resources/hadoop/conf/core-site.xml
@@ -21,7 +21,7 @@
<property>
<name>fs.default.name</name>
- <value>hdfs://127.0.0.1:31888</value>
+ <value>hdfs://192.168.0.103:31888</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
diff --git a/asterix-app/src/test/resources/logging.properties b/asterix-app/src/test/resources/logging.properties
index cf1457f..c904be4 100644
--- a/asterix-app/src/test/resources/logging.properties
+++ b/asterix-app/src/test/resources/logging.properties
@@ -79,3 +79,5 @@
#edu.uci.ics.asterix.level = FINE
#edu.uci.ics.hyracks.algebricks.level = FINE
#edu.uci.ics.hyracks.level = INFO
+edu.uci.ics.asterix.test = INFO
+edu.uci.ics.asterix.installer.test = INFO
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
index dc11189..dc662b5 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
@@ -5,5 +5,5 @@
*/
for $l in dataset('Metadata.DatasourceAdapter')
+order by $l.AdapterName
return $l
-
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
index bbc08e8..5cd2069 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
@@ -1,5 +1,6 @@
use dataverse Metadata;
for $c in dataset('Dataset')
+where $c.DataverseName='Metadata'
return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
index b1e3979..866e241 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
@@ -1,4 +1,5 @@
use dataverse Metadata;
for $c in dataset('Datatype')
+where $c.DataverseName='Metadata'
return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.1.ddl.aql
new file mode 100644
index 0000000..a017e75
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.1.ddl.aql
@@ -0,0 +1,6 @@
+/*
+ * Description : query the Metadata dataset CompactionPolicy
+ * Expected Res : Success
+ * Date : 13 Nov. 2013
+ * Issue : 646
+ */
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.2.update.aql
new file mode 100644
index 0000000..a017e75
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.2.update.aql
@@ -0,0 +1,6 @@
+/*
+ * Description : query the Metadata dataset CompactionPolicy
+ * Expected Res : Success
+ * Date : 13 Nov. 2013
+ * Issue : 646
+ */
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.3.query.aql
new file mode 100644
index 0000000..a9c9b19
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_compaction_policy/metadata_compaction_policy.3.query.aql
@@ -0,0 +1,9 @@
+/*
+ * Description : query the Metadata dataset CompactionPolicy
+ * Expected Res : Success
+ * Date : 13 Nov. 2013
+ * Issue : 646
+ */
+
+for $x in dataset Metadata.CompactionPolicy
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset/metadata_dataset.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset/metadata_dataset.3.query.aql
index bbc08e8..5cd2069 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset/metadata_dataset.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset/metadata_dataset.3.query.aql
@@ -1,5 +1,6 @@
use dataverse Metadata;
for $c in dataset('Dataset')
+where $c.DataverseName='Metadata'
return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype/metadata_datatype.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype/metadata_datatype.3.query.aql
index 1f19f84..5f7eede 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype/metadata_datatype.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype/metadata_datatype.3.query.aql
@@ -1,4 +1,5 @@
use dataverse Metadata;
for $c in dataset('Datatype')
+where $c.DataverseName='Metadata'
return $c
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_1/issue_251_dataset_hint_1.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_1/issue_251_dataset_hint_1.1.adm
index e69de29..f7672a7 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_1/issue_251_dataset_hint_1.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_1/issue_251_dataset_hint_1.1.adm
Binary files differ
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
index 7c82b18..1434eca 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:11:26 PST 2013" }
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "Autogenerated": false, "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Mon Aug 26 13:22:02 PDT 2013", "DatasetId": 106, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2/issue_251_dataset_hint_2.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2/issue_251_dataset_hint_2.1.adm
index 0fb70db..8bd6917 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2/issue_251_dataset_hint_2.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2/issue_251_dataset_hint_2.1.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Mon Sep 23 00:01:03 PDT 2013", "DatasetId": 106, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Thu Sep 26 03:03:21 PDT 2013", "DatasetId": 103, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
index f931b40..d24d85b 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:00:38 PST 2013" }
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "Autogenerated": false, "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Mon Aug 26 13:22:02 PDT 2013", "DatasetId": 107, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3/issue_251_dataset_hint_3.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3/issue_251_dataset_hint_3.1.adm
index 9ec7ae7..0054ecc 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3/issue_251_dataset_hint_3.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3/issue_251_dataset_hint_3.1.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Mon Sep 23 00:04:09 PDT 2013", "DatasetId": 107, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Thu Sep 26 03:05:13 PDT 2013", "DatasetId": 104, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
index efd3a7e..d20a857 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:59:57 PST 2013" }
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "Autogenerated": false, "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:22:03 PDT 2013", "DatasetId": 108, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4/issue_251_dataset_hint_4.1.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4/issue_251_dataset_hint_4.1.adm
index 98f9d7d..cee173c 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4/issue_251_dataset_hint_4.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4/issue_251_dataset_hint_4.1.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:25:30 PDT 2013", "DatasetId": 108, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:07:19 PDT 2013", "DatasetId": 105, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
index 394af5c..c3b36bf 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
@@ -1 +1 @@
-{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:34 PST 2013" }
+{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "Autogenerated": false, "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 12:17:53 PDT 2013", "DatasetId": 101, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta02/meta02.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta02/meta02.1.adm
index 8fb7bc2..f66cd9f 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta02/meta02.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta02/meta02.1.adm
@@ -1 +1 @@
-{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:09 PDT 2013", "DatasetId": 101, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 02:41:09 PDT 2013", "DatasetId": 101, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
index 9bcb2a4..bb7cfca 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:55:25 PST 2013" }
+{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "Autogenerated": false, "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:15 PDT 2013", "DatasetId": 103, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta09/meta09.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta09/meta09.1.adm
index 371ee07..c66b89d 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta09/meta09.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta09/meta09.1.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:08 PDT 2013", "DatasetId": 103, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 02:43:46 PDT 2013", "DatasetId": 102, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
index 4414ed0..451fa99 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
@@ -1,6 +1,10 @@
-{ "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "file_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_adaptor", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_client", "Classname": "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory", "Type": "INTERNAL", "Timestamp": "Wed Nov 20 14:45:58 IST 2013" }
+{ "DataverseName": "Metadata", "Name": "twitter_firehose", "Classname": "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
index 4414ed0..66733d3 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
@@ -1,6 +1,11 @@
-{ "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
-{ "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "azure_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedAzureTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Thu Oct 24 01:39:27 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "file_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_adaptor", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_client", "Classname": "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory", "Type": "INTERNAL", "Timestamp": "Wed Nov 20 14:45:58 IST 2013" }
+{ "DataverseName": "Metadata", "Name": "twitter_firehose", "Classname": "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
index b56fe7c..8bcdb22 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
@@ -1,8 +1,12 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "DataTypeName": "FeedRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName" ], "PrimaryKey": [ "DataverseName", "FeedName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 10, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "DataTypeName": "FeedActivityRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "PrimaryKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 11, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "DataTypeName": "FeedPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "PolicyName" ], "PrimaryKey": [ "DataverseName", "PolicyName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 12, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "DataTypeName": "LibraryRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta16/meta16.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta16/meta16.1.adm
index 414d951..f33ce9d 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta16/meta16.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta16/meta16.1.adm
@@ -1,9 +1,13 @@
-{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 13, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "DataTypeName": "FeedRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName" ], "PrimaryKey": [ "DataverseName", "FeedName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 10, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "DataTypeName": "FeedActivityRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "PrimaryKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 11, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "DataTypeName": "FeedPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "PolicyName" ], "PrimaryKey": [ "DataverseName", "PolicyName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 12, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "DataTypeName": "LibraryRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index 2de89a6..8f50f49 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -1,60 +1,67 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedActivityRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "ActivityId", "FieldType": "int32" }, { "FieldName": "ActivityType", "FieldType": "string" }, { "FieldName": "Details", "FieldType": "Field_Details_in_FeedActivityRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Description", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_FeedPolicyRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "AdaptorName", "FieldType": "string" }, { "FieldName": "AdaptorConfiguration", "FieldType": "Field_AdaptorConfiguration_in_FeedRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_FeedRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Details_in_FeedActivityRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_FeedRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Properties_in_FeedPolicyRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "LibraryRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "Autogenerated", "FieldType": "boolean" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17/meta17.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17/meta17.1.adm
index 1995946..66a5a68 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17/meta17.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17/meta17.1.adm
@@ -1,67 +1,70 @@
-{ "DataverseName": "Metadata", "DatatypeName": "CompactionPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatatypeName": "CompactionPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedActivityRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "ActivityId", "FieldType": "int32" }, { "FieldName": "ActivityType", "FieldType": "string" }, { "FieldName": "Details", "FieldType": "Field_Details_in_FeedActivityRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Description", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_FeedPolicyRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "AdaptorName", "FieldType": "string" }, { "FieldName": "AdaptorConfiguration", "FieldType": "Field_AdaptorConfiguration_in_FeedRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_FeedRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Details_in_FeedActivityRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_FeedRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Properties_in_FeedPolicyRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "LibraryRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "Autogenerated", "FieldType": "boolean" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Thu Oct 24 01:40:50 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta19.adm b/asterix-app/src/test/resources/metadata/results/basic/meta19.adm
index 607bfd1..ee088a1 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta19.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta19.adm
@@ -1,11 +1,15 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "IndexName": "Feed", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "IndexName": "FeedActivity", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "IndexName": "FeedPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "PolicyName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "IndexName": "Library", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:46:42 PDT 2013", "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta19/meta19.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta19/meta19.1.adm
index 7fdcb8d..a8dfc95 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta19/meta19.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta19/meta19.1.adm
@@ -1,12 +1,16 @@
-{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "IndexName": "CompactionPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "CompactionPolicy" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:04:06 PDT 2013", "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "IndexName": "CompactionPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "CompactionPolicy" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "IndexName": "Feed", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "IndexName": "FeedActivity", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "IndexName": "FeedPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "PolicyName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "IndexName": "Library", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_compaction_policy/metadata_compaction_policy.1.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_compaction_policy/metadata_compaction_policy.1.adm
new file mode 100644
index 0000000..633e93a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_compaction_policy/metadata_compaction_policy.1.adm
@@ -0,0 +1,2 @@
+{ "DataverseName": "Metadata", "CompactionPolicy": "constant", "Classname": "edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyFactory" }
+{ "DataverseName": "Metadata", "CompactionPolicy": "prefix", "Classname": "edu.uci.ics.hyracks.storage.am.lsm.common.impls.PrefixMergePolicyFactory" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm
index 8abc339..8bcdb22 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm
@@ -1,8 +1,12 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "DataTypeName": "FeedRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName" ], "PrimaryKey": [ "DataverseName", "FeedName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 10, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "DataTypeName": "FeedActivityRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "PrimaryKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 11, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "DataTypeName": "FeedPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "PolicyName" ], "PrimaryKey": [ "DataverseName", "PolicyName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 12, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "DataTypeName": "LibraryRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "Autogenerated": false, "GroupName": "MetadataGroup" }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset/metadata_dataset.1.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset/metadata_dataset.1.adm
index edf8f9d..f33ce9d 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset/metadata_dataset.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset/metadata_dataset.1.adm
@@ -1,9 +1,13 @@
-{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Mon Sep 23 00:06:07 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 13, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "DataTypeName": "FeedRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName" ], "PrimaryKey": [ "DataverseName", "FeedName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 10, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "DataTypeName": "FeedActivityRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "PrimaryKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 11, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "DataTypeName": "FeedPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "PolicyName" ], "PrimaryKey": [ "DataverseName", "PolicyName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 12, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 7, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 4, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "DataTypeName": "LibraryRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 5, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013", "DatasetId": 6, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
index b351cfb..6dc3613 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
@@ -1,56 +1,67 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "string" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedActivityRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "ActivityId", "FieldType": "int32" }, { "FieldName": "ActivityType", "FieldType": "string" }, { "FieldName": "Details", "FieldType": "Field_Details_in_FeedActivityRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Description", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_FeedPolicyRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "AdaptorName", "FieldType": "string" }, { "FieldName": "AdaptorConfiguration", "FieldType": "Field_AdaptorConfiguration_in_FeedRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_FeedRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Details_in_FeedActivityRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_FeedRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Properties_in_FeedPolicyRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "LibraryRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "Autogenerated", "FieldType": "boolean" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
\ No newline at end of file
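The golden files above churn because the metadata schema itself changed between versions: the new expectations carry extra fields (e.g. "DatasetId", "PendingOp", "Hints", "Autogenerated", "CompactionPolicy") and new builtin types ("uuid", "day-time-duration", "year-month-duration"), so every line's serialized record, and its "Timestamp", is regenerated. As a hedged illustration only, a per-line string comparison is all it takes to flag a stale expectation file like these; the class below is an invented sketch, not AsterixDB's actual result comparator, and the file paths are assumed command-line arguments.

```java
// Hypothetical sketch: compare an expected .adm golden file against actual
// query output, line by line. Each .adm line is one serialized ADM record,
// so a plain string comparison per line is sufficient to detect a stale
// expectation (e.g. a missing "PendingOp" field or an old "Timestamp").
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;

public class AdmResultDiff {
    public static void main(String[] args) throws IOException {
        List<String> expected = Files.readAllLines(Paths.get(args[0]));
        List<String> actual = Files.readAllLines(Paths.get(args[1]));
        int common = Math.min(expected.size(), actual.size());
        for (int i = 0; i < common; i++) {
            if (!expected.get(i).equals(actual.get(i))) {
                // Report the first divergence in unified-diff-like form.
                System.out.printf("First mismatch at line %d:%n- %s%n+ %s%n",
                        i + 1, expected.get(i), actual.get(i));
                return;
            }
        }
        if (expected.size() != actual.size()) {
            // One file has trailing records the other lacks (new datatypes, etc.).
            System.out.printf("Line-count mismatch: expected %d, actual %d%n",
                    expected.size(), actual.size());
        } else {
            System.out.println("Files match.");
        }
    }
}
```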
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype/metadata_datatype.1.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype/metadata_datatype.1.adm
index 4c22058..4081076 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype/metadata_datatype.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype/metadata_datatype.1.adm
@@ -1,67 +1,70 @@
-{ "DataverseName": "Metadata", "DatatypeName": "CompactionPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatatypeName": "CompactionPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "DatasetId", "FieldType": "int32" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedActivityRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "ActivityId", "FieldType": "int32" }, { "FieldName": "ActivityType", "FieldType": "string" }, { "FieldName": "Details", "FieldType": "Field_Details_in_FeedActivityRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedPolicyRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "PolicyName", "FieldType": "string" }, { "FieldName": "Description", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_FeedPolicyRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FeedRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "FeedName", "FieldType": "string" }, { "FieldName": "AdaptorName", "FieldType": "string" }, { "FieldName": "AdaptorConfiguration", "FieldType": "Field_AdaptorConfiguration_in_FeedRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_FeedRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_AdaptorConfiguration_in_FeedRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Details_in_FeedActivityRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Details_in_FeedActivityRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_FeedRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Properties_in_FeedPolicyRecordType_ItemType", "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_FeedPolicyRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" }, { "FieldName": "PendingOp", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "LibraryRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "Autogenerated", "FieldType": "boolean" }, { "FieldName": "CompactionPolicy", "FieldType": "string" }, { "FieldName": "CompactionPolicyProperties", "FieldType": "Field_CompactionPolicyProperties_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "day-time-duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Thu Oct 24 01:40:50 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Thu Sep 26 03:23:51 PDT 2013" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm
index 607bfd1..a4ee9d3 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm
@@ -1,11 +1,15 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "IndexName": "Feed", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "IndexName": "FeedActivity", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "IndexName": "FeedPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "PolicyName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "IndexName": "Library", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Tue Jul 16 22:49:39 PDT 2013", "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_index/metadata_index.1.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_index/metadata_index.1.adm
index 072bcde..a8dfc95 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_index/metadata_index.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_index/metadata_index.1.adm
@@ -1,12 +1,16 @@
-{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "IndexName": "CompactionPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "CompactionPolicy" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Mon Sep 23 00:25:26 PDT 2013", "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "IndexName": "CompactionPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "CompactionPolicy" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "Dataset", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "IndexName": "DatasourceAdapter", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "Datatype", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "IndexName": "Dataverse", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "IndexName": "Feed", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedActivity", "IndexName": "FeedActivity", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "FeedName", "DatasetName", "ActivityId" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "FeedPolicy", "IndexName": "FeedPolicy", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "PolicyName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "IndexName": "Function", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name", "Arity" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "IndexName": "Index", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatasetName", "IndexName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Library", "IndexName": "Library", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "Name" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "IndexName": "Node", "IndexStructure": "BTREE", "SearchKey": [ "NodeName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "IndexName": "Nodegroup", "IndexStructure": "BTREE", "SearchKey": [ "GroupName" ], "IsPrimary": true, "Timestamp": "Thu Sep 26 02:39:06 PDT 2013", "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index a668801..9a69eff 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -120,6 +120,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="basic">
+ <compilation-unit name="metadata_compaction_policy">
+ <output-dir compare="Text">metadata_compaction_policy</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="basic">
<compilation-unit name="metadata_dataset">
<output-dir compare="Text">metadata_dataset</output-dir>
</compilation-unit>
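The test case registered above points the runner at a metadata_compaction_policy compilation unit; the query file itself is not part of this hunk. A sketch of a query in that spirit — scanning the new Metadata.CompactionPolicy dataset whose record type appears earlier in this patch — would look like the following (an illustration, not the actual test file):

    use dataverse Metadata;

    /* list the registered compaction policies; CompactionPolicyRecordType
       carries DataverseName, CompactionPolicy, and Classname */
    for $p in dataset('CompactionPolicy')
    return $p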
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
index 1bd4b73..cd51b5e 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
@@ -52,4 +52,4 @@
let $c3 := int32("320")
let $c4 := int64("640")
return {"int8": $c1,"int16": $c2,"int32": $c3, "int8co": $o.int8co, "int64": $c4}
-*/
\ No newline at end of file
+*/
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
index b4a5796..14d31f0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
@@ -29,5 +29,5 @@
write output to nc1:"rttest/index_rtree-secondary-index-open.adm";
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(4.0,1.0), create-point(4.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
index 41a5a91..be95f18 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
@@ -29,5 +29,5 @@
write output to nc1:"rttest/index_rtree-secondary-index.adm";
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(4.0,1.0), create-point(4.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
return {"id":$o.id}
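Both optimizer tests above move create-polygon from a sequence of create-point arguments to a single flat ordered list of coordinates. The sketch below isolates just that call form, reusing the MyData dataset and its point field from the tests (a minimal illustration, assuming the list is read as x,y pairs in order):

    use dataverse test;

    /* new form: one ordered list [x1,y1, x2,y2, ...] instead of create-point values */
    for $o in dataset('MyData')
    where spatial-intersect($o.point,
          create-polygon([4.0,1.0, 4.0,4.0, 12.0,4.0, 12.0,1.0]))
    return {"id": $o.id}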
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index-search-in-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index-search-in-delete.aql
new file mode 100644
index 0000000..1c6f069
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index-search-in-delete.aql
@@ -0,0 +1,31 @@

+/*
+ * Description     : The hint to skip secondary indexes doesn't apply to the primary index
+ * : This test is intended to verify that the primary index is still used instead of a scan
+ * Expected Result : Success
+ * Date            : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type AddressType as closed {
+ number: int32,
+ street: string,
+ city: string
+}
+
+create type CustomerType as closed {
+ cid: int32,
+ name: string,
+ age: int32?,
+ address: AddressType?,
+ interests: {{string}},
+ children: [ { name: string, age: int32? } ]
+}
+
+create dataset Customers(CustomerType) primary key cid;
+
+delete $c from dataset Customers where $c.cid /*+ skip-index */ < 10;
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index.aql
new file mode 100644
index 0000000..77f9e8e
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/dont-skip-primary-index.aql
@@ -0,0 +1,23 @@
+/*
+ * Description : The hint to skip secondary indexes does not apply to the primary index
+ * : This test is intended to verify that the primary index is still used instead of a scan
+ * Expected Result : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/btree-index_btree-primary-16.adm";
+
+create type TestType as open {
+ fname : string,
+ lname : string
+}
+
+create dataset testdst(TestType) primary key fname,lname;
+
+for $emp in dataset('testdst')
+where $emp.fname /*+ skip-index */ >= "Craig" and $emp.lname /*+ skip-index */ >= "Kevin" and $emp.fname /*+ skip-index */ <= "Mary" and $emp.lname /*+ skip-index */ <= "Tomes"
+return $emp
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index-search-in-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index-search-in-delete.aql
new file mode 100644
index 0000000..b8fc5ab
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index-search-in-delete.aql
@@ -0,0 +1,24 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+
+create index ngram_index on DBLP(title) type ngram(3);
+
+delete $o from dataset DBLP where /*+ skip-index */ contains($o.title, "Multimedia")
\ No newline at end of file
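Note: contains() on a string field is the predicate shape that an ngram(3)
index serves, so this delete (and the query variant in the next file) checks
that the hint blocks the index rewrite on both the DML and query paths. For
contrast, the unhinted form that would be rewritten to use ngram_index:

    for $o in dataset('DBLP')
    where contains($o.title, "Multimedia")
    return $o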
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index.aql
new file mode 100644
index 0000000..1ae5fe9
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-ngram-index.aql
@@ -0,0 +1,28 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+
+create index ngram_index on DBLP(title) type ngram(3);
+
+write output to nc1:"rttest/inverted-index-basic_ngram-contains.adm";
+
+for $o in dataset('DBLP')
+where /*+ skip-index */ contains($o.title, "Multimedia")
+order by $o.id
+return $o
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-index-search-in-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-index-search-in-delete.aql
new file mode 100644
index 0000000..3403ef1
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-index-search-in-delete.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type MyRecord as closed {
+ id: int32,
+ point: point,
+ kwds: string,
+ line1: line,
+ line2: line,
+ poly1: polygon,
+ poly2: polygon,
+ rec: rectangle,
+ circle: circle
+}
+
+create dataset MyData(MyRecord)
+ primary key id;
+
+create index rtree_index_point on MyData(point) type rtree;
+
+delete $m from dataset MyData where /*+ skip-index */ spatial-intersect($m.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]));
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-secondary-index.aql
new file mode 100644
index 0000000..52fb83d
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-rtree-secondary-index.aql
@@ -0,0 +1,39 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type MyRecord as closed {
+ id: int32,
+ point: point,
+ kwds: string,
+ line1: line,
+ line2: line,
+ poly1: polygon,
+ poly2: polygon,
+ rec: rectangle,
+ circle: circle
+}
+
+create nodegroup group1 if not exists on nc1, nc2;
+
+create dataset MyData(MyRecord)
+ primary key id on group1;
+
+load dataset MyData
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/spatial/spatialData.json"),("format"="adm")) pre-sorted;
+
+create index rtree_index_point on MyData(point) type rtree;
+
+
+write output to nc1:"rttest/index_rtree-secondary-index.adm";
+
+for $o in dataset('MyData')
+where /*+ skip-index */ spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
+return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-2.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-2.aql
new file mode 100644
index 0000000..05156f6
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-2.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/btree-index_btree-secondary-57.adm";
+
+create type TestType as open {
+ id : int32,
+ fname : string,
+ lname : string
+}
+
+create dataset testdst(TestType) primary key id;
+
+create index sec_Idx on testdst(fname);
+
+for $emp in dataset('testdst')
+where $emp.fname /*+ skip-index */ >= "Max" and $emp.fname <= "Roger"
+return $emp
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-search-in-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-search-in-delete.aql
new file mode 100644
index 0000000..a0096ff
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index-search-in-delete.aql
@@ -0,0 +1,32 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type AddressType as closed {
+ number: int32,
+ street: string,
+ city: string
+}
+
+create type CustomerType as closed {
+ cid: int32,
+ name: string,
+ age: int32,
+ address: AddressType?,
+ interests: {{string}},
+ children: [ { name: string, age: int32? } ]
+}
+
+create dataset Customers(CustomerType) primary key cid;
+
+create index age_index on Customers(age);
+
+delete $c from dataset Customers where $c.age /*+ skip-index */ < 20;
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index.aql
new file mode 100644
index 0000000..abf77c6
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-secondary-btree-index.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/btree-index_btree-secondary-57.adm";
+
+create type TestType as open {
+ id : int32,
+ fname : string,
+ lname : string
+}
+
+create dataset testdst(TestType) primary key id;
+
+create index sec_Idx on testdst(fname);
+
+for $emp in dataset('testdst')
+where $emp.fname /*+ skip-index */ >= "Max" and $emp.fname /*+ skip-index */ <= "Roger"
+return $emp
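Note: the difference from skip-secondary-btree-index-2 above is which
conjuncts carry the hint. Here every conjunct on the indexed field is hinted
and the expected plan below degrades to a DATASOURCE_SCAN; in the -2 variant
only the lower bound is hinted, and its plan keeps a secondary BTREE_SEARCH
(presumably driven by the unhinted conjunct):

    // -2 variant: index still used via the unhinted upper bound
    where $emp.fname /*+ skip-index */ >= "Max" and $emp.fname <= "Roger"

    // this test: index fully skipped
    where $emp.fname /*+ skip-index */ >= "Max" and $emp.fname /*+ skip-index */ <= "Roger"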
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index-search-in-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index-search-in-delete.aql
new file mode 100644
index 0000000..53010a0
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index-search-in-delete.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+
+create index keyword_index on DBLP(title) type keyword;
+
+delete $o from dataset DBLP where /*+ skip-index */ similarity-jaccard(word-tokens($o.title), word-tokens("Transactions for Cooperative Environments")) >= 0.5f
\ No newline at end of file
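Note: similarity-jaccard over word-tokens() is the predicate shape served by
a keyword index, so the hint must force a full scan here (see
skip-word-index-search-in-delete.plan below, which contains a
DATASOURCE_SCAN and no search on keyword_index). The unhinted query form, as
in the skip-word-index query in the next file minus the hint:

    for $o in dataset('DBLP')
    let $jacc := similarity-jaccard(word-tokens($o.title),
                                    word-tokens("Transactions for Cooperative Environments"))
    where $jacc >= 0.5f
    return $o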
diff --git a/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index.aql b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index.aql
new file mode 100644
index 0000000..a9cdfbb
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/skip-index/skip-word-index.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to avoid using any secondary index to evaluate the predicate in the where clause
+ * Expected Res : Success
+ * Date : 21st December 2013
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+
+create index keyword_index on DBLP(title) type keyword;
+
+write output to nc1:"rttest/inverted-index-basic_word-jaccard.adm";
+
+for $o in dataset('DBLP')
+let $jacc := /*+ skip-index */ similarity-jaccard(word-tokens($o.title), word-tokens("Transactions for Cooperative Environments"))
+where $jacc >= 0.5f
+return $o
diff --git a/asterix-app/src/test/resources/optimizerts/results/push_limit.plan b/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
index c4860d3..fcd5b75 100644
--- a/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
@@ -5,11 +5,11 @@
-- STREAM_LIMIT |UNPARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$9(ASC) ] |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_LIMIT |PARTITIONED|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
index 1845ed7..993dd23 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
@@ -8,11 +8,11 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
+ -- STABLE_SORT [$$19(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- RTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
index 1845ed7..993dd23 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
@@ -8,11 +8,11 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
+ -- STABLE_SORT [$$19(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- RTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index-search-in-delete.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index-search-in-delete.plan
new file mode 100644
index 0000000..64f00d4
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index-search-in-delete.plan
@@ -0,0 +1,16 @@
+-- COMMIT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- MATERIALIZE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index.plan
new file mode 100644
index 0000000..6f13ebb
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/dont-skip-primary-index.plan
@@ -0,0 +1,8 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index-search-in-delete.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index-search-in-delete.plan
new file mode 100644
index 0000000..37c6587
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index-search-in-delete.plan
@@ -0,0 +1,21 @@
+-- COMMIT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- MATERIALIZE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index.plan
new file mode 100644
index 0000000..fa19891
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-ngram-index.plan
@@ -0,0 +1,9 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$5(ASC) ] |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-index-search-in-delete.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-index-search-in-delete.plan
new file mode 100644
index 0000000..cf609cb
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-index-search-in-delete.plan
@@ -0,0 +1,23 @@
+-- COMMIT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- MATERIALIZE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-secondary-index.plan
new file mode 100644
index 0000000..8e0e202
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-rtree-secondary-index.plan
@@ -0,0 +1,10 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-2.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-2.plan
new file mode 100644
index 0000000..807e509
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-2.plan
@@ -0,0 +1,17 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-search-in-delete.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-search-in-delete.plan
new file mode 100644
index 0000000..67ec849
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index-search-in-delete.plan
@@ -0,0 +1,21 @@
+-- COMMIT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- MATERIALIZE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index.plan
new file mode 100644
index 0000000..3693813
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-secondary-btree-index.plan
@@ -0,0 +1,10 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index-search-in-delete.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index-search-in-delete.plan
new file mode 100644
index 0000000..c3b9264
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index-search-in-delete.plan
@@ -0,0 +1,21 @@
+-- COMMIT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- MATERIALIZE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index.plan b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index.plan
new file mode 100644
index 0000000..ef7eae2
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/skip-index/skip-word-index.plan
@@ -0,0 +1,8 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443/query-issue443.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.1.ddl.aql
new file mode 100644
index 0000000..1883c94
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.1.ddl.aql
@@ -0,0 +1,24 @@
+/*
+ * Test case Name : insert-record-function.aql
+ * Description : Check that we can insert records computed by a function into a dataset
+ * Expected Result : Success
+ * Date : May 2 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type LineIDType as closed {
+ l_orderkey: int32,
+ l_linenumber: int32,
+ l_suppkey: int32
+}
+
+create dataset LineID(LineIDType)
+ primary key l_orderkey, l_linenumber;
+
+create dataset LineID2(LineIDType)
+ primary key l_orderkey, l_linenumber;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.2.update.aql
new file mode 100644
index 0000000..e155837
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.2.update.aql
@@ -0,0 +1,35 @@
+/*
+ * Test case Name : insert-record-function.aql
+ * Description : Check that we can insert records computed by a function into a dataset
+ * Expected Result : Success
+ * Date : May 2 2012
+ */
+
+use dataverse test;
+
+insert into dataset LineID (
+let $x:=1
+let $y:=2
+let $z:=3
+return {
+ "l_orderkey": $x,
+ "l_linenumber": $y,
+ "l_suppkey": $z
+}
+);
+
+insert into dataset LineID (
+let $x:=2
+let $y:=3
+let $z:=4
+return {
+ "l_orderkey": $x,
+ "l_linenumber": $y,
+ "l_suppkey": $z
+}
+);
+
+insert into dataset LineID2 (
+ for $x in dataset LineID
+ return flow-record($x)
+);
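Note: the third insert populates LineID2 from a query over LineID, applying
flow-record($x) to each record instead of inserting literals. Any
record-valued return expression works in this position; a hypothetical
equivalent that rebuilds the record field by field:

    insert into dataset LineID2 (
        for $x in dataset LineID
        return { "l_orderkey": $x.l_orderkey,
                 "l_linenumber": $x.l_linenumber,
                 "l_suppkey": $x.l_suppkey }
    );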
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.3.query.aql
new file mode 100644
index 0000000..e4763e5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-record-function/insert-record-function.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : insert-into-empty-dataset.aql
+ * Description : Check that we can insert into an empty dataset
+ * Expected Result : Success
+ * Date : May 2 2012
+ */
+
+use dataverse test;
+
+for $c in dataset('LineID2')
+order by $c.l_orderkey, $c.l_linenumber
+return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.1.ddl.aql
new file mode 100644
index 0000000..9dfefe0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.1.ddl.aql
@@ -0,0 +1,18 @@
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+create dataset DBLP1(DBLPType) primary key id;
+
+create index ngram_index on DBLP(title) type ngram(3);
+create index ngram_index1 on DBLP1(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.2.update.aql
new file mode 100644
index 0000000..b3f20ad
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.2.update.aql
@@ -0,0 +1,4 @@
+use dataverse test;
+
+load dataset DBLP using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/dblp-small-id.txt"),("format"="delimited-text"),("delimiter"=":")) pre-sorted;
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.3.query.aql
new file mode 100644
index 0000000..866b045
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-ngram-index/load-with-ngram-index.3.query.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+for $o in dataset('DBLP')
+where contains($o.title, "Multimedia")
+order by $o.id
+return $o
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.1.ddl.aql
new file mode 100644
index 0000000..6a21ae8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.1.ddl.aql
@@ -0,0 +1,22 @@
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type MyRecord as closed {
+ id: int32,
+ point: point,
+ kwds: string,
+ line1: line,
+ line2: line,
+ poly1: polygon,
+ poly2: polygon,
+ rec: rectangle,
+ circle: circle
+}
+
+create dataset MyData(MyRecord)
+ primary key id;
+
+create index rtree_index_point on MyData(point) type rtree;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.2.update.aql
new file mode 100644
index 0000000..2c4c9ed
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.2.update.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+load dataset MyData
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/spatial/spatialData.json"),("format"="adm")) pre-sorted;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.3.query.aql
new file mode 100644
index 0000000..a8d43ff
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-rtree-index/load-with-rtree-index.3.query.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+for $o in dataset('MyData')
+where spatial-intersect($o.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]))
+order by $o.id
+return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.1.ddl.aql
new file mode 100644
index 0000000..db60934
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.1.ddl.aql
@@ -0,0 +1,18 @@
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLP(DBLPType) primary key id;
+create dataset DBLP1(DBLPType) primary key id;
+
+create index keyword_index on DBLP(title) type keyword;
+create index keyword_index1 on DBLP1(title) type keyword;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.2.update.aql
new file mode 100644
index 0000000..441e8a4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.2.update.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+load dataset DBLP using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/dblp-small-id.txt"),("format"="delimited-text"),("delimiter"=":")) pre-sorted;
+
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.3.query.aql
new file mode 100644
index 0000000..92d3667
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-word-index/load-with-word-index.3.query.aql
@@ -0,0 +1,6 @@
+use dataverse test;
+
+for $o in dataset('DBLP')
+let $jacc := similarity-jaccard-check(word-tokens($o.title), word-tokens("Transactions for Cooperative Environments"), 0.5f)
+where $jacc[0]
+return $o
\ No newline at end of file
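Note: unlike the plain similarity-jaccard used in the skip-index tests, the
-check variant takes the threshold as a third argument and returns a pair
whose first element is the boolean verdict, so $jacc[0] is the "passed the
threshold" flag. The two predicates below should be equivalent, with the
check form intended to allow early termination:

    where $jacc[0]
    where similarity-jaccard(word-tokens($o.title),
          word-tokens("Transactions for Cooperative Environments")) >= 0.5f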
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable/scan-delete-rtree-secondary-index-nullable.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable/scan-delete-rtree-secondary-index-nullable.5.query.aql
index e453b2a..87fc60d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable/scan-delete-rtree-secondary-index-nullable.5.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable/scan-delete-rtree-secondary-index-nullable.5.query.aql
@@ -8,6 +8,6 @@
use dataverse test;
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(0.0,1.0), create-point(0.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index/scan-delete-rtree-secondary-index.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index/scan-delete-rtree-secondary-index.5.query.aql
index 5b8dbf7..a8d43ff 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index/scan-delete-rtree-secondary-index.5.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index/scan-delete-rtree-secondary-index.5.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(0.0,1.0), create-point(0.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable/scan-insert-rtree-secondary-index-nullable.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable/scan-insert-rtree-secondary-index-nullable.5.query.aql
index d3e9cbb..5f828b3 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable/scan-insert-rtree-secondary-index-nullable.5.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable/scan-insert-rtree-secondary-index-nullable.5.query.aql
@@ -8,6 +8,6 @@
use dataverse test;
for $o in dataset('MyMiniData')
-where spatial-intersect($o.point, create-polygon(create-point(0.0,1.0), create-point(0.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index/scan-insert-rtree-secondary-index.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index/scan-insert-rtree-secondary-index.5.query.aql
index 412f7da..fce3d9b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index/scan-insert-rtree-secondary-index.5.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index/scan-insert-rtree-secondary-index.5.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
for $o in dataset('MyMiniData')
-where spatial-intersect($o.point, create-polygon(create-point(0.0,1.0), create-point(0.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([0.0,1.0,0.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.1.ddl.aql
index 4705cce..ef2df84 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.1.ddl.aql
@@ -15,8 +15,9 @@
timestamp : string
}
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+create dataset Tweets(TweetType)
primary key id;
+create feed TweetFeed
+using file_feed
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.3.query.aql
index 5ee2e87..392e471 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01/feeds_01.3.query.aql
@@ -4,6 +4,6 @@
* Date : 24th Dec 2012
*/
-for $x in dataset('Metadata.Dataset')
-where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+for $x in dataset('Metadata.Feed')
+where $x.DataverseName='feeds' and $x.FeedName='TweetFeed'
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
index aafd2c9..a2e2f7d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
@@ -9,17 +9,18 @@
create dataverse feeds;
use dataverse feeds;
+
create type TweetType as closed {
id: string,
username : string,
location : string,
text : string,
timestamp : string
-}
+};
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+create dataset Tweets(TweetType)
primary key id;
-
+create feed TweetFeed
+using file_feed
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.2.update.aql
index 01b0925..3f06b7a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.2.update.aql
@@ -6,5 +6,7 @@
*/
use dataverse feeds;
-
-begin feed TweetFeed;
+
+set wait-for-completion-feed "true";
+
+connect feed TweetFeed to dataset Tweets;
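Note: the feeds hunks in this diff all perform the same migration: the old
coupled DDL (create feed dataset ... using an adapter factory class name,
started with "begin feed") is split into a plain dataset, a named feed bound
to a built-in adaptor alias, and an explicit connect step in the update
script. The new lifecycle, as it appears across these files:

    create dataset Tweets(TweetType) primary key id;

    create feed TweetFeed
    using file_feed
    (("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),
     ("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));

    set wait-for-completion-feed "true";
    connect feed TweetFeed to dataset Tweets;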
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
index 230aa40..3230002 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.3.query.aql
@@ -7,6 +7,6 @@
use dataverse feeds;
-for $x in dataset('TweetFeed')
+for $x in dataset('Tweets')
order by $x.id
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.1.ddl.aql
index 488ed2f..f15c481 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.1.ddl.aql
@@ -16,13 +16,14 @@
timestamp : string
}
+create dataset Tweets(TweetType)
+primary key id;
+
create function feed_processor($x) {
$x
}
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+create feed TweetFeed
+using file_feed
(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
-apply function feed_processor@1
-primary key id;
-
+apply function feed_processor;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.3.query.aql
index 1922f39..a7ee344 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03/feeds_03.3.query.aql
@@ -4,6 +4,6 @@
* Date : 24th Dec 2012
*/
-for $x in dataset('Metadata.Dataset')
-where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+for $x in dataset('Metadata.Feed')
+where $x.DataverseName='feeds' and $x.FeedName='TweetFeed'
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
index 326b2d5..8333f7c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
@@ -17,8 +17,9 @@
timestamp : string
}
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+create dataset Tweets(TweetType)
primary key id;
+create feed TweetFeed
+using file_feed
+(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.2.update.aql
index 060576e..e87111c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.2.update.aql
@@ -8,4 +8,6 @@
use dataverse feeds;
-begin feed TweetFeed;
+set wait-for-completion-feed "true";
+
+connect feed TweetFeed to dataset Tweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
index 714dd80..451acde 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.3.query.aql
@@ -7,6 +7,6 @@
*/
use dataverse feeds;
-for $x in dataset('TweetFeed')
+for $x in dataset('Tweets')
order by $x.id
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.1.ddl.aql
new file mode 100644
index 0000000..1d4f619
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.1.ddl.aql
@@ -0,0 +1,38 @@
+/*
+ * Description : Create a feed dataset that uses the synthetic feed simulator adapter.
+ The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+ The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+ Verify the existence of data after the feed finishes.
+
+ * Expected Res : Success
+ * Date : 20th Jun 2013
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TwitterUserType as closed {
+ screen-name: string,
+ lang: string,
+ friends_count: int32,
+ statuses_count: int32,
+ name: string,
+ followers_count: int32
+}
+
+create type TweetMessageType as closed {
+ tweetid: int64,
+ user: TwitterUserType,
+ sender-location: point,
+ send-time: datetime,
+ referred-topics: {{ string }},
+ message-text: string
+}
+
+create dataset SyntheticTweets(TweetMessageType)
+primary key tweetid;
+
+create feed SyntheticTweetFeed
+using twitter_firehose
+(("duration"="5"),("tps"="50"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.2.update.aql
new file mode 100644
index 0000000..d9e5404
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.2.update.aql
@@ -0,0 +1,15 @@
+/*
+ * Description : Create a feed dataset that uses the synthetic feed simulator adapter.
+ The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+ The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+ Verify the existence of data after the feed finishes.
+
+ * Expected Res : Success
+ * Date : 20th Jun 2013
+ */
+
+use dataverse feeds;
+
+set wait-for-completion-feed "true";
+
+connect feed SyntheticTweetFeed to dataset SyntheticTweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.3.query.aql
new file mode 100644
index 0000000..dd520da
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_05/feeds_05.3.query.aql
@@ -0,0 +1,21 @@
+/*
+ * Description : Create a feed dataset that uses the synthetic feed simulator adapter.
+ The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+ The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+ Verify the existence of data after the feed finishes.
+
+ * Expected Res : Success
+ * Date : 20th Jun 2013
+ */
+
+use dataverse feeds;
+
+let $totalTweets:=count(
+for $x in dataset('SyntheticTweets')
+return $x)
+return
+(if($totalTweets > 0)
+ then 1
+else
+ 0
+)
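Note: the assertion is reduced to a 1/0 flag rather than returning records
because the synthetic twitter_firehose feed generates nondeterministic
content; only the fact that some records arrived is stable across runs. The
same check written as a single expression:

    let $totalTweets := count(for $x in dataset('SyntheticTweets') return $x)
    return (if ($totalTweets > 0) then 1 else 0)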
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql
new file mode 100644
index 0000000..09bea2a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql
@@ -0,0 +1,47 @@
+/*
+ * Description : Create a feed dataset and a feed using the generic socket feed adaptor.
+ To drive the socket adaptor based feed, we define another feed using the
+ client socket test adaptor. Content from the file is read by the test adaptor
+ and forwarded to the socket feed adaptor that is listening at a known socket.
+ At the end, data is collected in the dataset that was fed by the socket feed adaptor.
+ Verify the existence of data in the dataset.
+
+ * Expected Res : Success
+ * Date : 20th Nov 2013
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TwitterUserType as closed {
+screen-name: string,
+lang: string,
+friends_count: int32,
+statuses_count: int32,
+name: string,
+followers_count: int32
+}
+
+create type TweetMessageType as closed {
+tweetid: int64,
+user: TwitterUserType,
+sender-location: point,
+send-time: datetime,
+referred-topics: {{ string }},
+message-text: string
+}
+
+create dataset MyTweets(TweetMessageType)
+primary key tweetid;
+
+create dataset DummyTweets(TweetMessageType)
+primary key tweetid;
+
+create feed socket_feed
+using socket_adaptor
+(("sockets"="127.0.0.1:9009"),("addressType"="IP"),("type-name"="TweetMessageType"),("format"="adm"));
+
+create feed client_test_feed
+using socket_client
+(("sockets"="127.0.0.1:9009"),("addressType"="IP"),("format"="adm"),("file_splits"="data/twitter/tw_messages_100.adm"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql
new file mode 100644
index 0000000..40cbd75
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql
@@ -0,0 +1,17 @@
+/*
+ * Description : Create a feed dataset and a feed using the generic socket feed adaptor.
+ To drive the socket adaptor based feed, we define another feed using the
+ client socket test adaptor. Content from the file is read by the test adaptor
+ and forwarded to the socket feed adaptor that is listening at a known socket.
+ At the end, data is collected in the dataset that was fed by the socket feed adaptor.
+ Verify the existence of data in the dataset.
+
+ * Expected Res : Success
+ * Date : 20th Nov 2013
+*/
+
+use dataverse feeds;
+
+set wait-for-completion-feed "false";
+
+connect feed socket_feed to dataset MyTweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
new file mode 100644
index 0000000..8bd1af1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
@@ -0,0 +1 @@
+2000
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql
new file mode 100644
index 0000000..13b9618
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql
@@ -0,0 +1,17 @@
+/*
+ * Description : Create a feed dataset and a feed using the generic socket feed adaptor.
+ To drive the socket adaptor based feed, we define another feed using the
+ client socket test adaptor. Content from the file is read by the test adaptor
+ and forwarded to the socket feed adaptor that is listening at a known socket.
+ At the end, data is collected in the dataset that was fed by the socket feed adaptor.
+ Verify the existence of data in the dataset.
+
+ * Expected Res : Success
+ * Date : 20th Nov 2013
+*/
+
+use dataverse feeds;
+
+set wait-for-completion-feed "true";
+
+connect feed client_test_feed to dataset DummyTweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql
new file mode 100644
index 0000000..70cf71c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql
@@ -0,0 +1,17 @@
+/*
+ * Description : Create a feed dataset and a feed using the generic socket feed adaptor.
+ To drive the socket adaptor based feed, we define another feed using the
+ client socket test adaptor. Content from the file is read by the test adaptor
+ and forwarded to the socket feed adaptor that is listening at a known socket.
+ At the end, data is collected in the dataset that was fed by the socket feed adaptor.
+ Verify the existence of data in the dataset.
+
+ * Expected Res : Success
+ * Date : 20th Nov 2013
+*/
+
+use dataverse feeds;
+
+for $x in dataset MyTweets
+order by $x.tweetid
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
index 62ac61f..d5a1c92 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
@@ -8,6 +8,7 @@
create dataverse feeds;
use dataverse feeds;
+
create type TweetType as closed {
id: string,
username : string,
@@ -16,8 +17,11 @@
timestamp : string
}
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+create dataset Tweets(TweetType)
primary key id;
+create feed TweetFeed
+using file_feed
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.2.update.aql
index 0e22d6e..fc71769 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.2.update.aql
@@ -6,5 +6,7 @@
*/
use dataverse feeds;
-
-begin feed feeds.TweetFeed;
+
+set wait-for-completion-feed "true";
+
+connect feed feeds.TweetFeed to dataset Tweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
index 48e18e2..227913d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.3.query.aql
@@ -6,6 +6,6 @@
*/
use dataverse feeds;
-for $x in dataset('TweetFeed')
+for $x in dataset('Tweets')
order by $x.id
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.1.ddl.aql
new file mode 100644
index 0000000..a90731e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description : Test hoisting a variable that does not get inlined
+ * Expected Result : Success
+ * Date : 8th November 2013
+ */
+
+drop dataverse foo if exists;
+create dataverse foo if not exists;
+use dataverse foo;
+
+create type fbuser as open {
+id: int32,
+name: string
+};
+
+create dataset fb(fbuser) primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.2.update.aql
new file mode 100644
index 0000000..839149f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.2.update.aql
@@ -0,0 +1,15 @@
+/*
+ * Description : Test hoisting a variable that does not get inlined
+ * Expected Result : Success
+ * Date : 8th November 2013
+ */
+
+use dataverse foo;
+
+insert into dataset fb(
+{"id": 1, "name": "Tom"}
+);
+
+insert into dataset fb(
+{"id": 2, "name": "Mike"}
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.3.query.aql
new file mode 100644
index 0000000..22de755
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/flwor/let33/let33.3.query.aql
@@ -0,0 +1,13 @@
+/*
+ * Description : Test hoisting a variable that does not get inlined
+ * Expected Result : Success
+ * Date : 8th November 2013
+ */
+
+use dataverse foo;
+
+let $recs := {{ {"id":1, "name": "Tom"}, {"id":2, "name": "Till"} }}
+for $f in dataset fb
+for $r in $recs
+where $r.name = $f.name
+return {"name": $r.name}
\ No newline at end of file
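Given the records inserted in step 2 ({"id": 1, "name": "Tom"} and {"id": 2, "name": "Mike"}) and the constant bag $recs holding "Tom" and "Till", the name join matches only "Tom", so the expected result of this query is the single record { "name": "Tom" }. The point of the test is that $recs is hoisted out of the loops without being inlined.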
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
index ca09806..3f000ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
@@ -17,9 +17,10 @@
timestamp : string
}
-create feed dataset TweetFeed(TweetType)
-using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+create dataset Tweets(TweetType)
primary key id
hints(cardinality=200);
+create feed TweetFeed
+using file_feed
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.2.update.aql
index 7851440..19e0930d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.2.update.aql
@@ -8,4 +8,6 @@
use dataverse feeds;
-begin feed feeds.TweetFeed;
+set wait-for-completion-feed "true";
+
+connect feed TweetFeed to dataset Tweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
index f1127f0..7ee0dcf 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.3.query.aql
@@ -8,7 +8,7 @@
use dataverse feeds;
-for $x in dataset('TweetFeed')
+for $x in dataset('Tweets')
order by $x.id
return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable/rtree-secondary-index-nullable.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable/rtree-secondary-index-nullable.4.query.aql
index 9d067bc..0589145 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable/rtree-secondary-index-nullable.4.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable/rtree-secondary-index-nullable.4.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(4.0,1.0), create-point(4.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open/rtree-secondary-index-open.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open/rtree-secondary-index-open.4.query.aql
index 9d067bc..0589145 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open/rtree-secondary-index-open.4.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open/rtree-secondary-index-open.4.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(4.0,1.0), create-point(4.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index/rtree-secondary-index.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index/rtree-secondary-index.4.query.aql
index 9d067bc..0589145 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index/rtree-secondary-index.4.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index/rtree-secondary-index.4.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
for $o in dataset('MyData')
-where spatial-intersect($o.point, create-polygon(create-point(4.0,1.0), create-point(4.0,4.0), create-point(12.0,4.0), create-point(12.0,1.0)))
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
order by $o.id
return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue442/query-issue442.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.1.ddl.aql
new file mode 100644
index 0000000..f530dd2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.1.ddl.aql
@@ -0,0 +1,15 @@
+/*
+ * Description : Issue656
+ * Expected Result : Success
+ * Date : 6 December 2013
+ * Notes : This test was written to verify the fix for issue656.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type bartype as open {
+ id: uuid
+};
+
+create dataset barset(bartype) primary key id autogenerated;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.2.update.aql
new file mode 100644
index 0000000..5b865d3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.2.update.aql
@@ -0,0 +1,14 @@
+/*
+ * Description : Issue656
+ * Expected Result : Success
+ * Date : 6 December 2013
+ * Notes : This test was written to verify the fix for issue656.
+ */
+use dataverse test;
+
+insert into dataset barset(
+ {
+ }
+);
+
+
\ No newline at end of file
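Because barset declares its uuid primary key as autogenerated, the insert supplies an empty record and the system fills in the key at insert time; a record that supplied its own id for an autogenerated key would be rejected. That generated-key path is what issue 656 exercises.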
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.3.query.aql
new file mode 100644
index 0000000..029be16
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue656/query-issue656.3.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Description : Issue656
+ * Expected Result : Success
+ * Date : 6 December 2013
+ * Notes : This test was written to verify the fix for issue656.
+ */
+use dataverse test;
+
+for $b in dataset barset
+return $b
+
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/polygon_accessor/polygon_accessor.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/polygon_accessor/polygon_accessor.3.query.aql
index 54ccf92..c00ff16 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/polygon_accessor/polygon_accessor.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/polygon_accessor/polygon_accessor.3.query.aql
@@ -6,7 +6,7 @@
use dataverse test;
-let $polygon := create-polygon(create-point(1.0,1.0), create-point(2.0,2.0), create-point(3.0,3.0), create-point(4.0,4.0))
+let $polygon := create-polygon([1.0,1.0,2.0,2.0,3.0,3.0,4.0,4.0])
let $polygon_list := get-points($polygon)
for $p in $polygon_list
return $p
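get-points decomposes a polygon into its vertices, so with the flat-list constructor above this query should return the four points (1.0,1.0), (2.0,2.0), (3.0,3.0), and (4.0,4.0), one per result line.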
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/spatial-area/spatial-area.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/spatial-area/spatial-area.3.query.aql
index 940dfe8..72a02a8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/spatial-area/spatial-area.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/spatial-area/spatial-area.3.query.aql
@@ -1,6 +1,6 @@
use dataverse test;
-let $polygonArea := spatial-area(create-polygon(create-point(1.0,1.0),create-point(1.0,4.0),create-point(3.0,4.0),create-point(3.0,1.0)))
+let $polygonArea := spatial-area(create-polygon([1.0,1.0,1.0,4.0,3.0,4.0,3.0,1.0]))
let $circleArea := spatial-area(create-circle(create-point(0.0,0.0), 1.0))
let $rectangleArea := spatial-area(create-rectangle(create-point(0.0,5.0), create-point(8.0,8.0)))
return {"polygonArea":$polygonArea, "circleArea":$circleArea, "rectangleArea":$rectangleArea}
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf23/udf23.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf23/udf23.3.query.aql
index f43c299..d2e4d2f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf23/udf23.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf23/udf23.3.query.aql
@@ -7,5 +7,7 @@
use dataverse test;
for $l in dataset('Metadata.Dataset')
+where $l.DataverseName='Metadata'
+order by $l.DatasetName
limit test.numRows()
return $l
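The added where and order by clauses make udf23 deterministic: filtering to DataverseName='Metadata' and ordering by DatasetName pins down exactly which rows survive the limit (whose row count comes from the UDF test.numRows()), independent of whatever user datasets other tests may have left behind.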
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02/cross-dv02.1.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02/cross-dv02.1.adm
index 89f66a8..ecfd466 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02/cross-dv02.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02/cross-dv02.1.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:56 PDT 2013", "DatasetId": 662, "PendingOp": 0 }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:56 PDT 2013", "DatasetId": 661, "PendingOp": 0 }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:56 PDT 2013", "DatasetId": 663, "PendingOp": 0 }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:56 PDT 2013", "DatasetId": 664, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:20:30 PDT 2013", "DatasetId": 102, "PendingOp": 0 }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:20:30 PDT 2013", "DatasetId": 101, "PendingOp": 0 }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:20:31 PDT 2013", "DatasetId": 103, "PendingOp": 0 }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:20:31 PDT 2013", "DatasetId": 104, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04/cross-dv04.1.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04/cross-dv04.1.adm
index f9981d1..103415b 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04/cross-dv04.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04/cross-dv04.1.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:57 PDT 2013", "DatasetId": 674, "PendingOp": 0 }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:57 PDT 2013", "DatasetId": 673, "PendingOp": 0 }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:57 PDT 2013", "DatasetId": 675, "PendingOp": 0 }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:57 PDT 2013", "DatasetId": 676, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:22:48 PDT 2013", "DatasetId": 110, "PendingOp": 0 }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:22:48 PDT 2013", "DatasetId": 109, "PendingOp": 0 }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:22:48 PDT 2013", "DatasetId": 111, "PendingOp": 0 }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:22:48 PDT 2013", "DatasetId": 112, "PendingOp": 0 }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
index 2d04f8b..3308085 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
@@ -1,7 +1,7 @@
-{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 684, "PendingOp": 0 }
-{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 678, "PendingOp": 0 }
-{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 681, "PendingOp": 0 }
-{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 682, "PendingOp": 0 }
-{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 679, "PendingOp": 0 }
-{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 680, "PendingOp": 0 }
-{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:58 PDT 2013", "DatasetId": 683, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 119, "PendingOp": 0 }
+{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 113, "PendingOp": 0 }
+{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 116, "PendingOp": 0 }
+{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 117, "PendingOp": 0 }
+{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 114, "PendingOp": 0 }
+{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 115, "PendingOp": 0 }
+{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Tue Sep 24 22:25:02 PDT 2013", "DatasetId": 118, "PendingOp": 0 }
\ No newline at end of file
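The regenerated expected results in these cross-dataverse tests reflect two metadata changes rather than any change in the queries themselves: InternalDetails gains an "Autogenerated" flag for the primary key, and the per-dataset "FeedDetails" field disappears because feeds are no longer modeled as a kind of dataset (see the Feed metadata records in the feeds_01/feeds_03 results below). The differing Timestamp and DatasetId values are simply artifacts of regenerating the files.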
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-record-function/insert-record-function.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-record-function/insert-record-function.1.adm
new file mode 100644
index 0000000..3ea2b0c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/insert-record-function/insert-record-function.1.adm
@@ -0,0 +1,2 @@
+{ "l_orderkey": 1, "l_linenumber": 2, "l_suppkey": 3 }
+{ "l_orderkey": 2, "l_linenumber": 3, "l_suppkey": 4 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/load-with-ngram-index/load-with-ngram-index.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/load-with-ngram-index/load-with-ngram-index.1.adm
new file mode 100644
index 0000000..8a99b26
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/load-with-ngram-index/load-with-ngram-index.1.adm
@@ -0,0 +1,3 @@
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/load-with-rtree-index/load-with-rtree-index.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/load-with-rtree-index/load-with-rtree-index.1.adm
new file mode 100644
index 0000000..5f5be7c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/load-with-rtree-index/load-with-rtree-index.1.adm
@@ -0,0 +1,3 @@
+{ "id": 10 }
+{ "id": 12 }
+{ "id": 20 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/load-with-word-index/load-with-word-index.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/load-with-word-index/load-with-word-index.1.adm
new file mode 100644
index 0000000..5bf6ae0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/load-with-word-index/load-with-word-index.1.adm
@@ -0,0 +1 @@
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
index dec4c6c..1867da3 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:44:30 PDT 2013", "DatasetId": 705, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "feeds", "FeedName": "TweetFeed", "AdaptorName": "file_feed", "AdaptorConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }}, "Function": null, "Timestamp": "Tue Sep 24 22:30:47 PDT 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
index 3fa1d10..485c7af 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:44:30 PDT 2013", "DatasetId": 707, "PendingOp": 0 }
\ No newline at end of file
+{ "DataverseName": "feeds", "FeedName": "TweetFeed", "AdaptorName": "file_feed", "AdaptorConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }}, "Function": "feeds.feed_processor@1", "Timestamp": "Tue Sep 24 22:35:03 PDT 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_05/feeds_05.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_05/feeds_05.1.adm
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_05/feeds_05.1.adm
@@ -0,0 +1 @@
+1
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm
new file mode 100644
index 0000000..f60be3e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm
@@ -0,0 +1,100 @@
+{ "tweetid": 1i64, "user": { "screen-name": "EdwardLeslie#333", "lang": "en", "friends_count": 31, "statuses_count": 107, "name": "Edward Leslie", "followers_count": 80 }, "sender-location": point("29.37,78.8"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "at&t", "network" }}, "message-text": " can't stand at&t the network is terrible:(" }
+{ "tweetid": 2i64, "user": { "screen-name": "PenniBauerle$865", "lang": "en", "friends_count": 32, "statuses_count": 308, "name": "Penni Bauerle", "followers_count": 97 }, "sender-location": point("37.99,83.51"), "send-time": datetime("2011-09-23T10:10:00.000Z"), "referred-topics": {{ "iphone", "plan" }}, "message-text": " love iphone its plan is awesome" }
+{ "tweetid": 3i64, "user": { "screen-name": "TrudiSaline$17", "lang": "en", "friends_count": 2, "statuses_count": 248, "name": "Trudi Saline", "followers_count": 154 }, "sender-location": point("48.17,93.4"), "send-time": datetime("2007-07-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "3G" }}, "message-text": " like sprint its 3G is good:)" }
+{ "tweetid": 4i64, "user": { "screen-name": "EdytheMurray#502", "lang": "en", "friends_count": 23, "statuses_count": 142, "name": "Edythe Murray", "followers_count": 164 }, "sender-location": point("24.63,90.02"), "send-time": datetime("2008-03-16T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voice-clarity" }}, "message-text": " like t-mobile the voice-clarity is good:)" }
+{ "tweetid": 5i64, "user": { "screen-name": "CoralMoon#517", "lang": "en", "friends_count": 35, "statuses_count": 3, "name": "Coral Moon", "followers_count": 67 }, "sender-location": point("32.05,75.79"), "send-time": datetime("2006-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " love samsung the touch-screen is mind-blowing" }
+{ "tweetid": 6i64, "user": { "screen-name": "CarriePinney#881", "lang": "en", "friends_count": 77, "statuses_count": 113, "name": "Carrie Pinney", "followers_count": 120 }, "sender-location": point("45.72,93.27"), "send-time": datetime("2011-12-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is awesome:)" }
+{ "tweetid": 7i64, "user": { "screen-name": "AmadoTomey_367", "lang": "en", "friends_count": 28, "statuses_count": 379, "name": "Amado Tomey", "followers_count": 119 }, "sender-location": point("43.0,96.53"), "send-time": datetime("2011-07-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " hate verizon its platform is OMG:(" }
+{ "tweetid": 8i64, "user": { "screen-name": "OdellWallace#398", "lang": "en", "friends_count": 10, "statuses_count": 89, "name": "Odell Wallace", "followers_count": 4 }, "sender-location": point("28.61,90.69"), "send-time": datetime("2012-01-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is amazing:)" }
+{ "tweetid": 9i64, "user": { "screen-name": "NickLing#80", "lang": "en", "friends_count": 99, "statuses_count": 291, "name": "Nick Ling", "followers_count": 144 }, "sender-location": point("33.59,71.74"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "speed" }}, "message-text": " hate t-mobile the speed is horrible:(" }
+{ "tweetid": 10i64, "user": { "screen-name": "MickeyDunkle_962", "lang": "en", "friends_count": 46, "statuses_count": 429, "name": "Mickey Dunkle", "followers_count": 110 }, "sender-location": point("28.72,70.51"), "send-time": datetime("2006-05-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " can't stand at&t its reachability is OMG:(" }
+{ "tweetid": 11i64, "user": { "screen-name": "AlaynaKnopsnider$684", "lang": "en", "friends_count": 70, "statuses_count": 425, "name": "Alayna Knopsnider", "followers_count": 106 }, "sender-location": point("35.4,69.61"), "send-time": datetime("2012-08-15T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " dislike sprint the voice-command is bad" }
+{ "tweetid": 12i64, "user": { "screen-name": "SeraphinaWall_37", "lang": "en", "friends_count": 34, "statuses_count": 43, "name": "Seraphina Wall", "followers_count": 101 }, "sender-location": point("27.83,95.15"), "send-time": datetime("2010-02-08T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " like motorola its signal is amazing:)" }
+{ "tweetid": 13i64, "user": { "screen-name": "TonyaKnopsnider#342", "lang": "en", "friends_count": 96, "statuses_count": 479, "name": "Tonya Knopsnider", "followers_count": 105 }, "sender-location": point("27.95,74.39"), "send-time": datetime("2008-05-26T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " dislike motorola its voicemail-service is bad" }
+{ "tweetid": 14i64, "user": { "screen-name": "SkylerStough#713", "lang": "en", "friends_count": 29, "statuses_count": 41, "name": "Skyler Stough", "followers_count": 118 }, "sender-location": point("39.72,68.97"), "send-time": datetime("2012-05-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "3G" }}, "message-text": " love iphone its 3G is awesome:)" }
+{ "tweetid": 15i64, "user": { "screen-name": "IrisMillard$830", "lang": "en", "friends_count": 9, "statuses_count": 56, "name": "Iris Millard", "followers_count": 127 }, "sender-location": point("27.59,95.34"), "send-time": datetime("2010-02-07T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " like sprint the voice-clarity is amazing" }
+{ "tweetid": 16i64, "user": { "screen-name": "KaylynBrinigh_817", "lang": "en", "friends_count": 11, "statuses_count": 448, "name": "Kaylyn Brinigh", "followers_count": 53 }, "sender-location": point("25.19,79.71"), "send-time": datetime("2005-04-06T10:10:00.000Z"), "referred-topics": {{ "samsung", "customization" }}, "message-text": " love samsung its customization is amazing:)" }
+{ "tweetid": 17i64, "user": { "screen-name": "SungHoopengarner#732", "lang": "en", "friends_count": 55, "statuses_count": 129, "name": "Sung Hoopengarner", "followers_count": 152 }, "sender-location": point("47.75,93.12"), "send-time": datetime("2010-01-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " dislike motorola its voice-command is horrible:(" }
+{ "tweetid": 18i64, "user": { "screen-name": "RenatoRyals_261", "lang": "en", "friends_count": 46, "statuses_count": 439, "name": "Renato Ryals", "followers_count": 73 }, "sender-location": point("38.48,75.0"), "send-time": datetime("2010-04-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "signal" }}, "message-text": " love sprint its signal is good:)" }
+{ "tweetid": 19i64, "user": { "screen-name": "JohnnieHanseu#755", "lang": "en", "friends_count": 84, "statuses_count": 281, "name": "Johnnie Hanseu", "followers_count": 70 }, "sender-location": point("42.75,70.91"), "send-time": datetime("2010-06-12T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "3G" }}, "message-text": " like t-mobile its 3G is mind-blowing:)" }
+{ "tweetid": 20i64, "user": { "screen-name": "LindseyRahl#362", "lang": "en", "friends_count": 27, "statuses_count": 458, "name": "Lindsey Rahl", "followers_count": 24 }, "sender-location": point("36.2,94.8"), "send-time": datetime("2007-01-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is horrible:(" }
+{ "tweetid": 21i64, "user": { "screen-name": "CearaLing$289", "lang": "en", "friends_count": 39, "statuses_count": 177, "name": "Ceara Ling", "followers_count": 40 }, "sender-location": point("39.58,71.28"), "send-time": datetime("2008-05-20T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung the reachability is amazing:)" }
+{ "tweetid": 22i64, "user": { "screen-name": "DomoniqueEisenmann_636", "lang": "en", "friends_count": 27, "statuses_count": 465, "name": "Domonique Eisenmann", "followers_count": 166 }, "sender-location": point("47.11,77.87"), "send-time": datetime("2008-10-24T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " can't stand sprint its voice-command is horrible:(" }
+{ "tweetid": 23i64, "user": { "screen-name": "MelanieGadow$539", "lang": "en", "friends_count": 34, "statuses_count": 112, "name": "Melanie Gadow", "followers_count": 65 }, "sender-location": point("31.9,87.22"), "send-time": datetime("2012-07-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " like sprint its speed is mind-blowing:)" }
+{ "tweetid": 24i64, "user": { "screen-name": "HewiePeters#654", "lang": "en", "friends_count": 8, "statuses_count": 309, "name": "Hewie Peters", "followers_count": 15 }, "sender-location": point("42.84,90.27"), "send-time": datetime("2011-02-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t its wireless is terrible:(" }
+{ "tweetid": 25i64, "user": { "screen-name": "HollisJudge#731", "lang": "en", "friends_count": 58, "statuses_count": 211, "name": "Hollis Judge", "followers_count": 190 }, "sender-location": point("34.33,83.22"), "send-time": datetime("2006-11-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-clarity" }}, "message-text": " dislike samsung its voice-clarity is OMG:(" }
+{ "tweetid": 26i64, "user": { "screen-name": "DemarcusHarrow$822", "lang": "en", "friends_count": 60, "statuses_count": 171, "name": "Demarcus Harrow", "followers_count": 151 }, "sender-location": point("37.01,80.04"), "send-time": datetime("2012-07-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "shortcut-menu" }}, "message-text": " like at&t its shortcut-menu is awesome" }
+{ "tweetid": 27i64, "user": { "screen-name": "OrsonBauerle$52", "lang": "en", "friends_count": 91, "statuses_count": 271, "name": "Orson Bauerle", "followers_count": 144 }, "sender-location": point("48.91,75.54"), "send-time": datetime("2010-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "speed" }}, "message-text": " love samsung the speed is amazing:)" }
+{ "tweetid": 28i64, "user": { "screen-name": "ChadBeach#363", "lang": "en", "friends_count": 88, "statuses_count": 275, "name": "Chad Beach", "followers_count": 142 }, "sender-location": point("35.5,73.83"), "send-time": datetime("2007-07-28T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is mind-blowing" }
+{ "tweetid": 29i64, "user": { "screen-name": "LupeNewbern#345", "lang": "en", "friends_count": 99, "statuses_count": 45, "name": "Lupe Newbern", "followers_count": 86 }, "sender-location": point("35.07,70.43"), "send-time": datetime("2010-12-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voicemail-service" }}, "message-text": " love t-mobile its voicemail-service is awesome" }
+{ "tweetid": 30i64, "user": { "screen-name": "LoydJohnston@664", "lang": "en", "friends_count": 86, "statuses_count": 10, "name": "Loyd Johnston", "followers_count": 58 }, "sender-location": point("42.55,72.33"), "send-time": datetime("2010-02-01T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone the network is awesome" }
+{ "tweetid": 31i64, "user": { "screen-name": "VerityMunson#211", "lang": "en", "friends_count": 75, "statuses_count": 359, "name": "Verity Munson", "followers_count": 165 }, "sender-location": point("30.65,77.21"), "send-time": datetime("2009-01-06T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-command" }}, "message-text": " can't stand verizon the voice-command is bad:(" }
+{ "tweetid": 32i64, "user": { "screen-name": "RinaHerndon#616", "lang": "en", "friends_count": 19, "statuses_count": 265, "name": "Rina Herndon", "followers_count": 26 }, "sender-location": point("40.76,75.79"), "send-time": datetime("2009-09-19T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " love verizon the shortcut-menu is mind-blowing:)" }
+{ "tweetid": 33i64, "user": { "screen-name": "MadelaineSchreckengost@250", "lang": "en", "friends_count": 45, "statuses_count": 310, "name": "Madelaine Schreckengost", "followers_count": 153 }, "sender-location": point("30.35,66.43"), "send-time": datetime("2005-07-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "plan" }}, "message-text": " can't stand at&t the plan is bad" }
+{ "tweetid": 34i64, "user": { "screen-name": "RadclyffeStaymates_289", "lang": "en", "friends_count": 50, "statuses_count": 188, "name": "Radclyffe Staymates", "followers_count": 97 }, "sender-location": point("45.42,77.18"), "send-time": datetime("2012-05-06T10:10:00.000Z"), "referred-topics": {{ "at&t", "customer-service" }}, "message-text": " hate at&t its customer-service is OMG" }
+{ "tweetid": 35i64, "user": { "screen-name": "VernieAlice$968", "lang": "en", "friends_count": 70, "statuses_count": 491, "name": "Vernie Alice", "followers_count": 193 }, "sender-location": point("28.03,79.37"), "send-time": datetime("2010-01-19T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " can't stand motorola the voice-command is horrible" }
+{ "tweetid": 36i64, "user": { "screen-name": "GertieDugger#987", "lang": "en", "friends_count": 22, "statuses_count": 72, "name": "Gertie Dugger", "followers_count": 12 }, "sender-location": point("25.77,92.7"), "send-time": datetime("2009-09-25T10:10:00.000Z"), "referred-topics": {{ "sprint", "touch-screen" }}, "message-text": " like sprint its touch-screen is awesome" }
+{ "tweetid": 37i64, "user": { "screen-name": "AggieBollinger@675", "lang": "en", "friends_count": 45, "statuses_count": 175, "name": "Aggie Bollinger", "followers_count": 67 }, "sender-location": point("42.6,68.28"), "send-time": datetime("2012-02-22T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " love sprint its voice-clarity is awesome" }
+{ "tweetid": 38i64, "user": { "screen-name": "JocelynPatton$328", "lang": "en", "friends_count": 35, "statuses_count": 484, "name": "Jocelyn Patton", "followers_count": 174 }, "sender-location": point("28.77,88.28"), "send-time": datetime("2006-12-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t the wireless is horrible:(" }
+{ "tweetid": 39i64, "user": { "screen-name": "CandelariaHujsak#602", "lang": "en", "friends_count": 28, "statuses_count": 499, "name": "Candelaria Hujsak", "followers_count": 94 }, "sender-location": point("36.09,96.94"), "send-time": datetime("2007-11-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "wireless" }}, "message-text": " can't stand t-mobile the wireless is terrible:(" }
+{ "tweetid": 40i64, "user": { "screen-name": "DamarisMueller#283", "lang": "en", "friends_count": 46, "statuses_count": 122, "name": "Damaris Mueller", "followers_count": 189 }, "sender-location": point("44.31,73.93"), "send-time": datetime("2012-02-28T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " like sprint its wireless is awesome" }
+{ "tweetid": 41i64, "user": { "screen-name": "ChuckPhilbrick_884", "lang": "en", "friends_count": 73, "statuses_count": 237, "name": "Chuck Philbrick", "followers_count": 35 }, "sender-location": point("35.39,81.04"), "send-time": datetime("2012-05-07T10:10:00.000Z"), "referred-topics": {{ "verizon", "plan" }}, "message-text": " love verizon its plan is good:)" }
+{ "tweetid": 42i64, "user": { "screen-name": "BraxtonKifer_723", "lang": "en", "friends_count": 65, "statuses_count": 459, "name": "Braxton Kifer", "followers_count": 6 }, "sender-location": point("30.23,70.06"), "send-time": datetime("2007-10-15T10:10:00.000Z"), "referred-topics": {{ "verizon", "touch-screen" }}, "message-text": " dislike verizon the touch-screen is horrible" }
+{ "tweetid": 43i64, "user": { "screen-name": "DeshawnPorter#734", "lang": "en", "friends_count": 26, "statuses_count": 408, "name": "Deshawn Porter", "followers_count": 14 }, "sender-location": point("35.2,82.65"), "send-time": datetime("2005-10-06T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is amazing" }
+{ "tweetid": 44i64, "user": { "screen-name": "SamanthaBeach$879", "lang": "en", "friends_count": 95, "statuses_count": 481, "name": "Samantha Beach", "followers_count": 119 }, "sender-location": point("30.28,89.79"), "send-time": datetime("2005-09-20T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " love motorola the network is mind-blowing:)" }
+{ "tweetid": 45i64, "user": { "screen-name": "NoelleBash_83", "lang": "en", "friends_count": 4, "statuses_count": 148, "name": "Noelle Bash", "followers_count": 139 }, "sender-location": point("42.4,96.94"), "send-time": datetime("2007-01-05T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " hate iphone its platform is terrible:(" }
+{ "tweetid": 46i64, "user": { "screen-name": "RuthWells#712", "lang": "en", "friends_count": 51, "statuses_count": 415, "name": "Ruth Wells", "followers_count": 57 }, "sender-location": point("31.93,82.03"), "send-time": datetime("2007-04-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " dislike iphone the customization is bad:(" }
+{ "tweetid": 47i64, "user": { "screen-name": "NakiaClose@771", "lang": "en", "friends_count": 59, "statuses_count": 239, "name": "Nakia Close", "followers_count": 105 }, "sender-location": point("47.06,92.54"), "send-time": datetime("2005-02-18T10:10:00.000Z"), "referred-topics": {{ "motorola", "3G" }}, "message-text": " can't stand motorola its 3G is OMG:(" }
+{ "tweetid": 48i64, "user": { "screen-name": "EmLinton#420", "lang": "en", "friends_count": 87, "statuses_count": 481, "name": "Em Linton", "followers_count": 141 }, "sender-location": point("35.6,88.2"), "send-time": datetime("2006-09-24T10:10:00.000Z"), "referred-topics": {{ "iphone", "customer-service" }}, "message-text": " hate iphone its customer-service is horrible" }
+{ "tweetid": 49i64, "user": { "screen-name": "DarbyPatton_703", "lang": "en", "friends_count": 40, "statuses_count": 79, "name": "Darby Patton", "followers_count": 159 }, "sender-location": point("36.57,84.01"), "send-time": datetime("2006-06-14T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " love verizon its platform is good" }
+{ "tweetid": 50i64, "user": { "screen-name": "WilburStephenson$295", "lang": "en", "friends_count": 57, "statuses_count": 337, "name": "Wilbur Stephenson", "followers_count": 188 }, "sender-location": point("38.35,83.92"), "send-time": datetime("2006-10-14T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " dislike motorola the plan is OMG:(" }
+{ "tweetid": 51i64, "user": { "screen-name": "PalmerHahn@368", "lang": "en", "friends_count": 13, "statuses_count": 196, "name": "Palmer Hahn", "followers_count": 69 }, "sender-location": point("48.96,88.74"), "send-time": datetime("2006-01-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " like samsung its shortcut-menu is awesome" }
+{ "tweetid": 52i64, "user": { "screen-name": "HarlanWynne_297", "lang": "en", "friends_count": 71, "statuses_count": 262, "name": "Harlan Wynne", "followers_count": 151 }, "sender-location": point("41.05,93.92"), "send-time": datetime("2008-07-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung its platform is awesome" }
+{ "tweetid": 53i64, "user": { "screen-name": "GrettaCable#405", "lang": "en", "friends_count": 7, "statuses_count": 324, "name": "Gretta Cable", "followers_count": 82 }, "sender-location": point("40.6,71.86"), "send-time": datetime("2010-11-16T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone its network is amazing:)" }
+{ "tweetid": 54i64, "user": { "screen-name": "PhilipaRing_461", "lang": "en", "friends_count": 43, "statuses_count": 53, "name": "Philipa Ring", "followers_count": 164 }, "sender-location": point("30.47,90.14"), "send-time": datetime("2011-12-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " like motorola its voicemail-service is amazing" }
+{ "tweetid": 55i64, "user": { "screen-name": "LindseyBurch_187", "lang": "en", "friends_count": 9, "statuses_count": 54, "name": "Lindsey Burch", "followers_count": 6 }, "sender-location": point("31.66,68.68"), "send-time": datetime("2011-12-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " can't stand samsung its touch-screen is terrible" }
+{ "tweetid": 56i64, "user": { "screen-name": "AnnabelLosey_61", "lang": "en", "friends_count": 53, "statuses_count": 381, "name": "Annabel Losey", "followers_count": 133 }, "sender-location": point("37.33,85.16"), "send-time": datetime("2005-11-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "customization" }}, "message-text": " can't stand sprint the customization is horrible:(" }
+{ "tweetid": 57i64, "user": { "screen-name": "HectorLalty@132", "lang": "en", "friends_count": 2, "statuses_count": 195, "name": "Hector Lalty", "followers_count": 92 }, "sender-location": point("46.52,80.45"), "send-time": datetime("2012-04-15T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " hate iphone the reachability is bad:(" }
+{ "tweetid": 58i64, "user": { "screen-name": "KatieWilkins_817", "lang": "en", "friends_count": 95, "statuses_count": 476, "name": "Katie Wilkins", "followers_count": 151 }, "sender-location": point("44.72,69.13"), "send-time": datetime("2006-11-01T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is amazing:)" }
+{ "tweetid": 59i64, "user": { "screen-name": "BrianneRamsey$451", "lang": "en", "friends_count": 13, "statuses_count": 69, "name": "Brianne Ramsey", "followers_count": 102 }, "sender-location": point("37.02,80.95"), "send-time": datetime("2007-02-08T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " dislike verizon the network is terrible" }
+{ "tweetid": 60i64, "user": { "screen-name": "RinaHujsak#7", "lang": "en", "friends_count": 69, "statuses_count": 73, "name": "Rina Hujsak", "followers_count": 63 }, "sender-location": point("28.27,73.68"), "send-time": datetime("2009-03-28T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is amazing:)" }
+{ "tweetid": 61i64, "user": { "screen-name": "GertieSadley$508", "lang": "en", "friends_count": 35, "statuses_count": 235, "name": "Gertie Sadley", "followers_count": 87 }, "sender-location": point("40.19,86.0"), "send-time": datetime("2006-07-27T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " love at&t its reachability is mind-blowing:)" }
+{ "tweetid": 62i64, "user": { "screen-name": "AaronJackson_273", "lang": "en", "friends_count": 98, "statuses_count": 205, "name": "Aaron Jackson", "followers_count": 128 }, "sender-location": point("48.11,85.01"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-command" }}, "message-text": " like iphone the voice-command is awesome:)" }
+{ "tweetid": 63i64, "user": { "screen-name": "CreightonHujsak$142", "lang": "en", "friends_count": 21, "statuses_count": 68, "name": "Creighton Hujsak", "followers_count": 70 }, "sender-location": point("40.55,90.98"), "send-time": datetime("2010-08-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "voicemail-service" }}, "message-text": " love samsung the voicemail-service is amazing" }
+{ "tweetid": 64i64, "user": { "screen-name": "KazukoWilkinson$204", "lang": "en", "friends_count": 51, "statuses_count": 147, "name": "Kazuko Wilkinson", "followers_count": 86 }, "sender-location": point("29.64,94.45"), "send-time": datetime("2008-08-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " love motorola the speed is mind-blowing:)" }
+{ "tweetid": 65i64, "user": { "screen-name": "GonzaloDiegel#186", "lang": "en", "friends_count": 80, "statuses_count": 149, "name": "Gonzalo Diegel", "followers_count": 89 }, "sender-location": point("48.68,83.09"), "send-time": datetime("2008-04-24T10:10:00.000Z"), "referred-topics": {{ "at&t", "voicemail-service" }}, "message-text": " dislike at&t its voicemail-service is horrible:(" }
+{ "tweetid": 66i64, "user": { "screen-name": "KizzyKanaga$317", "lang": "en", "friends_count": 52, "statuses_count": 330, "name": "Kizzy Kanaga", "followers_count": 6 }, "sender-location": point("27.96,90.03"), "send-time": datetime("2009-10-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "touch-screen" }}, "message-text": " like at&t the touch-screen is amazing" }
+{ "tweetid": 67i64, "user": { "screen-name": "CraigTreeby@171", "lang": "en", "friends_count": 72, "statuses_count": 44, "name": "Craig Treeby", "followers_count": 155 }, "sender-location": point("48.99,91.21"), "send-time": datetime("2006-02-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "signal" }}, "message-text": " love samsung the signal is amazing:)" }
+{ "tweetid": 68i64, "user": { "screen-name": "BrionySaltser#395", "lang": "en", "friends_count": 21, "statuses_count": 422, "name": "Briony Saltser", "followers_count": 129 }, "sender-location": point("37.33,67.08"), "send-time": datetime("2006-03-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " love samsung its shortcut-menu is amazing:)" }
+{ "tweetid": 69i64, "user": { "screen-name": "MagdaleneWerner$925", "lang": "en", "friends_count": 46, "statuses_count": 446, "name": "Magdalene Werner", "followers_count": 75 }, "sender-location": point("45.77,83.23"), "send-time": datetime("2005-06-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "signal" }}, "message-text": " like iphone the signal is mind-blowing" }
+{ "tweetid": 70i64, "user": { "screen-name": "FlossieBaker$898", "lang": "en", "friends_count": 67, "statuses_count": 63, "name": "Flossie Baker", "followers_count": 50 }, "sender-location": point("44.37,89.4"), "send-time": datetime("2011-07-16T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " like motorola its network is good" }
+{ "tweetid": 71i64, "user": { "screen-name": "GradyGraff$247", "lang": "en", "friends_count": 21, "statuses_count": 58, "name": "Grady Graff", "followers_count": 45 }, "sender-location": point("24.81,67.13"), "send-time": datetime("2012-04-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "reachability" }}, "message-text": " like motorola the reachability is good" }
+{ "tweetid": 72i64, "user": { "screen-name": "MelitaLombardi@324", "lang": "en", "friends_count": 39, "statuses_count": 32, "name": "Melita Lombardi", "followers_count": 167 }, "sender-location": point("24.23,73.03"), "send-time": datetime("2011-02-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " hate verizon the network is terrible:(" }
+{ "tweetid": 73i64, "user": { "screen-name": "HerbertPowell_651", "lang": "en", "friends_count": 17, "statuses_count": 57, "name": "Herbert Powell", "followers_count": 167 }, "sender-location": point("47.22,92.69"), "send-time": datetime("2005-01-25T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is awesome:)" }
+{ "tweetid": 74i64, "user": { "screen-name": "BasilSanborn$23", "lang": "en", "friends_count": 38, "statuses_count": 391, "name": "Basil Sanborn", "followers_count": 108 }, "sender-location": point("30.96,68.0"), "send-time": datetime("2008-12-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " can't stand samsung the network is bad" }
+{ "tweetid": 75i64, "user": { "screen-name": "LaurineZoucks$307", "lang": "en", "friends_count": 27, "statuses_count": 161, "name": "Laurine Zoucks", "followers_count": 144 }, "sender-location": point("40.78,91.08"), "send-time": datetime("2009-11-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " like motorola the customer-service is amazing" }
+{ "tweetid": 76i64, "user": { "screen-name": "LincolnMarriman@675", "lang": "en", "friends_count": 3, "statuses_count": 389, "name": "Lincoln Marriman", "followers_count": 125 }, "sender-location": point("28.4,83.82"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "customer-service" }}, "message-text": " like verizon the customer-service is mind-blowing" }
+{ "tweetid": 77i64, "user": { "screen-name": "FrancesFinlay#683", "lang": "en", "friends_count": 71, "statuses_count": 174, "name": "Frances Finlay", "followers_count": 32 }, "sender-location": point("29.71,66.36"), "send-time": datetime("2012-04-18T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " love iphone the customization is awesome" }
+{ "tweetid": 78i64, "user": { "screen-name": "ModestoMarriman_627", "lang": "en", "friends_count": 76, "statuses_count": 2, "name": "Modesto Marriman", "followers_count": 33 }, "sender-location": point("33.77,92.15"), "send-time": datetime("2011-09-26T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " love samsung its network is mind-blowing" }
+{ "tweetid": 79i64, "user": { "screen-name": "FlossieCamp#59", "lang": "en", "friends_count": 17, "statuses_count": 484, "name": "Flossie Camp", "followers_count": 142 }, "sender-location": point("24.67,77.24"), "send-time": datetime("2005-07-03T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " like iphone its reachability is awesome:)" }
+{ "tweetid": 80i64, "user": { "screen-name": "DouglasKing@553", "lang": "en", "friends_count": 62, "statuses_count": 251, "name": "Douglas King", "followers_count": 180 }, "sender-location": point("24.84,74.15"), "send-time": datetime("2009-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " can't stand sprint the speed is bad:(" }
+{ "tweetid": 81i64, "user": { "screen-name": "WardCasteel@972", "lang": "en", "friends_count": 8, "statuses_count": 358, "name": "Ward Casteel", "followers_count": 51 }, "sender-location": point("41.41,91.32"), "send-time": datetime("2007-05-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is terrible:(" }
+{ "tweetid": 82i64, "user": { "screen-name": "AdelaErskine#579", "lang": "en", "friends_count": 97, "statuses_count": 354, "name": "Adela Erskine", "followers_count": 155 }, "sender-location": point("35.56,68.19"), "send-time": datetime("2009-03-23T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " hate samsung the touch-screen is bad:(" }
+{ "tweetid": 83i64, "user": { "screen-name": "ClevelandPrevatt#255", "lang": "en", "friends_count": 24, "statuses_count": 159, "name": "Cleveland Prevatt", "followers_count": 68 }, "sender-location": point("38.6,67.51"), "send-time": datetime("2006-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "platform" }}, "message-text": " hate sprint its platform is OMG:(" }
+{ "tweetid": 84i64, "user": { "screen-name": "MaxwellTreeby@610", "lang": "en", "friends_count": 21, "statuses_count": 168, "name": "Maxwell Treeby", "followers_count": 138 }, "sender-location": point("38.37,79.64"), "send-time": datetime("2007-07-17T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola its speed is mind-blowing" }
+{ "tweetid": 85i64, "user": { "screen-name": "BobbyBastion$235", "lang": "en", "friends_count": 48, "statuses_count": 251, "name": "Bobby Bastion", "followers_count": 123 }, "sender-location": point("45.84,83.03"), "send-time": datetime("2009-03-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " love samsung its voice-command is amazing" }
+{ "tweetid": 86i64, "user": { "screen-name": "ClairKanaga$512", "lang": "en", "friends_count": 88, "statuses_count": 274, "name": "Clair Kanaga", "followers_count": 77 }, "sender-location": point("46.34,84.86"), "send-time": datetime("2006-07-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " love samsung its reachability is mind-blowing:)" }
+{ "tweetid": 87i64, "user": { "screen-name": "HueyLosey_966", "lang": "en", "friends_count": 78, "statuses_count": 32, "name": "Huey Losey", "followers_count": 2 }, "sender-location": point("25.61,78.89"), "send-time": datetime("2011-03-22T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung its reachability is good:)" }
+{ "tweetid": 88i64, "user": { "screen-name": "SooThigpen#463", "lang": "en", "friends_count": 5, "statuses_count": 429, "name": "Soo Thigpen", "followers_count": 18 }, "sender-location": point("34.84,74.43"), "send-time": datetime("2009-03-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "wireless" }}, "message-text": " love motorola the wireless is good:)" }
+{ "tweetid": 89i64, "user": { "screen-name": "LacreshaWire_320", "lang": "en", "friends_count": 92, "statuses_count": 127, "name": "Lacresha Wire", "followers_count": 194 }, "sender-location": point("47.73,86.79"), "send-time": datetime("2007-08-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "wireless" }}, "message-text": " can't stand verizon its wireless is OMG:(" }
+{ "tweetid": 90i64, "user": { "screen-name": "MyriamLambert@966", "lang": "en", "friends_count": 22, "statuses_count": 452, "name": "Myriam Lambert", "followers_count": 193 }, "sender-location": point("41.85,88.44"), "send-time": datetime("2008-12-02T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "plan" }}, "message-text": " hate t-mobile the plan is bad" }
+{ "tweetid": 91i64, "user": { "screen-name": "WoodyWhite@341", "lang": "en", "friends_count": 12, "statuses_count": 183, "name": "Woody White", "followers_count": 31 }, "sender-location": point("29.04,85.35"), "send-time": datetime("2006-02-06T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is good" }
+{ "tweetid": 92i64, "user": { "screen-name": "QuinDickinson#157", "lang": "en", "friends_count": 84, "statuses_count": 415, "name": "Quin Dickinson", "followers_count": 9 }, "sender-location": point("40.86,67.52"), "send-time": datetime("2006-01-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "signal" }}, "message-text": " can't stand t-mobile the signal is horrible:(" }
+{ "tweetid": 93i64, "user": { "screen-name": "BettieRing@713", "lang": "en", "friends_count": 39, "statuses_count": 373, "name": "Bettie Ring", "followers_count": 98 }, "sender-location": point("26.37,69.03"), "send-time": datetime("2005-10-04T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "reachability" }}, "message-text": " dislike t-mobile the reachability is terrible:(" }
+{ "tweetid": 94i64, "user": { "screen-name": "LinaDraudy_733", "lang": "en", "friends_count": 70, "statuses_count": 228, "name": "Lina Draudy", "followers_count": 9 }, "sender-location": point("39.58,97.38"), "send-time": datetime("2012-03-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " like verizon the network is awesome:)" }
+{ "tweetid": 95i64, "user": { "screen-name": "StacyFleming#907", "lang": "en", "friends_count": 37, "statuses_count": 119, "name": "Stacy Fleming", "followers_count": 113 }, "sender-location": point("24.27,94.53"), "send-time": datetime("2007-10-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " love samsung its platform is amazing:)" }
+{ "tweetid": 96i64, "user": { "screen-name": "AmbroseAllshouse_786", "lang": "en", "friends_count": 24, "statuses_count": 299, "name": "Ambrose Allshouse", "followers_count": 23 }, "sender-location": point("34.88,73.05"), "send-time": datetime("2009-01-09T10:10:00.000Z"), "referred-topics": {{ "verizon", "speed" }}, "message-text": " hate verizon the speed is horrible:(" }
+{ "tweetid": 97i64, "user": { "screen-name": "VaughnFocell_20", "lang": "en", "friends_count": 68, "statuses_count": 388, "name": "Vaughn Focell", "followers_count": 171 }, "sender-location": point("34.67,73.46"), "send-time": datetime("2012-01-24T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customer-service" }}, "message-text": " can't stand t-mobile its customer-service is terrible" }
+{ "tweetid": 98i64, "user": { "screen-name": "UlyssesCrissman#115", "lang": "en", "friends_count": 90, "statuses_count": 250, "name": "Ulysses Crissman", "followers_count": 110 }, "sender-location": point("24.81,93.59"), "send-time": datetime("2008-04-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " love motorola its customer-service is awesome" }
+{ "tweetid": 99i64, "user": { "screen-name": "WatCrissman#703", "lang": "en", "friends_count": 50, "statuses_count": 244, "name": "Wat Crissman", "followers_count": 123 }, "sender-location": point("33.22,92.64"), "send-time": datetime("2006-09-15T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " can't stand motorola the plan is terrible" }
+{ "tweetid": 100i64, "user": { "screen-name": "BambiLaurence$910", "lang": "en", "friends_count": 57, "statuses_count": 311, "name": "Bambi Laurence", "followers_count": 136 }, "sender-location": point("36.88,80.08"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is mind-blowing" }
diff --git a/asterix-app/src/test/resources/runtimets/results/flwor/let33/let33.1.adm b/asterix-app/src/test/resources/runtimets/results/flwor/let33/let33.1.adm
new file mode 100644
index 0000000..783934c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/flwor/let33/let33.1.adm
@@ -0,0 +1 @@
+{ "name": "Tom" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue656/query-issue656.1.adm
similarity index 100%
rename from asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql
rename to asterix-app/src/test/resources/runtimets/results/open-closed/query-issue656/query-issue656.1.adm
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
index c084678..7a45f5b 100644
--- a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
@@ -1,6 +1,6 @@
-{ "DataverseName": "DMLTest", "DatasetName": "FacebookUsers1", "DataTypeName": "FacebookUserType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:07 PDT 2013", "DatasetId": 375, "PendingOp": 0 }
-{ "DataverseName": "DMLTest", "DatasetName": "FacebookUsers2", "DataTypeName": "FacebookUserType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:26:07 PDT 2013", "DatasetId": 376, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:24:38 PDT 2013", "DatasetId": 9, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:24:38 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:24:38 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Sun Sep 22 22:24:38 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "CompactionPolicy" ], "PrimaryKey": [ "DataverseName", "CompactionPolicy" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 13, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 2, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 8, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 3, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 1, "PendingOp": 0 }
+{ "DataverseName": "Metadata", "DatasetName": "Feed", "DataTypeName": "FeedRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "FeedName" ], "PrimaryKey": [ "DataverseName", "FeedName" ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolernace-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{ }}, "Timestamp": "Thu Oct 24 01:49:04 PDT 2013", "DatasetId": 10, "PendingOp": 0 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index a256fb5..d7b4c75 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -50,6 +50,11 @@
</compilation-unit>
</test-case>
</test-group>
+ <test-case FilePath="flwor">
+ <compilation-unit name="let33">
+ <output-dir compare="Text">let33</output-dir>
+ </compilation-unit>
+ </test-case>
<test-group name="aggregate">
<test-case FilePath="aggregate">
<compilation-unit name="issue531_string_min_max">
@@ -1099,6 +1104,22 @@
</compilation-unit>
</test-case>
<test-case FilePath="dml">
+ <compilation-unit name="load-with-ngram-index">
+ <output-dir compare="Text">load-with-ngram-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
+ <compilation-unit name="load-with-rtree-index">
+ <output-dir compare="Text">load-with-rtree-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
+ <compilation-unit name="load-with-word-index">
+ <output-dir compare="Text">load-with-word-index</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="dml">
<compilation-unit name="opentype-c2o-recursive">
<output-dir compare="Text">opentype-c2o-recursive</output-dir>
</compilation-unit>
@@ -2294,11 +2315,13 @@
</compilation-unit>
</test-case>
-->
+ <!--
<test-case FilePath="misc">
<compilation-unit name="tid_01">
<output-dir compare="Text">tid_01</output-dir>
</compilation-unit>
</test-case>
+ -->
<test-case FilePath="misc">
<compilation-unit name="year_01">
<output-dir compare="Text">year_01</output-dir>
@@ -4413,6 +4436,16 @@
</compilation-unit>
</test-case>
<test-case FilePath="feeds">
+ <compilation-unit name="feeds_05">
+ <output-dir compare="Text">feeds_05</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_06">
+ <output-dir compare="Text">feeds_06</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
<compilation-unit name="issue_230_feeds">
<output-dir compare="Text">issue_230_feeds</output-dir>
</compilation-unit>
diff --git a/asterix-aql/pom.xml b/asterix-aql/pom.xml
index 465c3b9..5ec2b0f 100644
--- a/asterix-aql/pom.xml
+++ b/asterix-aql/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-aql</artifactId>
<build>
@@ -115,22 +115,28 @@
</build>
<dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
index 95eefa0..6b41e25 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
@@ -20,20 +20,20 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
public abstract class AbstractExpression implements Expression {
- protected List<IExpressionAnnotation> hints;
-
- public void addHint(IExpressionAnnotation hint) {
- if (hints == null) {
- hints = new ArrayList<IExpressionAnnotation>();
- }
- hints.add(hint);
+ protected List<IExpressionAnnotation> hints;
+
+ public void addHint(IExpressionAnnotation hint) {
+ if (hints == null) {
+ hints = new ArrayList<IExpressionAnnotation>();
+ }
+ hints.add(hint);
}
-
+
public boolean hasHints() {
- return hints != null;
+ return hints != null;
}
-
+
public List<IExpressionAnnotation> getHints() {
- return hints;
+ return hints;
}
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
index ad00112..566481e 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
@@ -34,28 +34,28 @@
}
abstract public Object getValue();
-
+
abstract public Type getLiteralType();
public String getStringValue() {
return getValue().toString();
- }
-
+ }
+
@Override
public int hashCode() {
return getValue().hashCode();
- }
+ }
public boolean equals(Object obj) {
if (!(obj instanceof Literal)) {
return false;
}
- Literal literal = (Literal)obj;
+ Literal literal = (Literal) obj;
return getValue().equals(literal.getValue());
- }
-
+ }
+
@Override
public String toString() {
return getStringValue();
- }
+ }
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
index 34693e4..2948f51 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
@@ -25,7 +25,7 @@
UPDATE,
DML_CMD_LIST,
FUNCTION_DECL,
- LOAD_FROM_FILE,
+ LOAD,
NODEGROUP_DECL,
NODEGROUP_DROP,
QUERY,
@@ -37,8 +37,10 @@
INDEX_DECL,
CREATE_DATAVERSE,
INDEX_DROP,
- BEGIN_FEED,
- CONTROL_FEED,
+ CREATE_FEED,
+ DROP_FEED,
+ CONNECT_FEED,
+ DISCONNECT_FEED,
CREATE_FUNCTION,
FUNCTION_DROP,
COMPACT
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/BeginFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/BeginFeedStatement.java
deleted file mode 100644
index a119e2a..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/BeginFeedStatement.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.io.StringReader;
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.Function;
-
-public class BeginFeedStatement implements Statement {
-
- private final Identifier dataverseName;
- private final Identifier datasetName;
- private Query query;
- private int varCounter;
-
- public BeginFeedStatement(Identifier dataverseName, Identifier datasetName, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.varCounter = varCounter;
- }
-
- public void initialize(MetadataTransactionContext mdTxnCtx, Dataset dataset) throws MetadataException {
- query = new Query();
- FeedDatasetDetails feedDetails = (FeedDatasetDetails) dataset.getDatasetDetails();
- String functionName = feedDetails.getFunction() == null ? null : feedDetails.getFunction().getName();
- StringBuilder builder = new StringBuilder();
- builder.append("set" + " " + FunctionUtils.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
- builder.append("insert into dataset " + datasetName + " ");
-
- if (functionName == null) {
- builder.append(" (" + " for $x in feed-ingest ('" + datasetName + "') ");
- builder.append(" return $x");
- } else {
- int arity = feedDetails.getFunction().getArity();
- FunctionSignature signature = new FunctionSignature(dataset.getDataverseName(), functionName, arity);
- Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
- if (function == null) {
- throw new MetadataException(" Unknown function " + feedDetails.getFunction());
- }
- if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
- String param = function.getParams().get(0);
- builder.append(" (" + " for" + " " + param + " in feed-ingest ('" + datasetName + "') ");
- builder.append(" let $y:=(" + function.getFunctionBody() + ")" + " return $y");
- } else {
- builder.append(" (" + " for $x in feed-ingest ('" + datasetName + "') ");
- builder.append(" let $y:=" + function.getName() + "(" + "$x" + ")");
- builder.append(" return $y");
- }
-
- }
- builder.append(")");
- builder.append(";");
- AQLParser parser = new AQLParser(new StringReader(builder.toString()));
-
- List<Statement> statements;
- try {
- statements = parser.Statement();
- query = ((InsertStatement) statements.get(1)).getQuery();
- } catch (ParseException pe) {
- throw new MetadataException(pe);
- }
-
- }
-
- public Identifier getDataverseName() {
- return dataverseName;
- }
-
- public Identifier getDatasetName() {
- return datasetName;
- }
-
- public Query getQuery() {
- return query;
- }
-
- public int getVarCounter() {
- return varCounter;
- }
-
- @Override
- public Kind getKind() {
- return Kind.BEGIN_FEED;
- }
-
- @Override
- public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
- return visitor.visitBeginFeedStatement(this, arg);
- }
-
- @Override
- public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
- visitor.visit(this, arg);
- }
-
-}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java
new file mode 100644
index 0000000..3ffc1cc
--- /dev/null
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.expression;
+
+import java.io.StringReader;
+import java.util.List;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.Function;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
+import edu.uci.ics.asterix.metadata.feeds.IAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+
+public class ConnectFeedStatement implements Statement {
+
+ private final Identifier dataverseName;
+ private final Identifier datasetName;
+ private final String feedName;
+ private final String policy;
+ private Query query;
+ private int varCounter;
+ private boolean forceConnect = false;
+
+ public static final String WAIT_FOR_COMPLETION = "wait-for-completion-feed";
+
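+ // Both the feed and the dataset may arrive as (dataverse, name) pairs; the
+ // constructors below require any explicit dataverse qualifiers to agree and
+ // otherwise use whichever qualifier is present.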
+ public ConnectFeedStatement(Pair<Identifier, Identifier> feedNameCmp, Pair<Identifier, Identifier> datasetNameCmp,
+ String policy, int varCounter) {
+ if (feedNameCmp.first != null && datasetNameCmp.first != null
+ && !feedNameCmp.first.getValue().equals(datasetNameCmp.first.getValue())) {
+ throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
+ }
+ this.dataverseName = feedNameCmp.first != null ? feedNameCmp.first
+ : datasetNameCmp.first != null ? datasetNameCmp.first : null;
+ this.datasetName = datasetNameCmp.second;
+ this.feedName = feedNameCmp.second.getValue();
+ this.policy = policy != null ? policy : BuiltinFeedPolicies.DEFAULT_POLICY.getPolicyName();
+ this.varCounter = varCounter;
+ }
+
+ public ConnectFeedStatement(Identifier dataverseName, Identifier feedName, Identifier datasetName, String policy,
+ int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.feedName = feedName.getValue();
+ this.policy = policy != null ? policy : BuiltinFeedPolicies.DEFAULT_POLICY.getPolicyName();
+ this.varCounter = varCounter;
+ }
+
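+ /**
+ * Synthesizes the AQL pipeline that routes records from the source feed into
+ * the target dataset. In sketch form (placeholders in angle brackets), the
+ * generated statement is:
+ *
+ * set <IMPORT_PRIVATE_FUNCTIONS> 'true';
+ * insert into dataset <dataset> (
+ * for $x in feed-ingest('<feed>', '<adapterOutputType>', '<dataset>') return $x);
+ *
+ * If the feed has an applied function f, each record is first passed through
+ * it instead: let $y := (f($x)) return $y.
+ */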
+ public void initialize(MetadataTransactionContext mdTxnCtx, Dataset targetDataset, Feed sourceFeed)
+ throws MetadataException {
+ query = new Query();
+ FunctionSignature appliedFunction = sourceFeed.getAppliedFunction();
+ Function function = null;
+ String adapterOutputType = null;
+ if (appliedFunction != null) {
+ function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
+ if (function == null) {
+ throw new MetadataException(" Unknown function " + function);
+ } else if (function.getParams().size() > 1) {
+ throw new MetadataException(" Incompatible function: " + appliedFunction
+ + " Number if arguments must be 1");
+ }
+ }
+
+ Triple<IAdapterFactory, ARecordType, AdapterType> factoryOutput = null;
+ try {
+ factoryOutput = FeedUtil.getFeedFactoryAndOutput(sourceFeed, mdTxnCtx);
+ adapterOutputType = factoryOutput.second.getTypeName();
+ } catch (AlgebricksException ae) {
+ ae.printStackTrace();
+ throw new MetadataException(ae);
+ }
+
+ StringBuilder builder = new StringBuilder();
+ builder.append("set" + " " + FunctionUtils.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
+ builder.append("insert into dataset " + datasetName + " ");
+
+ if (appliedFunction == null) {
+ builder.append(" (" + " for $x in feed-ingest ('" + feedName + "'" + "," + "'" + adapterOutputType + "'"
+ + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
+ builder.append(" return $x");
+ } else {
+ if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
+ String param = function.getParams().get(0);
+ builder.append(" (" + " for" + " " + param + " in feed-ingest ('" + feedName + "'" + "," + "'"
+ + adapterOutputType + "'" + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
+ builder.append(" let $y:=(" + function.getFunctionBody() + ")" + " return $y");
+ } else {
+ builder.append(" (" + " for $x in feed-ingest ('" + feedName + "'" + "," + "'" + adapterOutputType
+ + "'" + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
+ builder.append(" let $y:=" + sourceFeed.getDataverseName() + "." + function.getName() + "(" + "$x"
+ + ")");
+ builder.append(" return $y");
+ }
+
+ }
+ builder.append(")");
+ builder.append(";");
+ AQLParser parser = new AQLParser(new StringReader(builder.toString()));
+
+ List<Statement> statements;
+ try {
+ statements = parser.Statement();
+ query = ((InsertStatement) statements.get(1)).getQuery();
+ } catch (ParseException pe) {
+ throw new MetadataException(pe);
+ }
+
+ }
+
+ public Identifier getDataverseName() {
+ return dataverseName;
+ }
+
+ public Identifier getDatasetName() {
+ return datasetName;
+ }
+
+ public Query getQuery() {
+ return query;
+ }
+
+ public int getVarCounter() {
+ return varCounter;
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.CONNECT_FEED;
+ }
+
+ public String getPolicy() {
+ return policy;
+ }
+
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitConnectFeedStatement(this, arg);
+ }
+
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
+ visitor.visit(this, arg);
+ }
+
+ public boolean forceConnect() {
+ return forceConnect;
+ }
+
+ public void setForceConnect(boolean forceConnect) {
+ this.forceConnect = forceConnect;
+ }
+
+ public String getFeedName() {
+ return feedName;
+ }
+
+}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ControlFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ControlFeedStatement.java
deleted file mode 100644
index d442fe7..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ControlFeedStatement.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class ControlFeedStatement implements Statement {
-
- private final Identifier dataverseName;
- private final Identifier datasetName;
-
- public enum OperationType {
- BEGIN,
- SUSPEND,
- RESUME,
- END,
- ALTER
- }
-
- private OperationType operationType;
- private Map<String, String> alterAdapterConfParams;
-
- public ControlFeedStatement(OperationType operation, Identifier dataverseName, Identifier datasetName) {
- this.operationType = operation;
- this.datasetName = datasetName;
- this.dataverseName = dataverseName;
- }
-
- public ControlFeedStatement(OperationType operation, Identifier dataverseName, Identifier datasetName,
- Map<String, String> alterAdapterConfParams) {
- this.operationType = operation;
- this.datasetName = datasetName;
- this.dataverseName = dataverseName;
- this.alterAdapterConfParams = alterAdapterConfParams;
- }
-
- public Identifier getDataverseName() {
- return dataverseName;
- }
-
- public Identifier getDatasetName() {
- return datasetName;
- }
-
- public OperationType getOperationType() {
- return operationType;
- }
-
- public void setOperation(OperationType operationType) {
- this.operationType = operationType;
- }
-
- @Override
- public Kind getKind() {
- return Kind.CONTROL_FEED;
- }
-
- public Map<String, String> getAlterAdapterConfParams() {
- return alterAdapterConfParams;
- }
-
- @Override
- public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
- return visitor.visitControlFeedStatement(this, arg);
- }
-
- @Override
- public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
- visitor.visit(this, arg);
- }
-
-}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java
new file mode 100644
index 0000000..8f90e87
--- /dev/null
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.expression;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+
+public class CreateFeedStatement implements Statement {
+
+ private final Identifier dataverseName;
+ private final Identifier feedName;
+ private final String adaptorName;
+ private final Map<String, String> adaptorConfiguration;
+ private final FunctionSignature appliedFunction;
+ private final boolean ifNotExists;
+
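+ // CREATE FEED is pure metadata: it records the adaptor, its configuration and
+ // an optional per-record function. No data moves until the feed is attached to
+ // a dataset with CONNECT FEED (see ConnectFeedStatement).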
+ public CreateFeedStatement(Identifier dataverseName, Identifier feedName, String adaptorName,
+ Map<String, String> adaptorConfiguration, FunctionSignature appliedFunction, boolean ifNotExists) {
+ this.feedName = feedName;
+ this.dataverseName = dataverseName;
+ this.adaptorName = adaptorName;
+ this.adaptorConfiguration = adaptorConfiguration;
+ this.appliedFunction = appliedFunction;
+ this.ifNotExists = ifNotExists;
+ }
+
+ public Identifier getDataverseName() {
+ return dataverseName;
+ }
+
+ public Identifier getFeedName() {
+ return feedName;
+ }
+
+ public String getAdaptorName() {
+ return adaptorName;
+ }
+
+ public Map<String, String> getAdaptorConfiguration() {
+ return adaptorConfiguration;
+ }
+
+ public FunctionSignature getAppliedFunction() {
+ return appliedFunction;
+ }
+
+ public boolean getIfNotExists() {
+ return this.ifNotExists;
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_FEED;
+ }
+
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitCreateFeedStatement(this, arg);
+ }
+
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
+ visitor.visit(this, arg);
+ }
+
+}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
index 770a6d5..ded6946 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
@@ -72,4 +72,4 @@
visitor.visit(this, arg);
}
-}
+}
\ No newline at end of file
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java
new file mode 100644
index 0000000..b2035a6
--- /dev/null
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.expression;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+public class DisconnectFeedStatement implements Statement {
+
+ private final Identifier dataverseName;
+ private final Identifier feedName;
+ private final Identifier datasetName;
+
+ public DisconnectFeedStatement(Identifier dataverseName, Identifier feedName, Identifier datasetName) {
+ this.feedName = feedName;
+ this.datasetName = datasetName;
+ this.dataverseName = dataverseName;
+ }
+
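+ // Mirrors the dataverse resolution in ConnectFeedStatement: explicit
+ // qualifiers on the feed and the dataset must agree, and either side may
+ // supply the dataverse when the other omits it.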
+ public DisconnectFeedStatement(Pair<Identifier, Identifier> feedNameComponent,
+ Pair<Identifier, Identifier> datasetNameComponent) {
+ if (feedNameComponent.first != null && datasetNameComponent.first != null
+ && !feedNameComponent.first.getValue().equals(datasetNameComponent.first.getValue())) {
+ throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
+ }
+ this.dataverseName = feedNameComponent.first != null ? feedNameComponent.first
+ : datasetNameComponent.first != null ? datasetNameComponent.first : null;
+ this.datasetName = datasetNameComponent.second;
+ this.feedName = feedNameComponent.second;
+ }
+
+ public Identifier getDataverseName() {
+ return dataverseName;
+ }
+
+ public Identifier getFeedName() {
+ return feedName;
+ }
+
+ public Identifier getDatasetName() {
+ return datasetName;
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.DISCONNECT_FEED;
+ }
+
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitDisconnectFeedStatement(this, arg);
+ }
+
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
+ visitor.visit(this, arg);
+ }
+
+ @Override
+ public String toString() {
+ return "disconnect feed " + feedName + " from " + datasetName;
+ }
+
+}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
index f664fe3..4028340 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
@@ -27,7 +27,7 @@
public FeedDetailsDecl(String adapterFactoryClassname, Map<String, String> configuration,
FunctionSignature signature, Identifier nodeGroupName, List<String> partitioningExpr,
String compactionPolicy, Map<String, String> compactionPolicyProperties) {
- super(nodeGroupName, partitioningExpr, compactionPolicy, compactionPolicyProperties);
+ super(nodeGroupName, partitioningExpr, false, compactionPolicy, compactionPolicyProperties);
this.adapterFactoryClassname = adapterFactoryClassname;
this.configuration = configuration;
this.functionSignature = signature;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java
new file mode 100644
index 0000000..0120a7d
--- /dev/null
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.expression;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public class FeedDropStatement implements Statement {
+
+ private final Identifier dataverseName;
+ private final Identifier feedName;
+ private boolean ifExists;
+
+ public FeedDropStatement(Identifier dataverseName, Identifier feedName, boolean ifExists) {
+ this.dataverseName = dataverseName;
+ this.feedName = feedName;
+ this.ifExists = ifExists;
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.DROP_FEED;
+ }
+
+ public Identifier getDataverseName() {
+ return dataverseName;
+ }
+
+ public Identifier getFeedName() {
+ return feedName;
+ }
+
+ public boolean getIfExists() {
+ return ifExists;
+ }
+
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitDropFeedStatement(this, arg);
+ }
+
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
+ visitor.visit(this, arg);
+ }
+
+}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
index d6e2611..d9fa886 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
@@ -22,14 +22,16 @@
public class InternalDetailsDecl implements IDatasetDetailsDecl {
private final Identifier nodegroupName;
private final List<String> partitioningExprs;
+ private final boolean autogenerated;
private final String compactionPolicy;
private final Map<String, String> compactionPolicyProperties;
- public InternalDetailsDecl(Identifier nodeGroupName, List<String> partitioningExpr, String compactionPolicy,
- Map<String, String> compactionPolicyProperties) {
+ public InternalDetailsDecl(Identifier nodeGroupName, List<String> partitioningExpr, boolean autogenerated,
+ String compactionPolicy, Map<String, String> compactionPolicyProperties) {
this.nodegroupName = nodeGroupName == null ? new Identifier(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)
: nodeGroupName;
this.partitioningExprs = partitioningExpr;
+ this.autogenerated = autogenerated;
this.compactionPolicy = compactionPolicy;
this.compactionPolicyProperties = compactionPolicyProperties;
}
@@ -42,6 +44,10 @@
return nodegroupName;
}
+ public boolean isAutogenerated() {
+ return autogenerated;
+ }
+
public String getCompactionPolicy() {
return compactionPolicy;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadFromFileStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadFromFileStatement.java
deleted file mode 100644
index d37fd46..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadFromFileStatement.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class LoadFromFileStatement implements Statement {
-
- private Identifier datasetName;
- private Identifier dataverseName;
- private String adapter;
- private Map<String, String> properties;
- private boolean dataIsLocallySorted;
-
- public LoadFromFileStatement(Identifier dataverseName, Identifier datasetName, String adapter,
- Map<String, String> propertiees, boolean dataIsLocallySorted) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.adapter = adapter;
- this.properties = propertiees;
- this.dataIsLocallySorted = dataIsLocallySorted;
- }
-
- public String getAdapter() {
- return adapter;
- }
-
- public void setAdapter(String adapter) {
- this.adapter = adapter;
- }
-
- public Map<String, String> getProperties() {
- return properties;
- }
-
- public void setProperties(Map<String, String> properties) {
- this.properties = properties;
- }
-
- public Identifier getDataverseName() {
- return dataverseName;
- }
-
- public void setDataverseName(Identifier dataverseName) {
- this.dataverseName = dataverseName;
- }
-
- @Override
- public Kind getKind() {
- return Kind.LOAD_FROM_FILE;
- }
-
- public Identifier getDatasetName() {
- return datasetName;
- }
-
- public boolean dataIsAlreadySorted() {
- return dataIsLocallySorted;
- }
-
- @Override
- public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
- return visitor.visitLoadFromFileStatement(this, arg);
- }
-
- @Override
- public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
- visitor.visit(this, arg);
- }
-
-}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadStatement.java
new file mode 100644
index 0000000..3ad3f8d
--- /dev/null
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/LoadStatement.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.expression;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public class LoadStatement implements Statement {
+
+ private Identifier datasetName;
+ private Identifier dataverseName;
+ private String adapter;
+ private Map<String, String> properties;
+ private boolean dataIsLocallySorted;
+
+ public LoadStatement(Identifier dataverseName, Identifier datasetName, String adapter,
+ Map<String, String> properties, boolean dataIsLocallySorted) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.adapter = adapter;
+ this.properties = properties;
+ this.dataIsLocallySorted = dataIsLocallySorted;
+ }
+
+ public String getAdapter() {
+ return adapter;
+ }
+
+ public void setAdapter(String adapter) {
+ this.adapter = adapter;
+ }
+
+ public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, String> properties) {
+ this.properties = properties;
+ }
+
+ public Identifier getDataverseName() {
+ return dataverseName;
+ }
+
+ public void setDataverseName(Identifier dataverseName) {
+ this.dataverseName = dataverseName;
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.LOAD;
+ }
+
+ public Identifier getDatasetName() {
+ return datasetName;
+ }
+
+ public boolean dataIsAlreadySorted() {
+ return dataIsLocallySorted;
+ }
+
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitLoadStatement(this, arg);
+ }
+
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
+ visitor.visit(this, arg);
+ }
+
+}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Query.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Query.java
index c9d4d59..b8538b8 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Query.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Query.java
@@ -21,39 +21,37 @@
import edu.uci.ics.asterix.common.exceptions.AsterixException;
public class Query implements Statement {
- private Expression body;
- private int varCounter;
+ private Expression body;
+ private int varCounter;
- public Expression getBody() {
- return body;
- }
+ public Expression getBody() {
+ return body;
+ }
- public void setBody(Expression body) {
- this.body = body;
- }
+ public void setBody(Expression body) {
+ this.body = body;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public void setVarCounter(int varCounter) {
- this.varCounter = varCounter;
- }
+ public void setVarCounter(int varCounter) {
+ this.varCounter = varCounter;
+ }
- @Override
- public Kind getKind() {
- return Kind.QUERY;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.QUERY;
+ }
- @Override
- public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T step)
- throws AsterixException {
- visitor.visit(this, step);
- }
+ @Override
+ public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T step) throws AsterixException {
+ visitor.visit(this, step);
+ }
- @Override
- public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg)
- throws AsterixException {
- return visitor.visitQuery(this, arg);
- }
+ @Override
+ public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
+ return visitor.visitQuery(this, arg);
+ }
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/WhereClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/WhereClause.java
index a675308..3782624 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/WhereClause.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/WhereClause.java
@@ -54,4 +54,4 @@
visitor.visit(this, arg);
}
-}
+}
\ No newline at end of file
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
index aac2b6b..ca87a0e 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
@@ -21,20 +21,22 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Literal;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -51,7 +53,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -393,14 +395,15 @@
@Override
public void visit(DatasetDecl dd, Integer step) throws AsterixException {
if (dd.getDatasetType() == DatasetType.INTERNAL) {
- out.println(skip(step) + "DatasetDecl" + dd.getName() + "(" + dd.getItemTypeName() + ")"
- + " partitioned by " + ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs());
+ String line = skip(step) + "DatasetDecl" + dd.getName() + "(" + dd.getItemTypeName() + ")"
+ + " partitioned by " + ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
+ if (((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated()) {
+ line += " [autogenerated]";
+ }
+ out.println(line);
} else if (dd.getDatasetType() == DatasetType.EXTERNAL) {
out.println(skip(step) + "DatasetDecl" + dd.getName() + "(" + dd.getItemTypeName() + ")"
+ "is an external dataset");
- } else if (dd.getDatasetType() == DatasetType.FEED) {
- out.println(skip(step) + "DatasetDecl" + dd.getName() + "(" + dd.getItemTypeName() + ")"
- + "is an feed dataset");
}
}
@@ -415,7 +418,7 @@
}
@Override
- public void visit(LoadFromFileStatement stmtLoad, Integer arg) throws AsterixException {
+ public void visit(LoadStatement stmtLoad, Integer arg) throws AsterixException {
// TODO Auto-generated method stub
}
@@ -447,8 +450,8 @@
}
@Override
- public void visit(ControlFeedStatement ss, Integer step) throws AsterixException {
- out.println(skip(step) + ss.getOperationType() + skip(step) + ss.getDatasetName());
+ public void visit(DisconnectFeedStatement ss, Integer step) throws AsterixException {
+ out.println(skip(step) + ss.getFeedName() + skip(step) + ss.getDatasetName());
}
@Override
@@ -529,7 +532,19 @@
}
@Override
- public void visit(BeginFeedStatement stmtDel, Integer arg) throws AsterixException {
+ public void visit(CreateFeedStatement stmtDel, Integer arg) throws AsterixException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void visit(ConnectFeedStatement stmtDel, Integer arg) throws AsterixException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void visit(FeedDropStatement stmt, Integer arg) throws AsterixException {
// TODO Auto-generated method stub
}
@@ -537,7 +552,7 @@
@Override
public void visit(CompactStatement fds, Integer arg) throws AsterixException {
// TODO Auto-generated method stub
-
+
}
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
index d9a92b4..84d9726 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
@@ -14,18 +14,20 @@
*/
package edu.uci.ics.asterix.aql.expression.visitor;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
@@ -41,7 +43,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -77,7 +79,7 @@
R visitDatasetDecl(DatasetDecl dd, T arg) throws AsterixException;
- R visitLoadFromFileStatement(LoadFromFileStatement stmtLoad, T arg) throws AsterixException;
+ R visitLoadStatement(LoadStatement stmtLoad, T arg) throws AsterixException;
R visitDropStatement(DropStatement del, T arg) throws AsterixException;
@@ -151,9 +153,13 @@
R visitSetStatement(SetStatement ss, T arg) throws AsterixException;
- R visitBeginFeedStatement(BeginFeedStatement bf, T arg) throws AsterixException;
+ R visitDisconnectFeedStatement(DisconnectFeedStatement del, T arg) throws AsterixException;
- R visitControlFeedStatement(ControlFeedStatement del, T arg) throws AsterixException;
+ R visitConnectFeedStatement(ConnectFeedStatement del, T arg) throws AsterixException;
+
+ R visitCreateFeedStatement(CreateFeedStatement del, T arg) throws AsterixException;
+
+ R visitDropFeedStatement(FeedDropStatement del, T arg) throws AsterixException;
R visitCallExpr(CallExpr pf, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
index 61a7183..ce0e121 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
@@ -14,18 +14,20 @@
*/
package edu.uci.ics.asterix.aql.expression.visitor;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
@@ -41,7 +43,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -77,7 +79,7 @@
void visit(DropStatement stmtDel, T arg) throws AsterixException;
- void visit(LoadFromFileStatement stmtLoad, T arg) throws AsterixException;
+ void visit(LoadStatement stmtLoad, T arg) throws AsterixException;
void visit(InsertStatement stmtInsert, T arg) throws AsterixException;
@@ -155,9 +157,13 @@
void visit(TypeDropStatement stmtDel, T arg) throws AsterixException;
- void visit(BeginFeedStatement stmtDel, T arg) throws AsterixException;
+ void visit(DisconnectFeedStatement stmtDel, T arg) throws AsterixException;
- void visit(ControlFeedStatement stmtDel, T arg) throws AsterixException;
+ void visit(ConnectFeedStatement stmtDel, T arg) throws AsterixException;
+
+ void visit(CreateFeedStatement stmt, T arg) throws AsterixException;
+
+ void visit(FeedDropStatement stmt, T arg) throws AsterixException;
void visit(CreateFunctionStatement cfs, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/DoubleLiteral.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/DoubleLiteral.java
index 542208a..ca21424 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/DoubleLiteral.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/DoubleLiteral.java
@@ -28,12 +28,11 @@
this.value = value;
}
- @Override
+ @Override
public Double getValue() {
return value;
}
-
public void setValue(Double value) {
this.value = value;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/FloatLiteral.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/FloatLiteral.java
index 07d6641..6ecdae8 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/FloatLiteral.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/FloatLiteral.java
@@ -33,7 +33,6 @@
return value;
}
-
public void setValue(Float value) {
this.value = value;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/NullLiteral.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/NullLiteral.java
index 36942bb..5429e2f 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/NullLiteral.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/literal/NullLiteral.java
@@ -51,5 +51,5 @@
@Override
public Object getValue() {
return null;
- }
+ }
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/parser/ScopeChecker.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/parser/ScopeChecker.java
index 246e665..b1c4c5d 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/parser/ScopeChecker.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/parser/ScopeChecker.java
@@ -162,8 +162,8 @@
public String extractFragment(int beginLine, int beginColumn, int endLine, int endColumn) {
StringBuilder extract = new StringBuilder();
- extract.append(inputLines[beginLine - 1].trim().length() > 1 ? inputLines[beginLine - 1].trim().substring(beginColumn)
- : "");
+ extract.append(inputLines[beginLine - 1].trim().length() > 1 ? inputLines[beginLine - 1].trim().substring(
+ beginColumn) : "");
for (int i = beginLine + 1; i < endLine; i++) {
extract.append("\n");
extract.append(inputLines[i - 1]);
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
index 203758d..c416fa6 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
@@ -24,11 +24,12 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Expression.Kind;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
@@ -38,6 +39,7 @@
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -53,7 +55,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -178,34 +180,42 @@
continue;
}
- FunctionDecl functionDecl = lookupUserDefinedFunctionDecl(signature);
- if (functionDecl != null) {
- if (functionDecls.contains(functionDecl)) {
- throw new AsterixException(" Detected recursvity!");
+ Function function = lookupUserDefinedFunctionDecl(signature);
+ if (function == null) {
+ if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(signature, includePrivateFunctions)) {
+ continue;
+ }
+ StringBuilder messageBuilder = new StringBuilder();
+ if (functionDecls.size() > 0) {
+ messageBuilder.append(" function " + functionDecls.get(functionDecls.size() - 1).getSignature()
+ + " depends upon function " + signature + " which is undefined");
} else {
+ messageBuilder.append(" function " + signature + " is undefined ");
+ }
+ throw new AsterixException(messageBuilder.toString());
+ }
+
+ if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
+ FunctionDecl functionDecl = FunctionUtils.getFunctionDecl(function);
+ if (functionDecl != null) {
+ if (functionDecls.contains(functionDecl)) {
+ throw new AsterixException("ERROR:Recursive invocation "
+ + functionDecls.get(functionDecls.size() - 1).getSignature() + " <==> "
+ + functionDecl.getSignature());
+ }
functionDecls.add(functionDecl);
buildOtherUdfs(functionDecl.getFuncBody(), functionDecls, declaredFunctions);
}
- } else {
- if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(signature, includePrivateFunctions)) {
- continue;
- } else {
- throw new AsterixException(" unknown function " + signature);
- }
}
}
+
}
- private FunctionDecl lookupUserDefinedFunctionDecl(FunctionSignature signature) throws AsterixException {
+ private Function lookupUserDefinedFunctionDecl(FunctionSignature signature) throws AsterixException {
if (signature.getNamespace() == null) {
return null;
}
- Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
- if (function == null) {
- return null;
- }
- return FunctionUtils.getFunctionDecl(function);
-
+ return MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
}
private Set<FunctionSignature> getFunctionCalls(Expression expression) throws AsterixException {
@@ -359,7 +369,7 @@
}
@Override
- public Void visitLoadFromFileStatement(LoadFromFileStatement stmtLoad, Void arg) throws AsterixException {
+ public Void visitLoadStatement(LoadStatement stmtLoad, Void arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
}
@@ -525,7 +535,7 @@
}
@Override
- public Void visitControlFeedStatement(ControlFeedStatement del, Void arg) throws AsterixException {
+ public Void visitDisconnectFeedStatement(DisconnectFeedStatement del, Void arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
}
@@ -543,7 +553,19 @@
}
@Override
- public Void visitBeginFeedStatement(BeginFeedStatement bf, Void arg) throws AsterixException {
+ public Void visitCreateFeedStatement(CreateFeedStatement del, Void arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Void visitConnectFeedStatement(ConnectFeedStatement del, Void arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Void visitDropFeedStatement(FeedDropStatement del, Void arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
index d860b0c..30a0ebb 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
@@ -21,11 +21,12 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.IAqlExpression;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
@@ -35,6 +36,7 @@
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -50,7 +52,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -396,7 +398,7 @@
}
@Override
- public Pair<IAqlExpression, List<VariableSubstitution>> visitLoadFromFileStatement(LoadFromFileStatement stmtLoad,
+ public Pair<IAqlExpression, List<VariableSubstitution>> visitLoadStatement(LoadStatement stmtLoad,
List<VariableSubstitution> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -410,7 +412,7 @@
}
@Override
- public Pair<IAqlExpression, List<VariableSubstitution>> visitControlFeedStatement(ControlFeedStatement del,
+ public Pair<IAqlExpression, List<VariableSubstitution>> visitDisconnectFeedStatement(DisconnectFeedStatement del,
List<VariableSubstitution> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -578,7 +580,21 @@
}
@Override
- public Pair<IAqlExpression, List<VariableSubstitution>> visitBeginFeedStatement(BeginFeedStatement bf,
+ public Pair<IAqlExpression, List<VariableSubstitution>> visitCreateFeedStatement(CreateFeedStatement del,
+ List<VariableSubstitution> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<IAqlExpression, List<VariableSubstitution>> visitConnectFeedStatement(ConnectFeedStatement del,
+ List<VariableSubstitution> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<IAqlExpression, List<VariableSubstitution>> visitDropFeedStatement(FeedDropStatement del,
List<VariableSubstitution> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -590,4 +606,4 @@
// TODO Auto-generated method stub
return null;
}
-}
+}
\ No newline at end of file
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
index fc7acfe..58433d3 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
@@ -22,11 +22,12 @@
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Expression.Kind;
import edu.uci.ics.asterix.aql.base.IAqlExpression;
-import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.DatasetDecl;
@@ -36,6 +37,7 @@
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
import edu.uci.ics.asterix.aql.expression.FieldAccessor;
import edu.uci.ics.asterix.aql.expression.FieldBinding;
import edu.uci.ics.asterix.aql.expression.ForClause;
@@ -51,7 +53,7 @@
import edu.uci.ics.asterix.aql.expression.LimitClause;
import edu.uci.ics.asterix.aql.expression.ListConstructor;
import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadFromFileStatement;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
import edu.uci.ics.asterix.aql.expression.OperatorExpr;
@@ -397,7 +399,7 @@
}
@Override
- public Boolean visitLoadFromFileStatement(LoadFromFileStatement stmtLoad, List<FunctionDecl> arg)
+ public Boolean visitLoadStatement(LoadStatement stmtLoad, List<FunctionDecl> arg)
throws AsterixException {
// TODO Auto-generated method stub
return null;
@@ -500,7 +502,8 @@
}
@Override
- public Boolean visitControlFeedStatement(ControlFeedStatement del, List<FunctionDecl> arg) throws AsterixException {
+ public Boolean visitDisconnectFeedStatement(DisconnectFeedStatement del, List<FunctionDecl> arg)
+ throws AsterixException {
// TODO Auto-generated method stub
return null;
}
@@ -519,7 +522,19 @@
}
@Override
- public Boolean visitBeginFeedStatement(BeginFeedStatement bf, List<FunctionDecl> arg) throws AsterixException {
+ public Boolean visitCreateFeedStatement(CreateFeedStatement del, List<FunctionDecl> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Boolean visitConnectFeedStatement(ConnectFeedStatement del, List<FunctionDecl> arg) throws AsterixException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Boolean visitDropFeedStatement(FeedDropStatement del, List<FunctionDecl> arg) throws AsterixException {
// TODO Auto-generated method stub
return null;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/util/FunctionUtils.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/util/FunctionUtils.java
index 1b0cfbe..18695c3 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/util/FunctionUtils.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/util/FunctionUtils.java
@@ -25,6 +25,7 @@
import edu.uci.ics.asterix.aql.parser.AQLParser;
import edu.uci.ics.asterix.aql.parser.ParseException;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -76,4 +77,8 @@
return AsterixBuiltinFunctions.getAsterixFunctionInfo(fi);
}
+ public static IFunctionInfo getFunctionInfo(FunctionSignature fs) {
+ return getFunctionInfo(new FunctionIdentifier(fs.getNamespace(), fs.getName(), fs.getArity()));
+ }
+
}
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index 9080f9c..1044d2e 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -56,6 +56,7 @@
// optimizer hints
private static final String HASH_GROUP_BY_HINT = "hash";
+ private static final String SKIP_SECONDARY_INDEX_SEARCH_HINT = "skip-index";
private static final String BROADCAST_JOIN_HINT = "bcast";
private static final String INDEXED_NESTED_LOOP_JOIN_HINT = "indexnl";
private static final String INMEMORY_HINT = "inmem";
@@ -86,6 +87,12 @@
}
};
+ private static class FunctionName {
+ public String dataverse = null;
+ public String library = null;
+ public String function = null;
+ }
+
private static String getHint(Token t) {
if (t.specialToken == null) {
return null;
@@ -234,6 +241,7 @@
| stmt = IndexSpecification()
| stmt = DataverseSpecification()
| stmt = FunctionSpecification()
+ | stmt = FeedSpecification()
)
{
return stmt;
@@ -247,7 +255,7 @@
TypeExpression typeExpr = null;
}
{
- "type" nameComponents = FunctionOrTypeName() ifNotExists = IfNotExists()
+ "type" nameComponents = TypeName() ifNotExists = IfNotExists()
"as" typeExpr = TypeExpr()
{
long numValues = -1;
@@ -303,6 +311,7 @@
String nodeGroupName = null;
Map<String,String> hints = new HashMap<String,String>();
DatasetDecl dsetDecl = null;
+ boolean autogenerated = false;
String compactionPolicy = null;
}
{
@@ -325,36 +334,12 @@
ifNotExists);
}
- | "feed" <DATASET> nameComponents = QualifiedName()
- <LEFTPAREN> typeName = Identifier() <RIGHTPAREN>
- ifNotExists = IfNotExists()
- "using" adapterName = AdapterName() properties = Configuration()
- (appliedFunction = ApplyFunction())? primaryKeyFields = PrimaryKey()
- ( "on" nodeGroupName = Identifier() )?
- ( "hints" hints = Properties() )?
- ( "using" "compaction" "policy" compactionPolicy = CompactionPolicy() compactionPolicyProperties = Configuration() )?
- {
- FeedDetailsDecl fdd = new FeedDetailsDecl(adapterName,
- properties,
- appliedFunction,
- nodeGroupName != null
- ? new Identifier(nodeGroupName)
- : null,
- primaryKeyFields,
- compactionPolicy,
- compactionPolicyProperties);
- dsetDecl = new DatasetDecl(nameComponents.first,
- nameComponents.second,
- new Identifier(typeName),
- hints,
- DatasetType.FEED,
- fdd,
- ifNotExists);
- }
| ("internal")? <DATASET> nameComponents = QualifiedName()
<LEFTPAREN> typeName = Identifier() <RIGHTPAREN>
ifNotExists = IfNotExists()
- primaryKeyFields = PrimaryKey() ("on" nodeGroupName = Identifier() )?
+ primaryKeyFields = PrimaryKey()
+ ("autogenerated" { autogenerated = true; } )?
+ ("on" nodeGroupName = Identifier() )?
( "hints" hints = Properties() )?
( "using" "compaction" "policy" compactionPolicy = CompactionPolicy() compactionPolicyProperties = Configuration() )?
{
@@ -362,6 +347,7 @@
? new Identifier(nodeGroupName)
: null,
primaryKeyFields,
+ autogenerated,
compactionPolicy,
compactionPolicyProperties);
dsetDecl = new DatasetDecl(nameComponents.first,
@@ -474,33 +460,61 @@
boolean ifNotExists = false;
List<VarIdentifier> paramList = new ArrayList<VarIdentifier>();
String functionBody;
+ VarIdentifier var = null;
Expression functionBodyExpr;
Token beginPos;
Token endPos;
- Pair<Identifier,Identifier> nameComponents=null;
-
+ FunctionName fctName = null;
+
createNewScope();
}
{
- "function" nameComponents = FunctionOrTypeName()
+ "function" fctName = FunctionName()
ifNotExists = IfNotExists()
paramList = ParameterList()
<LEFTBRACE>
- {
- beginPos = token;
- }
+ {
+ beginPos = token;
+ }
functionBodyExpr = Expression() <RIGHTBRACE>
{
endPos = token;
functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
- String dataverse = nameComponents.first.getValue();
- String functionName = nameComponents.second.getValue();
- signature = new FunctionSignature(dataverse, functionName, paramList.size());
+ // TODO use fctName.library
+ signature = new FunctionSignature(fctName.dataverse, fctName.function, paramList.size());
getCurrentScope().addFunctionDescriptor(signature, false);
return new CreateFunctionStatement(signature, paramList, functionBody, ifNotExists);
}
}
+CreateFeedStatement FeedSpecification() throws ParseException:
+{
+ Pair<Identifier,Identifier> nameComponents = null;
+ boolean ifNotExists = false;
+ String adaptorName = null;
+ Map<String,String> properties = null;
+ FunctionSignature appliedFunction = null;
+ CreateFeedStatement cfs = null;
+}
+{
+ (
+ "feed" nameComponents = QualifiedName()
+ ifNotExists = IfNotExists()
+ "using" adaptorName = AdapterName() properties = Configuration()
+ (appliedFunction = ApplyFunction())?
+ {
+ cfs = new CreateFeedStatement(nameComponents.first,
+ nameComponents.second, adaptorName, properties, appliedFunction, ifNotExists);
+ }
+
+ )
+ {
+ return cfs;
+ }
+}
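+// Example use (assuming the usual "create" prefix; adaptor and names are hypothetical):
+//   create feed TwitterFeed using "push_twitter" (("type-name"="Tweet"))
+//   apply function parseTweet;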
+
List<VarIdentifier> ParameterList() throws ParseException:
{
List<VarIdentifier> paramList = new ArrayList<VarIdentifier>();
@@ -543,31 +557,45 @@
FunctionSignature ApplyFunction() throws ParseException:
{
+ FunctionName functionName = null;
FunctionSignature funcSig = null;
}
{
- "apply" "function" funcSig = FunctionSignature()
+ "apply" "function" functioName = FunctionName()
{
- return funcSig;
+ String fqFunctionName = functionName.library == null ? functionName.function : functionName.library + "#" + functionName.function;
+ return new FunctionSignature(functionName.dataverse, fqFunctionName, 1);
}
}
+String GetPolicy() throws ParseException:
+{
+ String policy = null;
+}
+{
+ "using" "policy" policy = Identifier()
+ {
+ return policy;
+ }
+
+}
+
FunctionSignature FunctionSignature() throws ParseException:
{
- Pair<Identifier,Identifier> pairId = null;
+ FunctionName fctName = null;
int arity = 0;
}
{
- pairId = FunctionOrTypeName() "@" <INTEGER_LITERAL>
+ fctName = FunctionName() "@" <INTEGER_LITERAL>
{
arity = new Integer(token.image);
if (arity < 0 && arity != FunctionIdentifier.VARARGS) {
throw new ParseException(" invalid arity:" + arity);
}
- String dataverse = pairId.first.getValue();
- String functionName = pairId.second.getValue();
- return new FunctionSignature(dataverse, functionName, arity);
+ // TODO use fctName.library
+ String fqFunctionName = fctName.library == null ? fctName.function : fctName.library + "#" + fctName.function;
+ return new FunctionSignature(fctName.dataverse, fqFunctionName, arity);
}
}
@@ -615,7 +643,7 @@
{
stmt = new NodeGroupDropStatement(new Identifier(id), ifExists);
}
- | "type" pairId = FunctionOrTypeName() ifExists = IfExists()
+ | "type" pairId = TypeName() ifExists = IfExists()
{
stmt = new TypeDropStatement(pairId.first, pairId.second, ifExists);
}
@@ -627,6 +655,10 @@
{
stmt = new FunctionDropStatement(funcSig, ifExists);
}
+ | "feed" pairId = QualifiedName() ifExists = IfExists()
+ {
+ stmt = new FeedDropStatement(pairId.first, pairId.second, ifExists);
+ }
)
{
return stmt;
@@ -754,7 +786,7 @@
}
}
-LoadFromFileStatement LoadStatement() throws ParseException:
+LoadStatement LoadStatement() throws ParseException:
{
Identifier dataverseName = null;
Identifier datasetName = null;
@@ -776,7 +808,7 @@
}
)?
{
- return new LoadFromFileStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
+ return new LoadStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
}
}
@@ -809,31 +841,22 @@
Statement FeedStatement() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<Identifier,Identifier> feedNameComponents = null;
+ Pair<Identifier,Identifier> datasetNameComponents = null;
+
Map<String,String> configuration = null;
Statement stmt = null;
+ String policy = null;
}
{
(
- "begin" "feed" nameComponents = QualifiedName()
+ "connect" "feed" feedNameComponents = QualifiedName() "to" <DATASET> datasetNameComponents = QualifiedName() (policy = GetPolicy())?
{
- stmt = new BeginFeedStatement(nameComponents.first, nameComponents.second, getVarCounter());
+ stmt = new ConnectFeedStatement(feedNameComponents, datasetNameComponents, policy, getVarCounter());
}
- | "suspend" "feed" nameComponents = QualifiedName()
+ | "disconnect" "feed" feedNameComponents = QualifiedName() "from" <DATASET> datasetNameComponents = QualifiedName()
{
- stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.SUSPEND, nameComponents.first, nameComponents.second);
- }
- | "resume" "feed" nameComponents = QualifiedName()
- {
- stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.RESUME, nameComponents.first, nameComponents.second);
- }
- | "end" "feed" nameComponents = QualifiedName()
- {
- stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.END, nameComponents.first, nameComponents.second);
- }
- | "alter" "feed" nameComponents = QualifiedName() "set" configuration = Configuration()
- {
- stmt = new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
+ stmt = new DisconnectFeedStatement(feedNameComponents, datasetNameComponents);
}
)
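+ // Examples (hypothetical names):
+ //   connect feed feeds.TwitterFeed to dataset Tweets using policy Basic;
+ //   disconnect feed feeds.TwitterFeed from dataset Tweets;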
{
@@ -1026,8 +1049,46 @@
}
}
+FunctionName FunctionName() throws ParseException:
+{
+ String first = null;
+ String second = null;
+ String third = null;
+ boolean secondAfterDot = false;
+}
+{
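+ // Accepted forms: fn | dataverse.fn | library#fn | dataverse.library#fn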
+ first = Identifier() ( <DOT> second = Identifier()
+ {
+ secondAfterDot = true;
+ }
+ ("#" third = Identifier())? | "#" second = Identifier() )?
+ {
+ FunctionName result = new FunctionName();
+ if (second == null) {
+ result.dataverse = defaultDataverse;
+ result.library = null;
+ result.function = first;
+ } else if (third == null) {
+ if (secondAfterDot) {
+ result.dataverse = first;
+ result.library = null;
+ result.function = second;
+ } else {
+ result.dataverse = defaultDataverse;
+ result.library = first;
+ result.function = second;
+ }
+ } else {
+ result.dataverse = first;
+ result.library = second;
+ result.function = third;
+ }
+ return result;
+ }
+}
-Pair<Identifier,Identifier> FunctionOrTypeName() throws ParseException:
+
+Pair<Identifier,Identifier> TypeName() throws ParseException:
{
Pair<Identifier,Identifier> name = null;
}
@@ -1260,8 +1321,12 @@
LOOKAHEAD(2)( <LT> | <GT> | <LE> | <GE> | <EQ> | <NE> |<SIMILAR>)
{
String mhint = getHint(token);
- if (mhint != null && mhint.equals(INDEXED_NESTED_LOOP_JOIN_HINT)) {
- annotation = IndexedNLJoinExpressionAnnotation.INSTANCE;
+ if (mhint != null) {
+ if (mhint.equals(INDEXED_NESTED_LOOP_JOIN_HINT)) {
+ annotation = IndexedNLJoinExpressionAnnotation.INSTANCE;
+ } else if (mhint.equals(SKIP_SECONDARY_INDEX_SEARCH_HINT)) {
+ annotation = SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE;
+ }
}
if (op == null) {
op = new OperatorExpr();
@@ -1684,18 +1749,12 @@
List<Expression> argList = new ArrayList<Expression>();
Expression tmp;
int arity = 0;
- Pair<Identifier,Identifier> funcId = null;
- String funcName;
- String dataverse;
+ FunctionName funcName = null;
String hint = null;
- String id1 = null;
- String id2 = null;
}
{
- funcId = FunctionOrTypeName()
+ funcName = FunctionName()
{
- dataverse = funcId.first.getValue();
- funcName = funcId.second.getValue();
hint = getHint(token);
}
<LEFTPAREN> (tmp = Expression()
@@ -1710,18 +1769,26 @@
}
)*)? <RIGHTPAREN>
{
- FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, arity);
+ // TODO use funcName.library
+ String fqFunctionName = funcName.library == null ? funcName.function : funcName.library + "#" + funcName.function;
+ FunctionSignature signature = lookupFunctionSignature(funcName.dataverse, fqFunctionName, arity);
if (signature == null) {
- signature = new FunctionSignature(dataverse, funcName, arity);
+ signature = new FunctionSignature(funcName.dataverse, fqFunctionName, arity);
}
callExpr = new CallExpr(signature,argList);
- if (hint != null && hint.startsWith(INDEXED_NESTED_LOOP_JOIN_HINT)) {
- callExpr.addHint(IndexedNLJoinExpressionAnnotation.INSTANCE);
+ if (hint != null) {
+ if (hint.startsWith(INDEXED_NESTED_LOOP_JOIN_HINT)) {
+ callExpr.addHint(IndexedNLJoinExpressionAnnotation.INSTANCE);
+ } else if (hint.startsWith(SKIP_SECONDARY_INDEX_SEARCH_HINT)) {
+ callExpr.addHint(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE);
+ }
}
return callExpr;
}
}
+
Expression DatasetAccessExpression() throws ParseException:
{
String funcName;
diff --git a/asterix-common/.gitignore b/asterix-common/.gitignore
index 19f2e00..073c9fa 100644
--- a/asterix-common/.gitignore
+++ b/asterix-common/.gitignore
@@ -1,2 +1,3 @@
/target
/target
+/target
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index 7d1786e..9203d86 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -1,23 +1,18 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!-- ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License"); ! you may
+ not use this file except in compliance with the License. ! you may obtain
+ a copy of the License from ! ! http://www.apache.org/licenses/LICENSE-2.0
+ ! ! Unless required by applicable law or agreed to in writing, software !
+ distributed under the License is distributed on an "AS IS" BASIS, ! WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ! See the
+ License for the specific language governing permissions and ! limitations
+ under the License. ! -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-common</artifactId>
<build>
@@ -70,6 +65,40 @@
<generateDirectory>${project.build.directory}/generated-sources/configuration</generateDirectory>
</configuration>
</execution>
+ <execution>
+ <id>cluster</id>
+ <goals>
+ <goal>generate</goal>
+ </goals>
+ <configuration>
+ <args>
+ <arg>-Xsetters</arg>
+ <arg>-Xvalue-constructor</arg>
+ </args>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-basics</artifactId>
+ <version>0.6.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-value-constructor</artifactId>
+ <version>3.0</version>
+ </plugin>
+ </plugins>
+ <schemaDirectory>src/main/resources/schema</schemaDirectory>
+ <schemaIncludes>
+ <include>cluster.xsd</include>
+ </schemaIncludes>
+ <generatePackage>edu.uci.ics.asterix.event.schema.cluster</generatePackage>
+ <generateDirectory>${project.build.directory}/generated-sources/cluster</generateDirectory>
+ <bindingDirectory>src/main/resources/schema</bindingDirectory>
+ <bindingIncludes>
+ <bindingInclude>jaxb-bindings.xjb</bindingInclude>
+ </bindingIncludes>
+ </configuration>
+ </execution>
</executions>
</plugin>
<plugin>
@@ -94,6 +123,16 @@
<dependencies>
<dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>1.4</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.0.1</version>
+ </dependency>
+ <dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
</dependency>
@@ -116,9 +155,17 @@
<artifactId>hyracks-storage-am-lsm-common</artifactId>
</dependency>
<dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-api</artifactId>
+ </dependency>
+ <dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-test-framework</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java
new file mode 100644
index 0000000..46190e7
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.annotations;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
+
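+// Attached to an expression when the "skip-index" hint is given; signals the
+// optimizer not to rewrite the hinted predicate into a secondary-index search.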
+public class SkipSecondaryIndexSearchExpressionAnnotation implements IExpressionAnnotation {
+
+ public static final String SKIP_SECONDARY_INDEX_SEARCH_ANNOTATION_KEY = "skip-index";
+ public static final SkipSecondaryIndexSearchExpressionAnnotation INSTANCE = new SkipSecondaryIndexSearchExpressionAnnotation();
+
+ private Object object;
+
+ @Override
+ public Object getObject() {
+ return object;
+ }
+
+ @Override
+ public void setObject(Object object) {
+ this.object = object;
+ }
+
+ @Override
+ public IExpressionAnnotation copy() {
+ SkipSecondaryIndexSearchExpressionAnnotation clone = new SkipSecondaryIndexSearchExpressionAnnotation();
+ clone.setObject(object);
+ return clone;
+ }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadExecutor.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadExecutor.java
index edd4b2a..f2026e2 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadExecutor.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadExecutor.java
@@ -19,13 +19,13 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
public class AsterixThreadExecutor implements Executor {
- public final static AsterixThreadExecutor INSTANCE = new AsterixThreadExecutor();
- private final ExecutorService executorService = Executors.newCachedThreadPool(AsterixThreadFactory.INSTANCE);
+ private final ExecutorService executorService;
- private AsterixThreadExecutor() {
-
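+ /**
+ * No longer a singleton: the thread factory is injected so that each node can
+ * supply an AsterixThreadFactory bound to its own lifecycle manager, e.g. (sketch):
+ * new AsterixThreadExecutor(new AsterixThreadFactory(lifeCycleComponentManager))
+ */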
+ public AsterixThreadExecutor(ThreadFactory threadFactory) {
+ executorService = Executors.newCachedThreadPool(threadFactory);
}
@Override
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadFactory.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadFactory.java
index e14e549..d86a4b3 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadFactory.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixThreadFactory.java
@@ -16,14 +16,14 @@
import java.util.concurrent.ThreadFactory;
-import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
public class AsterixThreadFactory implements ThreadFactory {
- public final static AsterixThreadFactory INSTANCE = new AsterixThreadFactory();
+ private final ILifeCycleComponentManager lccm;
- private AsterixThreadFactory() {
-
+ public AsterixThreadFactory(ILifeCycleComponentManager lifeCycleComponentManager) {
+ this.lccm = lifeCycleComponentManager;
}
@Override
@@ -34,7 +34,7 @@
} else {
t = new Thread(r);
}
- t.setUncaughtExceptionHandler(LifeCycleComponentManager.INSTANCE);
+ t.setUncaughtExceptionHandler(lccm);
return t;
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixAppRuntimeContext.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixAppRuntimeContext.java
index f05efe6..8059ede 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixAppRuntimeContext.java
@@ -16,11 +16,14 @@
import java.io.IOException;
import java.util.List;
+import java.util.concurrent.Executor;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IIOManager;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -33,6 +36,10 @@
public interface IAsterixAppRuntimeContext {
+ public IIOManager getIOManager();
+
+ public Executor getThreadExecutor();
+
public ITransactionSubsystem getTransactionSubsystem();
public boolean isShuttingdown();
@@ -64,4 +71,6 @@
public double getBloomFilterFalsePositiveRate();
public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID);
+
+ public IFeedManager getFeedManager();
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixContextInfo.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixContextInfo.java
index db636de..8171d22 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixContextInfo.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/IAsterixContextInfo.java
@@ -25,5 +25,4 @@
*/
public ICCApplicationContext getCCApplicationContext();
-
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
index e975f60..f5a902a 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
@@ -27,6 +27,18 @@
private static final String EXTERNAL_APISERVER_KEY = "api.port";
private static int EXTERNAL_APISERVER_DEFAULT = 19002;
+ private static final String EXTERNAL_FEEDSERVER_KEY = "feed.port";
+ private static int EXTERNAL_FEEDSERVER_DEFAULT = 19003;
+
+ private static final String EXTERNAL_CC_JAVA_OPTS_KEY = "cc.java.opts";
+ private static String EXTERNAL_CC_JAVA_OPTS_DEFAULT = "-Xmx1024m";
+
+ private static final String EXTERNAL_NC_JAVA_OPTS_KEY = "nc.java.opts";
+ private static String EXTERNAL_NC_JAVA_OPTS_DEFAULT = "-Xmx1024m";
+
+ private static final String EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER = "max.wait.active.cluster";
+ private static int EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT = 60;
+
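+ // The keys above are resolved against the instance's asterix configuration. A
+ // corresponding entry would look roughly like the following sketch (the value
+ // shown is just the built-in default):
+ //
+ // <property>
+ // <name>feed.port</name>
+ // <value>19003</value>
+ // </property>
+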
public AsterixExternalProperties(AsterixPropertiesAccessor accessor) {
super(accessor);
}
@@ -41,8 +53,28 @@
PropertyInterpreters.getIntegerPropertyInterpreter());
}
+ public int getFeedServerPort() {
+ return accessor.getProperty(EXTERNAL_FEEDSERVER_KEY, EXTERNAL_FEEDSERVER_DEFAULT,
+ PropertyInterpreters.getIntegerPropertyInterpreter());
+ }
+
public Level getLogLevel() {
return accessor.getProperty(EXTERNAL_LOGLEVEL_KEY, EXTERNAL_LOGLEVEL_DEFAULT,
PropertyInterpreters.getLevelPropertyInterpreter());
}
+
+ public String getNCJavaParams() {
+ return accessor.getProperty(EXTERNAL_NC_JAVA_OPTS_KEY, EXTERNAL_NC_JAVA_OPTS_DEFAULT,
+ PropertyInterpreters.getStringPropertyInterpreter());
+ }
+
+ public String getCCJavaParams() {
+ return accessor.getProperty(EXTERNAL_CC_JAVA_OPTS_KEY, EXTERNAL_CC_JAVA_OPTS_DEFAULT,
+ PropertyInterpreters.getStringPropertyInterpreter());
+ }
+
+ public int getMaxWaitClusterActive() {
+ return accessor.getProperty(EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER, EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT,
+ PropertyInterpreters.getIntegerPropertyInterpreter());
+ }
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java
index 323f5f8..1d68dfc 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixMetadataProperties.java
@@ -23,6 +23,10 @@
super(accessor);
}
+ public String getInstanceName() {
+ return accessor.getInstanceName();
+ }
+
public String getMetadataNodeName() {
return accessor.getMetadataNodeName();
}
@@ -38,9 +42,12 @@
public Set<String> getNodeNames() {
return accessor.getNodeNames();
}
-
- public String getCoredumpPath(String nodeId){
+
+ public String getCoredumpPath(String nodeId) {
return accessor.getCoredumpPath(nodeId);
}
+ public Map<String, String> getCoredumpPaths() {
+ return accessor.getCoredumpConfig();
+ }
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java
index 2bd292e..7654aa3 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixPropertiesAccessor.java
@@ -39,6 +39,7 @@
public class AsterixPropertiesAccessor {
private static final Logger LOGGER = Logger.getLogger(AsterixPropertiesAccessor.class.getName());
+ private final String instanceName;
private final String metadataNodeName;
private final Set<String> nodeNames;
private final Map<String, String[]> stores;
@@ -69,6 +70,7 @@
} catch (JAXBException e) {
throw new AsterixException("Failed to read configuration file " + fileName);
}
+ instanceName = asterixConfiguration.getInstanceName();
metadataNodeName = asterixConfiguration.getMetadataNode();
stores = new HashMap<String, String[]>();
List<Store> configuredStores = asterixConfiguration.getStore();
@@ -113,8 +115,26 @@
return coredumpConfig.get(nodeId);
}
- public String getTransactionLogDir(String nodeId) {
- return transactionLogDirs.get(nodeId);
+ public Map<String, String> getTransactionLogDirs() {
+ return transactionLogDirs;
+ }
+
+ public Map<String, String> getCoredumpConfig() {
+ return coredumpConfig;
+ }
+
+ public void putCoredumpPaths(String nodeId, String coredumpPath) {
+ if (coredumpConfig.containsKey(nodeId)) {
+ throw new IllegalStateException("Cannot override value for coredump path");
+ }
+ coredumpConfig.put(nodeId, coredumpPath);
+ }
+
+ public void putTransactionLogDir(String nodeId, String txnLogDir) {
+ if (transactionLogDirs.containsKey(nodeId)) {
+ throw new IllegalStateException("Cannot override value for txnLogDir");
+ }
+ transactionLogDirs.put(nodeId, txnLogDir);
}
public <T> T getProperty(String property, T defaultValue, IPropertyInterpreter<T> interpreter) {
@@ -137,4 +157,9 @@
+ "'.\n See the description: \n" + p.getDescription() + "\nDefault = " + defaultValue);
}
}
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java
index a1dd52a..992d43f 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixTransactionProperties.java
@@ -14,6 +14,8 @@
*/
package edu.uci.ics.asterix.common.config;
+import java.util.Map;
+
public class AsterixTransactionProperties extends AbstractAsterixProperties {
private static final String TXN_LOG_BUFFER_NUMPAGES_KEY = "txn.log.buffer.numpages";
@@ -23,7 +25,7 @@
private static final int TXN_LOG_BUFFER_PAGESIZE_DEFAULT = (128 << 10); // 128KB
private static final String TXN_LOG_PARTITIONSIZE_KEY = "txn.log.partitionsize";
- private static final long TXN_LOG_PARTITIONSIZE_DEFAULT = ((long)2 << 30); // 2GB
+ private static final long TXN_LOG_PARTITIONSIZE_DEFAULT = ((long) 2 << 30); // 2GB
private static final String TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY = "txn.log.checkpoint.lsnthreshold";
private static final int TXN_LOG_CHECKPOINT_LSNTHRESHOLD_DEFAULT = (64 << 20); // 64M
@@ -51,7 +53,11 @@
}
public String getLogDirectory(String nodeId) {
- return accessor.getTransactionLogDir(nodeId);
+ return accessor.getTransactionLogDirs().get(nodeId);
+ }
+
+ public Map<String, String> getLogDirectories() {
+ return accessor.getTransactionLogDirs();
}
public int getLogBufferNumPages() {
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/DatasetConfig.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/DatasetConfig.java
index 60cf6e5..8481817 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/DatasetConfig.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/DatasetConfig.java
@@ -17,28 +17,20 @@
public class DatasetConfig {
/*
- * We have three kinds of datasets. INTERNAL: A dataset with data persisted
+ * We have two kinds of datasets. INTERNAL: A dataset with data persisted
* in ASTERIX storage. The dataset is populated either using a load
* statement or using insert statement. EXTERNAL: A dataset with data
* residing outside ASTERIX. As such ASTERIX does not maintain any indexes
* on the data. The data for the dataset is fetched as and when required
- * from an external data source using an adapter. FEED : A dataset that can
- * be considered as a hybrid of INTERNAL and EXTERNAL dataset. A FEED
- * dataset is INTERNAL in the sense that the data is persisted within
- * ASTERIX storage and has associated indexes that are maintained by
- * ASTERIX. However the dataset is initially populated with data fetched
- * from an external datasource using an adapter, in a manner similar to an
- * EXTERNAL dataset. A FEED dataset continuously receives data from the
- * associated adapter.
+ * from an external data source using an adapter.
*/
public enum DatasetType {
INTERNAL,
- EXTERNAL,
- FEED
+ EXTERNAL
}
public enum IndexType {
- BTREE,
+ BTREE,
RTREE,
SINGLE_PARTITION_WORD_INVIX,
SINGLE_PARTITION_NGRAM_INVIX,
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
index 21913b0..e00c432 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/GlobalConfig.java
@@ -14,7 +14,7 @@
*/
package edu.uci.ics.asterix.common.config;
-import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Logger;
@@ -45,7 +45,7 @@
public static final Map<String, String> DEFAULT_COMPACTION_POLICY_PROPERTIES;
static {
- DEFAULT_COMPACTION_POLICY_PROPERTIES = new HashMap<String, String>();
+ DEFAULT_COMPACTION_POLICY_PROPERTIES = new LinkedHashMap<String, String>();
DEFAULT_COMPACTION_POLICY_PROPERTIES.put("max-mergable-component-size", "1073741824"); // 1GB
DEFAULT_COMPACTION_POLICY_PROPERTIES.put("max-tolernace-component-count", "5"); // 5 components
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java
index fc6539c..e696519 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/IAsterixPropertiesProvider.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.common.config;
-
public interface IAsterixPropertiesProvider {
public AsterixStorageProperties getStorageProperties();
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/DatasetLifecycleManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/DatasetLifecycleManager.java
index b38f92b..ba80c29 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/DatasetLifecycleManager.java
@@ -436,11 +436,11 @@
}
@Override
- public void start() {
+ public synchronized void start() {
}
@Override
- public void stop(boolean dumpState, OutputStream outputStream) throws IOException {
+ public synchronized void stop(boolean dumpState, OutputStream outputStream) throws IOException {
if (dumpState) {
dumpState(outputStream);
}
@@ -454,24 +454,36 @@
datasetInfos.clear();
}
- private void dumpState(OutputStream outputStream) throws IOException {
+ public void dumpState(OutputStream outputStream) throws IOException {
StringBuilder sb = new StringBuilder();
sb.append(String.format("Memory budget = %d\n", capacity));
sb.append(String.format("Memory used = %d\n", used));
+ sb.append("\n");
- String headerFormat = "%-20s %-10s %-20s %-20s\n";
- String dsFormat = "%-20d %-10b %-20d %-20s %-20s\n";
- String idxFormat = "\t%-20d %-10b %-20d %-20s\n";
- sb.append(String.format(headerFormat, "DatasetID", "Open", "Reference Count", "Last Access"));
+ String dsHeaderFormat = "%-10s %-6s %-16s %-12s\n";
+ String dsFormat = "%-10d %-6b %-16d %-12d\n";
+ String idxHeaderFormat = "%-10s %-11s %-6s %-16s %-6s\n";
+ String idxFormat = "%-10d %-11d %-6b %-16d %-6s\n";
+
+ sb.append("[Datasets]\n");
+ sb.append(String.format(dsHeaderFormat, "DatasetID", "Open", "Reference Count", "Last Access"));
for (DatasetInfo dsInfo : datasetInfos.values()) {
sb.append(String
.format(dsFormat, dsInfo.datasetID, dsInfo.isOpen, dsInfo.referenceCount, dsInfo.lastAccess));
+ }
+ sb.append("\n");
+
+ sb.append("[Indexes]\n");
+ sb.append(String.format(idxHeaderFormat, "DatasetID", "ResourceID", "Open", "Reference Count", "Index"));
+ for (DatasetInfo dsInfo : datasetInfos.values()) {
for (Map.Entry<Long, IndexInfo> entry : dsInfo.indexes.entrySet()) {
IndexInfo iInfo = entry.getValue();
- sb.append(String.format(idxFormat, entry.getKey(), iInfo.isOpen, iInfo.referenceCount, iInfo.index));
+ sb.append(String.format(idxFormat, dsInfo.datasetID, entry.getKey(), iInfo.isOpen,
+ iInfo.referenceCount, iInfo.index));
}
}
+
outputStream.write(sb.toString().getBytes());
}
-}
\ No newline at end of file
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/ITransactionSubsystemProvider.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/ITransactionSubsystemProvider.java
index 27751e3..f0137ba 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/ITransactionSubsystemProvider.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/ITransactionSubsystemProvider.java
@@ -20,6 +20,6 @@
import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-public interface ITransactionSubsystemProvider extends Serializable{
+public interface ITransactionSubsystemProvider extends Serializable {
public ITransactionSubsystem getTransactionSubsystem(IHyracksTaskContext ctx);
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/TransactionSubsystemProvider.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/TransactionSubsystemProvider.java
index 0e8e117..ef8dd85 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/TransactionSubsystemProvider.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/TransactionSubsystemProvider.java
@@ -27,8 +27,8 @@
public class TransactionSubsystemProvider implements ITransactionSubsystemProvider {
@Override
public ITransactionSubsystem getTransactionSubsystem(IHyracksTaskContext ctx) {
- IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
- .getApplicationObject();
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
return runtimeCtx.getTransactionSubsystem();
}
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
index bc19543..26a79c3 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
@@ -75,6 +75,7 @@
}
}
} catch (Exception e) {
+ e.printStackTrace();
throw new HyracksDataException(e);
}
System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
index e58bbfa..6f6a7b2 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
@@ -56,4 +56,19 @@
recordDescProvider, op, isPrimary);
}
+ public boolean isPrimary() {
+ return isPrimary;
+ }
+
+ public int[] getFieldPermutations() {
+ return fieldPermutation;
+ }
+
+ public IndexOperation getIndexOperation() {
+ return op;
+ }
+
+ public IBinaryComparatorFactory[] getComparatorFactories() {
+ return comparatorFactories;
+ }
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/exceptions/ACIDException.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/exceptions/ACIDException.java
index 857ddd5..16b62f8 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/exceptions/ACIDException.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/exceptions/ACIDException.java
@@ -56,7 +56,7 @@
public ACIDException(String message) {
super(message);
}
-
+
public ACIDException(Throwable cause) {
super(cause);
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedConnectionId.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedConnectionId.java
new file mode 100644
index 0000000..def7c10
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedConnectionId.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.Serializable;
+
+/**
+ * A unique identifier for a data feed flowing into a dataset.
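+ * <p>
+ * Identity is value-based over (dataverse, feed, dataset); a minimal sketch, with
+ * purely illustrative names:
+ *
+ * <pre>
+ * FeedConnectionId a = new FeedConnectionId("feeds", "TwitterFeed", "Tweets");
+ * FeedConnectionId b = new FeedConnectionId("feeds", "TwitterFeed", "Tweets");
+ * a.equals(b); // true: all three name components match
+ * a.toString(); // "feeds.TwitterFeed-->Tweets"
+ * </pre>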
+ */
+public class FeedConnectionId implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String dataverse;
+ private final String feedName;
+ private final String datasetName;
+
+ public FeedConnectionId(String dataverse, String feedName, String datasetName) {
+ this.dataverse = dataverse;
+ this.feedName = feedName;
+ this.datasetName = datasetName;
+ }
+
+ public String getDataverse() {
+ return dataverse;
+ }
+
+ public String getFeedName() {
+ return feedName;
+ }
+
+ public String getDatasetName() {
+ return datasetName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof FeedConnectionId)) { // instanceof is false for null
+ return false;
+ }
+ FeedConnectionId other = (FeedConnectionId) o;
+ return other.getFeedName().equals(feedName) && other.getDataverse().equals(dataverse)
+ && other.getDatasetName().equals(datasetName);
+ }
+
+ @Override
+ public int hashCode() {
+ return toString().hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return dataverse + "." + feedName + "-->" + datasetName;
+ }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedMessageService.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedMessageService.java
new file mode 100644
index 0000000..3d0d8f9
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedMessageService.java
@@ -0,0 +1,149 @@
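+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */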
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.nio.CharBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Sends feed report messages on behalf of an operator instance
+ * to the SuperFeedManager associated with the feed.
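+ * <p>
+ * Intended lifecycle, as a sketch (feedId and feedManager are assumed to be
+ * provided by the hosting runtime):
+ *
+ * <pre>
+ * FeedMessageService service = new FeedMessageService(feedId, feedManager);
+ * service.start(); // runs the handler on the feed's executor service
+ * service.sendMessage("..."); // enqueued, then forwarded to the SuperFeedManager
+ * service.stop();
+ * </pre>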
+ */
+public class FeedMessageService {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedMessageService.class.getName());
+
+ public static final char MessageSeparator = '|';
+ private static final char EOL = '\n';
+
+ private final FeedConnectionId feedId;
+ private final LinkedBlockingQueue<String> inbox;
+ private final FeedMessageHandler mesgHandler;
+ private final IFeedManager feedManager;
+
+ public FeedMessageService(FeedConnectionId feedId, IFeedManager feedManager) {
+ this.feedId = feedId;
+ inbox = new LinkedBlockingQueue<String>();
+ mesgHandler = new FeedMessageHandler(inbox, feedId, feedManager);
+ this.feedManager = feedManager;
+ }
+
+ public void start() throws Exception {
+ feedManager.getFeedExecutorService(feedId).execute(mesgHandler);
+ }
+
+ public void stop() throws IOException {
+ mesgHandler.stop();
+ }
+
+ public void sendMessage(String message) throws IOException {
+ inbox.add(message);
+ }
+
+ private static class FeedMessageHandler implements Runnable {
+
+ private final LinkedBlockingQueue<String> inbox;
+ private final FeedConnectionId feedId;
+ private Socket sfmSocket;
+ private volatile boolean process = true; // written by stop() from another thread
+ private final IFeedManager feedManager;
+
+ public FeedMessageHandler(LinkedBlockingQueue<String> inbox, FeedConnectionId feedId, IFeedManager feedManager) {
+ this.inbox = inbox;
+ this.feedId = feedId;
+ this.feedManager = feedManager;
+ }
+
+ public void run() {
+ try {
+ sfmSocket = obtainSFMSocket();
+ if (sfmSocket != null) {
+ while (process) {
+ String message = inbox.take();
+ sfmSocket.getOutputStream().write(message.getBytes());
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to start feed message service for " + feedId);
+ }
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ended feed message service for " + feedId);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Exception in handling incoming feed messages: " + e.getMessage());
+ }
+ } finally {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Stopping feed message handler");
+ }
+ if (sfmSocket != null) {
+ try {
+ sfmSocket.close();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Exception in closing socket " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ }
+
+ public void stop() {
+ process = false;
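+ // note: a handler blocked in inbox.take() observes this only after
+ // the next message arrives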
+ }
+
+ private Socket obtainSFMSocket() throws Exception {
+ Socket sfmDirServiceSocket = null;
+ SuperFeedManager sfm = feedManager.getSuperFeedManager(feedId);
+ try {
+ FeedRuntimeManager runtimeManager = feedManager.getFeedRuntimeManager(feedId);
+ sfmDirServiceSocket = runtimeManager.createClientSocket(sfm.getHost(), sfm.getPort(),
+ IFeedManager.SOCKET_CONNECT_TIMEOUT);
+ if (sfmDirServiceSocket == null) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to connect to " + sfm.getHost() + "[" + sfm.getPort() + "]");
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Connected to Super Feed Manager service at " + sfm.getHost() + ":" + sfm.getPort());
+ }
+ while (!sfmDirServiceSocket.isConnected()) {
+ Thread.sleep(2000);
+ }
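+ // handshake: the directory service replies with a newline-terminated
+ // port number; the message socket is then opened against that port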
+ InputStream in = sfmDirServiceSocket.getInputStream();
+ CharBuffer buffer = CharBuffer.allocate(50);
+ char ch = (char) in.read(); // prime the loop so a spurious NUL (from ch = 0) is never buffered
+ while (ch != EOL) {
+ buffer.put(ch);
+ ch = (char) in.read();
+ }
+ buffer.flip();
+ String s = new String(buffer.array());
+ int port = Integer.parseInt(s.trim());
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Super Feed Manager service directed us to port " + port
+ + "; will connect at " + sfm.getHost() + ":" + port);
+ }
+ sfmSocket = runtimeManager.createClientSocket(sfm.getHost(), port,
+ IFeedManager.SOCKET_CONNECT_TIMEOUT);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
+ } finally {
+ if (sfmDirServiceSocket != null) {
+ sfmDirServiceSocket.close();
+ }
+ }
+ return sfmSocket;
+ }
+ }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedReport.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedReport.java
new file mode 100644
index 0000000..cda56ae
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedReport.java
@@ -0,0 +1,116 @@
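+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */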
+package edu.uci.ics.asterix.common.feeds;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager.FeedReportMessageType;
+
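+/**
+ * A lazily parsed feed report message. As the accessors below imply, the raw
+ * message is a '|'-separated record laid out as
+ *
+ * <pre>
+ * reportType|dataverse:feedName:datasetName|runtimeType|partition|value
+ * </pre>
+ *
+ * e.g. (values purely illustrative): {@code throughput|feeds:TwitterFeed:Tweets|INGESTION|0|1500}
+ */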
+public class FeedReport implements Comparable {
+
+ private FeedConnectionId feedId;
+ private FeedReportMessageType reportType;
+ private int partition = -1;
+ private FeedRuntimeType runtimeType;
+ private long value = -1;
+ private String[] representation;
+
+ public FeedReport() {
+ }
+
+ public FeedReport(String message) {
+ representation = message.split("\\|");
+ }
+
+ public void reset(String message) {
+ representation = message.split("\\|");
+ reportType = null;
+ feedId = null;
+ runtimeType = null;
+ partition = -1;
+ value = -1;
+ }
+
+ @Override
+ public String toString() {
+ return getFeedId() + " " + getReportType() + " " + getPartition() + " " + getRuntimeType() + " " + getValue();
+ }
+
+ public FeedConnectionId getFeedId() {
+ if (feedId == null) {
+ String feedIdRep = representation[1];
+ String[] feedIdComp = feedIdRep.split(":");
+ feedId = new FeedConnectionId(feedIdComp[0], feedIdComp[1], feedIdComp[2]);
+ }
+ return feedId;
+ }
+
+ public FeedReportMessageType getReportType() {
+ if (reportType == null) {
+ reportType = FeedReportMessageType.valueOf(representation[0].toUpperCase());
+ }
+ return reportType;
+ }
+
+ public int getPartition() {
+ if (partition < 0) {
+ partition = Integer.parseInt(representation[3]);
+ }
+ return partition;
+ }
+
+ public FeedRuntimeType getRuntimeType() {
+ if (runtimeType == null) {
+ runtimeType = FeedRuntimeType.valueOf(representation[2].toUpperCase());
+ }
+ return runtimeType;
+ }
+
+ public long getValue() {
+ if (value < 0) {
+ value = Long.parseLong(representation[4]);
+ }
+ return value;
+ }
+
+ public String[] getRepresentation() {
+ return representation;
+ }
+
+ @Override
+ public int compareTo(Object o) {
+ if (!(o instanceof FeedReport)) {
+ throw new IllegalArgumentException("Incorrect operand type " + o);
+ }
+
+ FeedReport other = (FeedReport) o;
+ if (!other.getReportType().equals(getReportType())) {
+ throw new IllegalArgumentException("Cannot compare feed reports of different types: " + o);
+ }
+
+ int returnValue = 0;
+
+ switch (getReportType()) {
+ case CONGESTION:
+ returnValue = ranking.get(getRuntimeType()) - ranking.get(other.getRuntimeType());
+ break;
+
+ case THROUGHPUT:
+ returnValue = (int) (other.getValue() - getValue());
+ break;
+ }
+
+ return returnValue;
+ }
+
+ private static Map<FeedRuntimeType, Integer> ranking = populateRanking();
+
+ private static Map<FeedRuntimeType, Integer> populateRanking() {
+ Map<FeedRuntimeType, Integer> ranking = new HashMap<FeedRuntimeType, Integer>();
+ ranking.put(FeedRuntimeType.INGESTION, 1);
+ ranking.put(FeedRuntimeType.COMPUTE, 2);
+ ranking.put(FeedRuntimeType.STORAGE, 3);
+ ranking.put(FeedRuntimeType.COMMIT, 4);
+ return ranking;
+ }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntime.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntime.java
new file mode 100644
index 0000000..d1b2faf
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntime.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+
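+/**
+ * Represents one stage (ingestion, compute, storage, or commit) of a feed pipeline
+ * on a given partition. It is identified by a {@link FeedRuntimeId} and may carry a
+ * {@link FeedRuntimeState} (a frame, its writer, and any pending exception).
+ */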
+public class FeedRuntime {
+
+ public enum FeedRuntimeType {
+ INGESTION,
+ COMPUTE,
+ STORAGE,
+ COMMIT
+ }
+
+ /** A unique identifier */
+ protected final FeedRuntimeId feedRuntimeId;
+
+ /** The runtime state; see {@link FeedRuntimeState}. */
+ protected FeedRuntimeState runtimeState;
+
+ public FeedRuntime(FeedConnectionId feedId, int partition, FeedRuntimeType feedRuntimeType) {
+ this.feedRuntimeId = new FeedRuntimeId(feedRuntimeType, feedId, partition);
+ }
+
+ public FeedRuntime(FeedConnectionId feedId, int partition, FeedRuntimeType feedRuntimeType,
+ FeedRuntimeState runtimeState) {
+ this.feedRuntimeId = new FeedRuntimeId(feedRuntimeType, feedId, partition);
+ this.runtimeState = runtimeState;
+ }
+
+ @Override
+ public String toString() {
+ return feedRuntimeId + " runtime state present ? " + (runtimeState != null);
+ }
+
+ public static class FeedRuntimeState {
+
+ private ByteBuffer frame;
+ private IFrameWriter frameWriter;
+ private Exception exception;
+
+ public FeedRuntimeState(ByteBuffer frame, IFrameWriter frameWriter, Exception exception) {
+ this.frame = frame;
+ this.frameWriter = frameWriter;
+ this.exception = exception;
+ }
+
+ public ByteBuffer getFrame() {
+ return frame;
+ }
+
+ public void setFrame(ByteBuffer frame) {
+ this.frame = frame;
+ }
+
+ public IFrameWriter getFrameWriter() {
+ return frameWriter;
+ }
+
+ public void setFrameWriter(IFrameWriter frameWriter) {
+ this.frameWriter = frameWriter;
+ }
+
+ public Exception getException() {
+ return exception;
+ }
+
+ public void setException(Exception exception) {
+ this.exception = exception;
+ }
+
+ }
+
+ public static class FeedRuntimeId {
+
+ private final FeedRuntimeType feedRuntimeType;
+ private final FeedConnectionId feedId;
+ private final int partition;
+ private final int hashCode;
+
+ public FeedRuntimeId(FeedRuntimeType runtimeType, FeedConnectionId feedId, int partition) {
+ this.feedRuntimeType = runtimeType;
+ this.feedId = feedId;
+ this.partition = partition;
+ this.hashCode = (feedId + "[" + partition + "]" + feedRuntimeType).hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return feedId + "[" + partition + "]" + " " + feedRuntimeType;
+ }
+
+ @Override
+ public int hashCode() {
+ return hashCode;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof FeedRuntimeId) {
+ FeedRuntimeId oid = ((FeedRuntimeId) o);
+ return oid.getFeedId().equals(feedId) && oid.getFeedRuntimeType().equals(feedRuntimeType)
+ && oid.getPartition() == partition;
+ }
+ return false;
+ }
+
+ public FeedRuntimeType getFeedRuntimeType() {
+ return feedRuntimeType;
+ }
+
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+ public int getPartition() {
+ return partition;
+ }
+
+ }
+
+ public FeedRuntimeState getRuntimeState() {
+ return runtimeState;
+ }
+
+ public void setRuntimeState(FeedRuntimeState runtimeState) {
+ this.runtimeState = runtimeState;
+ }
+
+ public FeedRuntimeId getFeedRuntimeId() {
+ return feedRuntimeId;
+ }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntimeManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntimeManager.java
new file mode 100644
index 0000000..a68f6b8
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/FeedRuntimeManager.java
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+
+public class FeedRuntimeManager {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedRuntimeManager.class.getName());
+
+ private final FeedConnectionId feedId;
+ private final IFeedManager feedManager;
+ private SuperFeedManager superFeedManager;
+ private final Map<FeedRuntimeId, FeedRuntime> feedRuntimes;
+ private final ExecutorService executorService;
+ private FeedMessageService messageService;
+ private final SocketFactory socketFactory = new SocketFactory();
+ private final LinkedBlockingQueue<String> feedReportQueue;
+
+ public FeedRuntimeManager(FeedConnectionId feedId, IFeedManager feedManager) {
+ this.feedId = feedId;
+ feedRuntimes = new ConcurrentHashMap<FeedRuntimeId, FeedRuntime>();
+ executorService = Executors.newCachedThreadPool();
+ feedReportQueue = new LinkedBlockingQueue<String>();
+ this.feedManager = feedManager;
+ }
+
+ public void close(boolean closeAll) throws IOException {
+ socketFactory.close();
+
+ if (messageService != null) {
+ messageService.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Shut down message services for :" + feedId);
+ }
+ messageService = null;
+ }
+ if (superFeedManager != null && superFeedManager.isLocal()) {
+ superFeedManager.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Shut down super feed manager for :" + feedId);
+ }
+ }
+
+ if (closeAll) {
+ if (executorService != null) {
+ executorService.shutdownNow();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Shut down executor service for :" + feedId);
+ }
+ }
+ }
+ }
+
+ public void setSuperFeedManager(SuperFeedManager sfm) throws Exception {
+ this.superFeedManager = sfm;
+ if (sfm.isLocal()) {
+ sfm.start();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Started Super Feed Manager for feed :" + feedId);
+ }
+ }
+ this.messageService = new FeedMessageService(feedId, feedManager);
+ messageService.start();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Started Feed Message Service for feed :" + feedId);
+ }
+ }
+
+ public SuperFeedManager getSuperFeedManager() {
+ return superFeedManager;
+ }
+
+ public FeedRuntime getFeedRuntime(FeedRuntimeId runtimeId) {
+ return feedRuntimes.get(runtimeId);
+ }
+
+ public void registerFeedRuntime(FeedRuntimeId runtimeId, FeedRuntime feedRuntime) {
+ feedRuntimes.put(runtimeId, feedRuntime);
+ }
+
+ public void deregisterFeedRuntime(FeedRuntimeId runtimeId) {
+ feedRuntimes.remove(runtimeId);
+ if (feedRuntimes.isEmpty()) {
+ synchronized (this) {
+ if (feedRuntimes.isEmpty()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("De-registering feed");
+ }
+ feedManager.deregisterFeed(runtimeId.getFeedId());
+ }
+ }
+ }
+ }
+
+ public ExecutorService getExecutorService() {
+ return executorService;
+ }
+
+ public FeedMessageService getMessageService() {
+ return messageService;
+ }
+
+ public Socket createClientSocket(String host, int port) throws UnknownHostException, IOException {
+ return socketFactory.createClientSocket(host, port);
+ }
+
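+ /**
+ * Retries the connection in a tight loop until it succeeds or the timeout
+ * elapses; returns null (rather than throwing) if no connection was made in time.
+ */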
+ public Socket createClientSocket(String host, int port, long timeout) throws UnknownHostException, IOException {
+ Socket client = null;
+ boolean continueAttempt = true;
+ long startAttempt = System.currentTimeMillis();
+ long endAttempt = System.currentTimeMillis();
+ while (client == null && continueAttempt) {
+ try {
+ client = socketFactory.createClientSocket(host, port);
+ } catch (Exception e) {
+ endAttempt = System.currentTimeMillis();
+ if (endAttempt - startAttempt > timeout) {
+ continueAttempt = false;
+ }
+ }
+ }
+ return client;
+ }
+
+ public ServerSocket createServerSocket(int port) throws IOException {
+ return socketFactory.createServerSocket(port);
+ }
+
+ private static class SocketFactory {
+
+ private final Map<SocketId, Socket> sockets = new HashMap<SocketId, Socket>();
+ private final List<ServerSocket> serverSockets = new ArrayList<ServerSocket>();
+
+ public Socket createClientSocket(String host, int port) throws UnknownHostException, IOException {
+ Socket socket = new Socket(host, port);
+ sockets.put(new SocketId(host, port), socket);
+ return socket;
+ }
+
+ public void close() throws IOException {
+ for (ServerSocket socket : serverSockets) {
+ socket.close();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Closed server socket :" + socket);
+ }
+ }
+
+ for (Entry<SocketId, Socket> entry : sockets.entrySet()) {
+ entry.getValue().close();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Closed client socket :" + entry.getKey());
+ }
+ }
+ }
+
+ public ServerSocket createServerSocket(int port) throws IOException {
+ ServerSocket socket = new ServerSocket(port);
+ serverSockets.add(socket);
+ return socket;
+ }
+
+ private static class SocketId {
+ private final String host;
+ private final int port;
+
+ public SocketId(String host, int port) {
+ this.host = host;
+ this.port = port;
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ @Override
+ public String toString() {
+ return host + "[" + port + "]";
+ }
+
+ @Override
+ public int hashCode() {
+ return toString().hashCode();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof SocketId)) {
+ return false;
+ }
+
+ return ((SocketId) o).getHost().equals(host) && ((SocketId) o).getPort() == port;
+ }
+
+ }
+ }
+
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+ public LinkedBlockingQueue<String> getFeedReportQueue() {
+ return feedReportQueue;
+ }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/IFeedManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/IFeedManager.java
new file mode 100644
index 0000000..6cdc45c
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/IFeedManager.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.IOException;
+import java.util.concurrent.ExecutorService;
+
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+
+/**
+ * Handle (de)registration of feeds for delivery of control messages.
+ */
+public interface IFeedManager {
+
+ public static final long SOCKET_CONNECT_TIMEOUT = 5000; // milliseconds
+
+ /**
+ * Returns the executor service associated with the feed.
+ *
+ * @param feedId
+ * @return
+ */
+ public ExecutorService getFeedExecutorService(FeedConnectionId feedId);
+
+ /**
+ * Allows registration of a feedRuntime.
+ *
+ * @param feedRuntime
+ * @throws Exception
+ */
+ public void registerFeedRuntime(FeedRuntime feedRuntime) throws Exception;
+
+ /**
+ * Allows de-registration of a feed runtime.
+ *
+ * @param feedRuntimeId
+ */
+ public void deRegisterFeedRuntime(FeedRuntimeId feedRuntimeId);
+
+ /**
+ * Obtain feed runtime corresponding to a feedRuntimeId
+ *
+ * @param feedRuntimeId
+ * @return
+ */
+ public FeedRuntime getFeedRuntime(FeedRuntimeId feedRuntimeId);
+
+ /**
+ * Register the Super Feed Manager associated with a feed.
+ *
+ * @param feedId
+ * @param sfm
+ * @throws Exception
+ */
+ public void registerSuperFeedManager(FeedConnectionId feedId, SuperFeedManager sfm) throws Exception;
+
+ /**
+ * Obtain a handle to the Super Feed Manager associated with the feed.
+ *
+ * @param feedId
+ * @return
+ */
+ public SuperFeedManager getSuperFeedManager(FeedConnectionId feedId);
+
+ /**
+ * De-register a feed
+ *
+ * @param feedId
+ * @throws IOException
+ */
+ public void deregisterFeed(FeedConnectionId feedId);
+
+ /**
+ * Obtain the feed runtime manager associated with a feed.
+ *
+ * @param feedId
+ * @return
+ */
+ public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedId);
+
+ /**
+ * Obtain a handle to the feed Message service associated with a feed.
+ *
+ * @param feedId
+ * @return
+ */
+ public FeedMessageService getFeedMessageService(FeedConnectionId feedId);
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/MessageListener.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/MessageListener.java
new file mode 100644
index 0000000..7beb212
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/MessageListener.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
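+/**
+ * Accepts a single client connection on the configured port and pushes each
+ * newline-terminated message it receives onto the shared outbox queue.
+ */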
+public class MessageListener {
+
+ private static final Logger LOGGER = Logger.getLogger(MessageListener.class.getName());
+
+ private int port;
+ private final LinkedBlockingQueue<String> outbox;
+
+ private ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+ private MessageListenerServer listenerServer;
+
+ public MessageListener(int port, LinkedBlockingQueue<String> outbox) {
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ listenerServer.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Stopped message receiving service at " + port);
+ }
+ if (!executorService.isShutdown()) {
+ executorService.shutdownNow();
+ }
+
+ }
+
+ public void start() throws IOException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting message receiving service at " + port);
+ }
+ listenerServer = new MessageListenerServer(port, outbox);
+ executorService.execute(listenerServer);
+ }
+
+ private static class MessageListenerServer implements Runnable {
+
+ private final int port;
+ private final LinkedBlockingQueue<String> outbox;
+ private ServerSocket server;
+
+ public MessageListenerServer(int port, LinkedBlockingQueue<String> outbox) {
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ try {
+ server.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void run() {
+ final char EOL = '\n';
+ Socket client = null;
+ try {
+ server = new ServerSocket(port);
+ client = server.accept();
+ InputStream in = client.getInputStream();
+ CharBuffer buffer = CharBuffer.allocate(5000);
+ int c;
+ // read as an int so end-of-stream (-1) stays detectable; casting to char
+ // first would fold -1 into 0xFFFF and the EOF check could never fire
+ while ((c = in.read()) != -1) {
+ char ch = (char) c;
+ while (ch != EOL && c != -1) {
+ buffer.put(ch);
+ c = in.read();
+ ch = (char) c;
+ }
+ buffer.flip();
+ String s = new String(buffer.array());
+ synchronized (outbox) {
+ outbox.add(s + "\n");
+ }
+ buffer.position(0);
+ buffer.limit(5000);
+ }
+
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to start message listener at port " + port + ": " + e.getMessage());
+ }
+ } finally {
+ if (server != null) {
+ try {
+ server.close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ }
+
+ }
+
+ private static class MessageParser implements Runnable {
+
+ private Socket client;
+ private IMessageAnalyzer messageAnalyzer;
+ private static final char EOL = '\n';
+
+ public MessageParser(Socket client, IMessageAnalyzer messageAnalyzer) {
+ this.client = client;
+ this.messageAnalyzer = messageAnalyzer;
+ }
+
+ @Override
+ public void run() {
+ CharBuffer buffer = CharBuffer.allocate(5000);
+ int c;
+ try {
+ InputStream in = client.getInputStream();
+ // same EOF handling as above: read as an int so -1 is not lost in a char cast
+ while ((c = in.read()) != -1) {
+ char ch = (char) c;
+ while (ch != EOL && c != -1) {
+ buffer.put(ch);
+ c = in.read();
+ ch = (char) c;
+ }
+ buffer.flip();
+ String s = new String(buffer.array());
+ synchronized (messageAnalyzer) {
+ messageAnalyzer.getMessageQueue().add(s + "\n");
+ }
+ buffer.position(0);
+ buffer.limit(5000);
+ }
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ } finally {
+ try {
+ client.close();
+ } catch (IOException ioe) {
+ // do nothing
+ }
+ }
+ }
+ }
+
+ public static interface IMessageAnalyzer {
+
+ /**
+ * @return
+ */
+ public LinkedBlockingQueue<String> getMessageQueue();
+
+ }
+
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/SuperFeedManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/SuperFeedManager.java
new file mode 100644
index 0000000..37306a0
--- /dev/null
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/feeds/SuperFeedManager.java
@@ -0,0 +1,447 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.common.feeds;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * The feed operators running on an NC report their health (statistics) to the local Feed Manager,
+ * so a feed has one Feed Manager per NC. From among these Feed Managers a SuperFeedManager is
+ * chosen (randomly). The SuperFeedManager collects the reports from the Feed Managers and thus
+ * holds the global, cluster-wide view of how the distributed feed operators are performing.
+ */
+public class SuperFeedManager {
+
+ private static final Logger LOGGER = Logger.getLogger(SuperFeedManager.class.getName());
+
+ /**
+ * IP Address or DNS name of the host where Super Feed Manager is running.
+ */
+ private String host;
+
+ private AtomicInteger availablePort; // starting value is fixed
+
+ /**
+ * The port at which the SuperFeedManager listens for connections by other Feed Managers.
+ */
+ private final int feedReportPort; // fixed
+
+ /**
+ * The port at which the SuperFeedManager listens for connections by clients that wish
+ * to subscribe to the feed health reports, e.g. a feed management console.
+ */
+ private final int feedReportSubscribePort; // fixed
+
+ /**
+ * The Id of Node Controller
+ */
+ private final String nodeId;
+
+ /**
+ * A unique identifier for the feed instance. A feed instance represents the flow of data
+ * from a feed to a dataset.
+ **/
+ private final FeedConnectionId feedConnectionId;
+
+ /**
+ * Set to true if the Super Feed Manager is local to the NC.
+ **/
+ private boolean isLocal = false;
+
+ private FeedReportDestinationSocketProvider sfmService;
+
+ private SuperFeedReportSubscriptionService subscriptionService;
+
+ private LinkedBlockingQueue<String> feedReportInbox;
+
+ private boolean started = false;
+
+ private final IFeedManager feedManager;
+
+ public static final int PORT_RANGE_ASSIGNED = 10;
+
+ public enum FeedReportMessageType {
+ CONGESTION,
+ THROUGHPUT
+ }
+
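+ /**
+ * Port layout, as fixed by the assignments below: the base port receives feed
+ * reports, base+1 serves report subscriptions, and ports from base+2 upward are
+ * handed out (via availablePort) to per-subscriber data providers.
+ */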
+ public SuperFeedManager(FeedConnectionId feedId, String host, String nodeId, int port, IFeedManager feedManager)
+ throws Exception {
+ this.feedConnectionId = feedId;
+ this.feedManager = feedManager;
+ this.nodeId = nodeId;
+ this.feedReportPort = port;
+ this.feedReportSubscribePort = port + 1;
+ this.availablePort = new AtomicInteger(feedReportSubscribePort + 1);
+ this.host = host;
+ this.feedReportInbox = new LinkedBlockingQueue<String>();
+ }
+
+ public int getPort() {
+ return feedReportPort;
+ }
+
+ public String getHost() throws Exception {
+ return host;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public FeedConnectionId getFeedConnectionId() {
+ return feedConnectionId;
+ }
+
+ public boolean isLocal() {
+ return isLocal;
+ }
+
+ public void setLocal(boolean isLocal) {
+ this.isLocal = isLocal;
+ }
+
+ public void start() throws IOException {
+ if (sfmService == null) {
+ ExecutorService executorService = feedManager.getFeedExecutorService(feedConnectionId);
+ sfmService = new FeedReportDestinationSocketProvider(feedReportPort, feedReportInbox, feedConnectionId,
+ availablePort, feedManager);
+ executorService.execute(sfmService);
+ subscriptionService = new SuperFeedReportSubscriptionService(feedConnectionId, feedReportSubscribePort,
+ sfmService.getMesgAnalyzer(), availablePort, feedManager);
+ executorService.execute(subscriptionService);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Started super feed manager! " + this);
+ }
+ started = true;
+ }
+
+ public void stop() throws IOException {
+ sfmService.stop();
+ subscriptionService.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Stopped super feed manager! " + this);
+ }
+ started = false;
+ }
+
+ public boolean isStarted() {
+ return started;
+ }
+
+ @Override
+ public String toString() {
+ return feedConnectionId + "[" + nodeId + "(" + host + ")" + ":" + feedReportPort + "]"
+ + (isLocal ? (started ? "Started " : "Not Started") : " Remote ");
+ }
+
+ public AtomicInteger getAvailablePort() {
+ return availablePort;
+ }
+
+ private static class SuperFeedReportSubscriptionService implements Runnable {
+
+ private final FeedConnectionId feedId;
+ private ServerSocket serverFeedSubscribe;
+ private AtomicInteger subscriptionPort;
+ private volatile boolean active = true; // written by stop() from another thread
+ private String EOM = "\n";
+ private final FeedReportProvider reportProvider;
+ private final List<FeedDataProviderService> dataProviders = new ArrayList<FeedDataProviderService>();
+ private final IFeedManager feedManager;
+
+ public SuperFeedReportSubscriptionService(FeedConnectionId feedId, int port, FeedReportProvider reportProvider,
+ AtomicInteger nextPort, IFeedManager feedManager) throws IOException {
+ this.feedId = feedId;
+ serverFeedSubscribe = feedManager.getFeedRuntimeManager(feedId).createServerSocket(port);
+ this.subscriptionPort = nextPort;
+ this.reportProvider = reportProvider;
+ this.feedManager = feedManager;
+ }
+
+ public void stop() {
+ active = false;
+ for (FeedDataProviderService dataProviderService : dataProviders) {
+ dataProviderService.stop();
+ }
+ }
+
+ @Override
+ public void run() {
+ while (active) {
+ try {
+ Socket client = serverFeedSubscribe.accept();
+ OutputStream os = client.getOutputStream();
+ int port = subscriptionPort.incrementAndGet();
+ LinkedBlockingQueue<String> reportInbox = new LinkedBlockingQueue<String>();
+ reportProvider.registerSubscription(reportInbox);
+ FeedDataProviderService dataProviderService = new FeedDataProviderService(feedId, port,
+ reportInbox, feedManager);
+ dataProviders.add(dataProviderService);
+ feedManager.getFeedRuntimeManager(feedId).getExecutorService().execute(dataProviderService);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Recevied subscription request for feed :" + feedId
+ + " Subscripton available at port " + subscriptionPort);
+ }
+ os.write((port + EOM).getBytes());
+ os.flush();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
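+ /**
+ * Streams the report messages queued in its inbox to a single subscriber
+ * connected at the assigned subscription port.
+ */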
+ private static class FeedDataProviderService implements Runnable {
+
+ private final FeedConnectionId feedId;
+ private final IFeedManager feedManager;
+ private int subscriptionPort;
+ private ServerSocket dataProviderSocket;
+ private LinkedBlockingQueue<String> inbox;
+ private boolean active = true;
+ private String EOM = "\n";
+
+ public FeedDataProviderService(FeedConnectionId feedId, int port, LinkedBlockingQueue<String> inbox,
+ IFeedManager feedManager) throws IOException {
+ this.feedId = feedId;
+ this.subscriptionPort = port;
+ this.inbox = inbox;
+ dataProviderSocket = feedManager.getFeedRuntimeManager(feedId).createServerSocket(port);
+ this.feedManager = feedManager;
+ }
+
+ @Override
+ public void run() {
+ try {
+ Socket client = dataProviderSocket.accept();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Subscriber to " + feedId + " data connected");
+ }
+ OutputStream os = client.getOutputStream();
+ while (active) {
+ String message = inbox.take();
+ os.write((message + EOM).getBytes());
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Unsubscribed from " + feedId + " disconnected");
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void stop() {
+ active = false;
+ }
+
+ @Override
+ public String toString() {
+ return "DATA_PROVIDER_" + feedId + "[" + subscriptionPort + "]";
+ }
+
+ }
+
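+ /**
+ * Accepts connections from feed operators that wish to send reports and
+ * hands each one a fresh port backed by a MessageListener; the listener
+ * funnels received messages into the shared inbox consumed by the
+ * FeedReportProvider.
+ */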
+ private static class FeedReportDestinationSocketProvider implements Runnable {
+
+ private static final String EOM = "\n";
+
+ private AtomicInteger nextPort;
+ private final ServerSocket feedReportSocket;
+ private final LinkedBlockingQueue<String> inbox;
+ private final List<MessageListener> messageListeners;
+ private final FeedReportProvider mesgAnalyzer;
+ private final FeedConnectionId feedId;
+ private boolean process = true;
+
+ public FeedReportDestinationSocketProvider(int port, LinkedBlockingQueue<String> inbox,
+ FeedConnectionId feedId, AtomicInteger availablePort, IFeedManager feedManager) throws IOException {
+ FeedRuntimeManager runtimeManager = feedManager.getFeedRuntimeManager(feedId);
+ this.feedReportSocket = runtimeManager.createServerSocket(port);
+ this.nextPort = availablePort;
+ this.inbox = inbox;
+ this.feedId = feedId;
+ this.messageListeners = new ArrayList<MessageListener>();
+ this.mesgAnalyzer = new FeedReportProvider(inbox, feedId);
+ feedManager.getFeedExecutorService(feedId).execute(mesgAnalyzer);
+ }
+
+ public void stop() {
+ process = false;
+ if (feedReportSocket != null) {
+ try {
+ feedReportSocket.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ for (MessageListener listener : messageListeners) {
+ listener.stop();
+ }
+ mesgAnalyzer.stop();
+ }
+
+ @Override
+ public void run() {
+ Socket client = null;
+ while (process) {
+ try {
+ client = feedReportSocket.accept();
+ int port = nextPort.incrementAndGet();
+ /**
+ * MessageListener provides the functionality of listening at a port for messages
+ * and delivering each received message to an input queue (inbox).
+ */
+ MessageListener listener = new MessageListener(port, inbox);
+ listener.start();
+ synchronized (messageListeners) {
+ messageListeners.add(listener);
+ }
+ OutputStream os = client.getOutputStream();
+ os.write((port + EOM).getBytes());
+ os.flush();
+ } catch (IOException e) {
+ if (!process) {
+ break;
+ }
+ } finally {
+ if (client != null) {
+ try {
+ client.close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ }
+
+ public FeedReportProvider getMesgAnalyzer() {
+ return mesgAnalyzer;
+ }
+
+ }
+
+ /**
+ * Report messages emitted by the feed operators arrive at the FeedReportProvider,
+ * which distributes them to the registered subscribers. The Feed Management
+ * Console is an example of such a subscriber.
+ */
+ private static class FeedReportProvider implements Runnable {
+
+ private final LinkedBlockingQueue<String> inbox;
+ private final FeedConnectionId feedId;
+ private boolean process = true;
+ private final List<LinkedBlockingQueue<String>> subscriptionQueues;
+ private final Map<String, String> ingestionThroughputs;
+
+ public FeedReportProvider(LinkedBlockingQueue<String> inbox, FeedConnectionId feedId)
+ throws UnknownHostException, IOException {
+ this.inbox = inbox;
+ this.feedId = feedId;
+ this.subscriptionQueues = new ArrayList<LinkedBlockingQueue<String>>();
+ this.ingestionThroughputs = new HashMap<String, String>();
+ }
+
+ public void stop() {
+ process = false;
+ }
+
+ public void registerSubscription(LinkedBlockingQueue<String> subscriptionQueue) {
+ subscriptionQueues.add(subscriptionQueue);
+ }
+
+ public void deregisterSubscription(LinkedBlockingQueue<String> subscriptionQueue) {
+ subscriptionQueues.remove(subscriptionQueue);
+ }
+
+ public void run() {
+ StringBuilder finalMessage = new StringBuilder();
+ FeedReport report = new FeedReport();
+ while (process) {
+ try {
+ String message = inbox.take();
+ report.reset(message);
+ FeedReportMessageType mesgType = report.getReportType();
+ switch (mesgType) {
+ case THROUGHPUT:
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Feed Health Report " + message);
+ }
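+ // Assumed report layout (per the indices below): fields are
+ // '|'-separated, with the partition id at position 3, the
+ // throughput at position 4, and the timestamp at position 6.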
+ String[] msgComponents = message.split("\\|");
+ String partition = msgComponents[3];
+ String tput = msgComponents[4];
+ String timestamp = msgComponents[6];
+
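+ // Dispatch a composite report only once a throughput value has been
+ // collected from every ingestion partition; collected readings are
+ // cleared as they are folded into the outgoing message.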
+ boolean dispatchReport = true;
+ if (ingestionThroughputs.get(partition) == null) {
+ ingestionThroughputs.put(partition, tput);
+ dispatchReport = false;
+ } else {
+ for (int i = 0; i < ingestionThroughputs.size(); i++) {
+ String tp = ingestionThroughputs.get(i + "");
+ if (tp != null) {
+ ingestionThroughputs.put(i + "", null);
+ finalMessage.append(tp + FeedMessageService.MessageSeparator);
+ } else {
+ dispatchReport = false;
+ break;
+ }
+ }
+ ingestionThroughputs.put(partition, tput);
+ }
+
+ if (dispatchReport) {
+ String dispatchedReport = finalMessage.toString();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Dispatched report " + dispatchedReport);
+ }
+ for (LinkedBlockingQueue<String> q : subscriptionQueues) {
+ q.add(dispatchedReport);
+ }
+ }
+ finalMessage.delete(0, finalMessage.length());
+ break;
+ case CONGESTION:
+ // congestionInbox.add(report);
+ break;
+ }
+ } catch (InterruptedException e) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Unable to process messages " + e.getMessage() + " for feed " + feedId);
+ }
+ }
+ }
+ }
+
+ }
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/functions/FunctionSignature.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/functions/FunctionSignature.java
index 4e05048..56d0a4b 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/functions/FunctionSignature.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/functions/FunctionSignature.java
@@ -19,10 +19,10 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
public class FunctionSignature implements Serializable {
- private String namespace;
- private String name;
- private int arity;
- private String rep;
+ private String namespace;
+ private String name;
+ private int arity;
+ private String rep;
public FunctionSignature(String namespace, String name, int arity) {
this.namespace = namespace;
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/AbstractOperationCallback.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/AbstractOperationCallback.java
index c549e7d..77cc489 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/AbstractOperationCallback.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/AbstractOperationCallback.java
@@ -45,7 +45,7 @@
MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
return Math.abs((int) longHashes[0]);
}
-
+
public void resetLocalNumActiveOperations() {
transactorLocalNumActiveOperations.set(0);
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IAsterixAppRuntimeContextProvider.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IAsterixAppRuntimeContextProvider.java
index b10cd2e..a33f9b0 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IAsterixAppRuntimeContextProvider.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IAsterixAppRuntimeContextProvider.java
@@ -16,6 +16,7 @@
import java.util.List;
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
import edu.uci.ics.hyracks.api.io.IIOManager;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
@@ -29,6 +30,8 @@
public interface IAsterixAppRuntimeContextProvider {
+ public AsterixThreadExecutor getThreadExecutor();
+
public IBufferCache getBufferCache();
public IFileMapProvider getFileMapManager();
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ILockManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ILockManager.java
index a752afa..191a810 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ILockManager.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ILockManager.java
@@ -61,12 +61,13 @@
/**
* @param datasetId
* @param entityHashValue
+ * @param lockMode
* @param txnContext
* @throws ACIDException
* TODO
- * @return
+ * @return
*/
- public void unlock(DatasetId datasetId, int entityHashValue, ITransactionContext txnContext)
+ public void unlock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
throws ACIDException;
/**
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IRecoveryManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IRecoveryManager.java
index 6d2f3cb..6af5ebf 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IRecoveryManager.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/IRecoveryManager.java
@@ -33,7 +33,7 @@
HEALTHY,
CORRUPTED
}
-
+
public class ResourceType {
public static final byte LSM_BTREE = 0;
public static final byte LSM_RTREE = 1;
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionManager.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionManager.java
index ffd4cc2..b4bb354 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionManager.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionManager.java
@@ -52,7 +52,8 @@
*
* @param jobId
* a unique value for the transaction id.
- * @param createIfNotExist TODO
+ * @param createIfNotExist
+ * TODO
* @return
* @throws ACIDException
*/
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionSubsystem.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionSubsystem.java
index b6c934a..9b1cbb6 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionSubsystem.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/ITransactionSubsystem.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.asterix.common.transactions;
+
public interface ITransactionSubsystem {
public ILogManager getLogManager();
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/JobId.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/JobId.java
index a08924d..f329e28 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/JobId.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/JobId.java
@@ -50,7 +50,7 @@
return "JID:" + id;
}
- public void setId(int jobId) {
- id = jobId;
- }
+ public void setId(int jobId) {
+ id = jobId;
+ }
}
\ No newline at end of file
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/MutableLong.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/MutableLong.java
index 4480aba..1fd913f 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/MutableLong.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/transactions/MutableLong.java
@@ -17,14 +17,13 @@
public class MutableLong {
private long val;
-
public MutableLong() {
}
-
+
public MutableLong(long val) {
this.val = val;
}
-
+
public void set(long val) {
this.val = val;
}
@@ -32,10 +31,10 @@
public long get() {
return val;
}
-
+
@Override
public int hashCode() {
- return (int)val;
+ return (int) val;
}
@Override
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/ANameSchema.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/ANameSchema.java
index a34b1d4..b809745 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/ANameSchema.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/ANameSchema.java
@@ -12,14 +12,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.common.utils;
-
-/*
- * Author: Guangqiang Li
- * Created on Nov 30, 2009
- */
-public class ANameSchema {
- public static final int LOCALNAME_PROPERTY = 0;
- public static final int URI = 1;
- public static final int NODE_TYPE = 2;
-}
+package edu.uci.ics.asterix.common.utils;
+
+/*
+ * Author: Guangqiang Li
+ * Created on Nov 30, 2009
+ */
+public class ANameSchema {
+ public static final int LOCALNAME_PROPERTY = 0;
+ public static final int URI = 1;
+ public static final int NODE_TYPE = 2;
+}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/UTF8CharSequence.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/UTF8CharSequence.java
index 17e85b2..e2228c1 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/UTF8CharSequence.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/utils/UTF8CharSequence.java
@@ -19,73 +19,71 @@
public class UTF8CharSequence implements CharSequence {
- private int start;
- private int len;
- private char[] buf;
+ private int start;
+ private int len;
+ private char[] buf;
- public UTF8CharSequence(IValueReference valueRef, int start) {
- reset(valueRef, start);
- }
+ public UTF8CharSequence(IValueReference valueRef, int start) {
+ reset(valueRef, start);
+ }
- public UTF8CharSequence() {
- }
+ public UTF8CharSequence() {
+ }
- @Override
- public char charAt(int index) {
- if (index >= len || index < 0) {
- throw new IndexOutOfBoundsException("No index " + index
- + " for string of length " + len);
- }
- return buf[index];
- }
+ @Override
+ public char charAt(int index) {
+ if (index >= len || index < 0) {
+ throw new IndexOutOfBoundsException("No index " + index + " for string of length " + len);
+ }
+ return buf[index];
+ }
- @Override
- public int length() {
- return len;
- }
+ @Override
+ public int length() {
+ return len;
+ }
- @Override
- public CharSequence subSequence(int start, int end) {
- UTF8CharSequence carSeq = new UTF8CharSequence();
- carSeq.len = end - start;
- if (end != start) {
- carSeq.buf = new char[carSeq.len];
- System.arraycopy(buf, start, carSeq.buf, 0, carSeq.len);
- }
- return carSeq;
- }
+ @Override
+ public CharSequence subSequence(int start, int end) {
+ UTF8CharSequence carSeq = new UTF8CharSequence();
+ carSeq.len = end - start;
+ if (end != start) {
+ carSeq.buf = new char[carSeq.len];
+ System.arraycopy(buf, start, carSeq.buf, 0, carSeq.len);
+ }
+ return carSeq;
+ }
- public void reset(IValueReference valueRef, int start) {
- this.start = start;
- resetLength(valueRef);
- if (buf == null || buf.length < len) {
- buf = new char[len];
- }
- int sStart = start + 2;
- int c = 0;
- int i = 0;
- byte[] bytes = valueRef.getByteArray();
- while (c < len) {
- buf[i++] = UTF8StringPointable.charAt(bytes, sStart + c);
- c += UTF8StringPointable.charSize(bytes, sStart + c);
- }
+ public void reset(IValueReference valueRef, int start) {
+ this.start = start;
+ resetLength(valueRef);
+ if (buf == null || buf.length < len) {
+ buf = new char[len];
+ }
+ int sStart = start + 2;
+ int c = 0;
+ int i = 0;
+ byte[] bytes = valueRef.getByteArray();
+ while (c < len) {
+ buf[i++] = UTF8StringPointable.charAt(bytes, sStart + c);
+ c += UTF8StringPointable.charSize(bytes, sStart + c);
+ }
- }
+ }
- private void resetLength(IValueReference valueRef) {
- this.len = UTF8StringPointable.getUTFLength(valueRef.getByteArray(),
- start);
- }
+ private void resetLength(IValueReference valueRef) {
+ this.len = UTF8StringPointable.getUTFLength(valueRef.getByteArray(), start);
+ }
- @Override
- public String toString() {
- StringBuffer bf = new StringBuffer();
- if (buf != null) {
- for (int i = 0; i < buf.length; i++) {
- bf.append(buf[i]);
- }
- }
- return new String(bf);
- }
+ @Override
+ public String toString() {
+ StringBuffer bf = new StringBuffer();
+ if (buf != null) {
+ for (int i = 0; i < buf.length; i++) {
+ bf.append(buf[i]);
+ }
+ }
+ return new String(bf);
+ }
}
diff --git a/asterix-common/src/main/resources/schema/asterix-conf.xsd b/asterix-common/src/main/resources/schema/asterix-conf.xsd
index f461723..3033893 100644
--- a/asterix-common/src/main/resources/schema/asterix-conf.xsd
+++ b/asterix-common/src/main/resources/schema/asterix-conf.xsd
@@ -5,7 +5,8 @@
<!-- definition of simple types -->
-
+
+ <xs:element name="instanceName" type="xs:string" />
<xs:element name="metadataNode" type="xs:string" />
<xs:element name="coredumpPath" type="xs:string" />
<xs:element name="storeDirs" type="xs:string" />
@@ -13,9 +14,9 @@
<xs:element name="name" type="xs:string" />
<xs:element name="value" type="xs:string" />
<xs:element name="description" type="xs:string" />
- <xs:element name="txnLogDirPath" type="xs:string" />
-
-
+ <xs:element name="txnLogDirPath" type="xs:string" />
+
+
<!-- definition of complex elements -->
<xs:element name="store">
<xs:complexType>
@@ -34,7 +35,7 @@
</xs:sequence>
</xs:complexType>
</xs:element>
-
+
<xs:element name="transactionLogDir">
<xs:complexType>
<xs:sequence>
@@ -58,10 +59,11 @@
<xs:element name="asterixConfiguration">
<xs:complexType>
<xs:sequence>
- <xs:element ref="mg:metadataNode" minOccurs="0"/>
+ <xs:element ref="mg:instanceName" minOccurs="0" />
+ <xs:element ref="mg:metadataNode" minOccurs="0" />
<xs:element ref="mg:store" maxOccurs="unbounded" />
<xs:element ref="mg:coredump" maxOccurs="unbounded" />
- <xs:element ref="mg:transactionLogDir" maxOccurs="unbounded"/>
+ <xs:element ref="mg:transactionLogDir" maxOccurs="unbounded" />
<xs:element ref="mg:property" minOccurs="0" maxOccurs="unbounded" />
</xs:sequence>
</xs:complexType>
diff --git a/asterix-common/src/main/resources/schema/cluster.xsd b/asterix-common/src/main/resources/schema/cluster.xsd
new file mode 100644
index 0000000..8c317d8
--- /dev/null
+++ b/asterix-common/src/main/resources/schema/cluster.xsd
@@ -0,0 +1,116 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:cl="cluster" targetNamespace="cluster" elementFormDefault="qualified">
+
+ <!-- definition of simple types -->
+ <xs:element name="instance_name" type="xs:string" />
+ <xs:element name="cluster_name" type="xs:string" />
+ <xs:element name="log_dir" type="xs:string" />
+ <xs:element name="txn_log_dir" type="xs:string" />
+ <xs:element name="id" type="xs:string" />
+ <xs:element name="client_ip" type="xs:string" />
+ <xs:element name="cluster_ip" type="xs:string" />
+ <xs:element name="key" type="xs:string" />
+ <xs:element name="value" type="xs:string" />
+ <xs:element name="dir" type="xs:string" />
+ <xs:element name="NFS" type="xs:boolean" />
+ <xs:element name="store" type="xs:string" />
+ <xs:element name="iodevices" type="xs:string" />
+ <xs:element name="java_home" type="xs:string" />
+ <xs:element name="username" type="xs:string" />
+ <xs:element name="web_port" type="xs:string" />
+ <xs:element name="client_port" type="xs:integer" />
+ <xs:element name="cluster_port" type="xs:integer" />
+ <xs:element name="http_port" type="xs:integer" />
+ <xs:element name="debug_port" type="xs:integer" />
+ <xs:element name="metadata_node" type="xs:string" />
+
+
+ <!-- definition of complex elements -->
+ <xs:element name="working_dir">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:dir" />
+ <xs:element ref="cl:NFS" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="master_node">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:id" />
+ <xs:element ref="cl:client_ip" />
+ <xs:element ref="cl:cluster_ip" />
+ <xs:element ref="cl:java_home" minOccurs="0" />
+ <xs:element ref="cl:log_dir" minOccurs="0" />
+ <xs:element ref="cl:client_port" />
+ <xs:element ref="cl:cluster_port" />
+ <xs:element ref="cl:http_port" />
+ <xs:element ref="cl:debug_port" minOccurs="0" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="property">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:key" />
+ <xs:element ref="cl:value" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="env">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:property" minOccurs="0" maxOccurs="unbounded" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="node">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:id" />
+ <xs:element ref="cl:cluster_ip" />
+ <xs:element ref="cl:java_home" minOccurs="0" />
+ <xs:element ref="cl:log_dir" minOccurs="0" />
+ <xs:element ref="cl:txn_log_dir" minOccurs="0" />
+ <xs:element ref="cl:store" minOccurs="0" />
+ <xs:element ref="cl:iodevices" minOccurs="0" />
+ <xs:element ref="cl:debug_port" minOccurs="0" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="substitute_nodes">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:node" maxOccurs="unbounded" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="cluster">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="cl:instance_name" />
+ <xs:element ref="cl:cluster_name" />
+ <xs:element ref="cl:username" />
+ <xs:element ref="cl:env" minOccurs="0" />
+ <xs:element ref="cl:java_home" minOccurs="0" />
+ <xs:element ref="cl:log_dir" minOccurs="0" />
+ <xs:element ref="cl:txn_log_dir" minOccurs="0" />
+ <xs:element ref="cl:store" minOccurs="0" />
+ <xs:element ref="cl:iodevices" minOccurs="0" />
+ <xs:element ref="cl:working_dir" />
+ <xs:element ref="cl:metadata_node" />
+ <xs:element ref="cl:master_node" />
+ <xs:element ref="cl:node" maxOccurs="unbounded" />
+ <xs:element ref="cl:substitute_nodes" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
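+ <!-- A minimal, illustrative instance that this schema accepts; all names,
+ addresses, and ports below are hypothetical:
+
+ <cluster xmlns="cluster">
+ <instance_name>my_asterix</instance_name>
+ <cluster_name>local</cluster_name>
+ <username>asterix</username>
+ <working_dir>
+ <dir>/tmp/asterix</dir>
+ <NFS>true</NFS>
+ </working_dir>
+ <metadata_node>nc1</metadata_node>
+ <master_node>
+ <id>cc</id>
+ <client_ip>127.0.0.1</client_ip>
+ <cluster_ip>127.0.0.1</cluster_ip>
+ <client_port>1098</client_port>
+ <cluster_port>1099</cluster_port>
+ <http_port>8888</http_port>
+ </master_node>
+ <node>
+ <id>nc1</id>
+ <cluster_ip>127.0.0.1</cluster_ip>
+ </node>
+ <substitute_nodes>
+ <node>
+ <id>nc2</id>
+ <cluster_ip>127.0.0.1</cluster_ip>
+ </node>
+ </substitute_nodes>
+ </cluster>
+ -->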
+</xs:schema>
diff --git a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestHelper.java b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestHelper.java
new file mode 100644
index 0000000..5777d0a
--- /dev/null
+++ b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestHelper.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.asterix.test.aql;
+
+import java.util.List;
+
+public final class TestHelper {
+
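+ /** Returns true if and only if {@code s} starts with at least one prefix in {@code prefixList}. */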
+ public static boolean isInPrefixList(List<String> prefixList, String s) {
+ for (String s2 : prefixList) {
+ if (s.startsWith(s2)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+}
diff --git a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
index 3fec2c8..9643e1d 100644
--- a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -36,9 +36,12 @@
import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.NameValuePair;
import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.httpclient.methods.PostMethod;
+import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.io.IOUtils;
import org.codehaus.jackson.map.JsonMappingException;
@@ -186,7 +189,7 @@
}
}
- private static String[] handleError(GetMethod method) throws Exception {
+ private static String[] handleError(HttpMethod method) throws Exception {
String errorBody = method.getResponseBodyAsString();
JSONObject result = new JSONObject(errorBody);
String[] errors = { result.getJSONArray("error-code").getString(0), result.getString("summary"),
@@ -205,7 +208,6 @@
// Create a method instance.
GetMethod method = new GetMethod(url);
-
method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
// Provide custom retry handler is necessary
@@ -238,9 +240,8 @@
HttpClient client = new HttpClient();
// Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
+ PostMethod method = new PostMethod(url);
+ method.setRequestEntity(new StringRequestEntity(str));
// Provide custom retry handler is necessary
method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
@@ -253,7 +254,8 @@
GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Method failed: " + method.getStatusLine());
String[] errors = handleError(method);
GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
- throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2]);
+ throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: "
+ + errors[2]);
}
}
@@ -270,10 +272,8 @@
HttpClient client = new HttpClient();
// Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
-
+ PostMethod method = new PostMethod(url);
+ method.setRequestEntity(new StringRequestEntity(str));
// Provide custom retry handler is necessary
method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
@@ -285,7 +285,8 @@
GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Method failed: " + method.getStatusLine());
String[] errors = handleError(method);
GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
- throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2]);
+ throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: "
+ + errors[2]);
}
}
@@ -309,7 +310,7 @@
public static void executeManagixCommand(String command) throws ClassNotFoundException, NoSuchMethodException,
SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
if (managixExecuteMethod == null) {
- Class clazz = Class.forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
+ Class<?> clazz = Class.forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
managixExecuteMethod = clazz.getMethod("executeCommand", String.class);
}
managixExecuteMethod.invoke(null, command);
@@ -355,7 +356,8 @@
return s.toString();
}
- public static void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb) throws Exception {
+ public static void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb,
+ boolean isDmlRecoveryTest) throws Exception {
File testFile;
File expectedResultFile;
@@ -381,10 +383,26 @@
TestsUtils.executeDDL(statement);
break;
case "update":
+
+ // isDmlRecoveryTest: rewrite the nc1:// endpoint to a local asterix-app path
+ if (isDmlRecoveryTest && statement.contains("nc1://")) {
+ statement = statement
+ .replaceAll("nc1://", "127.0.0.1://../../../../../../asterix-app/");
+
+ }
+
TestsUtils.executeUpdate(statement);
break;
case "query":
try {
+ // isDmlRecoveryTest: simulate a crash and recovery before executing the query
+ if (isDmlRecoveryTest) {
+ executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator
+ + "dml_recovery" + File.separator + "kill_cc_and_nc.sh");
+ executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator
+ + "dml_recovery" + File.separator + "stop_and_start.sh");
+ }
+
InputStream resultStream = executeQuery(statement);
expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
@@ -422,34 +440,18 @@
break;
case "txnqar": //qar represents query after recovery
try {
- ////////////// <begin of temporary fix> ////////////////////////////
- //TODO
- //Temporary fix in order not to block the build test(mvn verify)
- //A proper fix should not have the while loop here.
- int maxRetryCount = 12;
- int tryCount = 0;
- InputStream resultStream = null;
- long sleepTime = 5;
-
- do {
- //wait until NC starts
- sleepTime *= 2;
- Thread.sleep(sleepTime);
- if (++tryCount > maxRetryCount) {
- LOGGER.info("Metadata node is not running - this test will fail.");
- break;
- }
- resultStream = executeQuery(statement);
- } while (resultStream.toString().contains("Connection refused to host"));
- ////////////// <end of temporary fix> //////////////////////////////
+
+ InputStream resultStream = executeQuery(statement);
qarFile = new File(actualPath + File.separator
+ testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
+ cUnit.getName() + "_qar.adm");
qarFile.getParentFile().mkdirs();
TestsUtils.writeResultsToFile(qarFile, resultStream);
+
TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
qbcFile, qarFile);
+
LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/"
+ cUnit.getName() + " PASSED ");
} catch (JsonMappingException e) {
@@ -476,11 +478,23 @@
throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
}
break;
+ case "sleep":
+ Thread.sleep(Long.parseLong(statement.trim()));
+ break;
+ case "errddl": // a ddlquery that expects error
+ try {
+ TestsUtils.executeDDL(statement);
+
+ } catch (Exception e) {
+ // the expected error occurred; swallow it and continue
+ }
+ break;
default:
throw new IllegalArgumentException("No statements of type " + ctx.getType());
}
} catch (Exception e) {
+ e.printStackTrace();
if (cUnit.getExpectedError().isEmpty()) {
throw new Exception("Test \"" + testFile + "\" FAILED!", e);
}
@@ -488,4 +502,5 @@
}
}
}
+
}
diff --git a/asterix-doc/pom.xml b/asterix-doc/pom.xml
index 6eb217f..8675dcc 100644
--- a/asterix-doc/pom.xml
+++ b/asterix-doc/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-doc</artifactId>
<build>
diff --git a/asterix-doc/src/site/markdown/api.md b/asterix-doc/src/site/markdown/api.md
index b48f24a..4d6edd2 100644
--- a/asterix-doc/src/site/markdown/api.md
+++ b/asterix-doc/src/site/markdown/api.md
@@ -1,6 +1,16 @@
# REST API to AsterixDB #
-## DDL API ##
+## <a id="toc">Table of Contents</a>
+
+* [DDL API](#DdlApi)
+* [Update API](#UpdateApi)
+* [Query API](#QueryApi)
+* [Asynchronous Result API](#AsynchronousResultApi)
+* [Query Status API](#QueryStatusApi)
+* [Error Codes](#ErrorCodes)
+
+
+## <a id="DdlApi">DDL API</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
*End point for the data definition statements*
@@ -48,7 +58,7 @@
*HTTP OK 200*
`<NO PAYLOAD>`
-## Update API ##
+## <a id="UpdateApi">Update API</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
*End point for update statements (INSERT, DELETE and LOAD)*
@@ -89,7 +99,7 @@
*HTTP OK 200*
`<NO PAYLOAD>`
-## Query API ##
+## <a id="QueryApi">Query API</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
*End point for query statements*
@@ -169,7 +179,7 @@
}
-## Asynchronous Result API ##
+## <a id="AsynchronousResultApi">Asynchronous Result API</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
*End point to fetch the results of an asynchronous query*
@@ -231,7 +241,7 @@
}
-## Query Status API ##
+## <a id="QueryStatusApi">Query Status API</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
*End point to check the status of the query asynchronous*
@@ -261,7 +271,7 @@
-## Error Codes ##
+## <a id="ErrorCodes">Error Codes</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
Table of error codes and their types:
diff --git a/asterix-doc/src/site/markdown/aql/allens.md b/asterix-doc/src/site/markdown/aql/allens.md
new file mode 100644
index 0000000..a07e287
--- /dev/null
+++ b/asterix-doc/src/site/markdown/aql/allens.md
@@ -0,0 +1,229 @@
+# AsterixDB Temporal Functions: Allen's Relations #
+
+## <a id="toc">Table of Contents</a> ##
+
+* [About Allen's Relations](#AboutAllensRelations)
+* [Allen's Relations Functions](#AllensRelationsFunctions)
+
+
+## <a id="AboutAllensRelations">About Allen's Relations</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+AsterixDB supports Allen's relations over interval types. Allen's relations are also called Allen's interval algebra. The algebra describes 13 base relations in total, and AsterixDB supports all of them (note that `interval-equals` is covered by the `=` comparison operator, so there is no extra function for it).
+
+A detailed description of Allen's relations can be found in its [Wikipedia entry](http://en.wikipedia.org/wiki/Allen's_interval_algebra).
+
+## <a id="AllensRelatonsFunctions">Allen's Relations Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+### interval-before, interval-after ###
+
+ * Syntax:
+
+ interval-before(interval1, interval2)
+ interval-after(interval1, interval2)
+
+ * These two functions check whether an interval happens before/after another interval.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-before(interval1, interval2)` is true if and only if `interval1.end < interval2.start`, and `interval-after(interval1, interval2)` is true if and only if `interval1.start > interval2.end`. If either input is `null`, `null` is returned.
+
+ * Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("2005-05-01", "2012-09-09")
+ return {"interval-before": interval-before($itv1, $itv2), "interval-after": interval-after($itv2, $itv1)}
+
+ * The expected result is:
+
+ { "interval-before": true, "interval-after": true }
+
+### interval-meets, interval-met-by ###
+
+ * Syntax:
+
+ interval-meets(interval1, interval2)
+ interval-met-by(interval1, interval2)
+
+ * These two functions check whether an interval meets with another interval.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-meets(interval1, interval2)` is true if and only if `interval1.end = interval2.start`, and `interval-met-by(interval1, interval2)` is true if and only if `interval1.start = interval2.end`. If either input is `null`, `null` is returned.
+
+ * Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("2005-01-01", "2012-09-09")
+ let $itv3 := interval-from-date("2006-08-01", "2007-03-01")
+ let $itv4 := interval-from-date("2004-09-10", "2006-08-01")
+ return {"meets": interval-meets($itv1, $itv2), "metby": interval-met-by($itv3, $itv4)}
+
+ * The expected result is:
+
+ { "meets": true, "metby": true }
+
+
+### interval-overlaps, interval-overlapped-by, overlap ###
+
+ * Syntax:
+
+ interval-overlaps(interval1, interval2)
+ interval-overlapped-by(interval1, interval2)
+ overlap(interval1, interval2)
+
+ * These functions check whether two intervals overlap with each other.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-overlaps(interval1, interval2)` is true if and only if
+
+ interval1.start < interval2.start
+ AND interval2.end > interval1.end
+ AND interval1.end > interval2.start
+
+ `interval-overlapped-by(interval1, interval2)` is true if and only if
+
+ interval2.start < interval1.start
+ AND interval1.end > interval2.end
+ AND interval2.end > interval1.start
+
+ `overlap(interval1, interval2)` is true if
+
+ (interval2.start >= interval1.start
+ AND interval2.start < interval1.end)
+ OR
+ (interval2.end > interval1.start
+ AND interval2.end <= interval1.end)
+
+ For all these functions, if either input is `null`, `null` is returned.
+
+ Note that `interval-overlaps` and `interval-overlapped-by` follow Allen's definition of overlap; `overlap` is syntactic sugar for the case where the intersection of two intervals is not empty.
+
+ * Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("2004-05-01", "2012-09-09")
+ let $itv3 := interval-from-date("2006-08-01", "2007-03-01")
+ let $itv4 := interval-from-date("2004-09-10", "2006-12-31")
+ return {"overlaps": interval-overlaps($itv1, $itv2),
+ "overlapped-by": interval-overlapped-by($itv3, $itv4),
+ "overlapping1": overlap($itv1, $itv2),
+ "overlapping2": overlap($itv3, $itv4)}
+
+ * The expected result is:
+
+ { "overlaps": true, "overlapped-by": true, "overlapping1": true, "overlapping2": true }
+
+
+### interval-starts, interval-started-by ###
+
+ * Syntax:
+
+ interval-starts(interval1, interval2)
+ interval-started-by(interval1, interval2)
+
+ * These two functions check whether one interval starts with the other interval.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-starts(interval1, interval2)` returns true if and only if
+
+ interval1.start = interval2.start
+ AND interval1.end <= interval2.end
+
+ `interval-started-by(interval1, interval2)` returns true if and only if
+
+ interval1.start = interval2.start
+ AND interval2.end <= interval1.end
+
+ For both functions, if either input is `null`, `null` is returned.
+
+ * Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("2000-01-01", "2012-09-09")
+ let $itv3 := interval-from-date("2006-08-01", "2007-03-01")
+ let $itv4 := interval-from-date("2006-08-01", "2006-08-01")
+ return {"interval-starts": interval-starts($itv1, $itv2), "interval-started-by": interval-started-by($itv3, $itv4)}
+
+ * The expected result is:
+
+ { "interval-starts": true, "interval-started-by": true }
+
+
+### interval-covers, interval-covered-by ###
+
+ * Syntax:
+
+ interval-covers(interval1, interval2)
+ interval-covered-by(interval1, interval2)
+
+ * These two functions check whether one interval covers the other interval.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-covers(interval1, interval2)` is true if and only if
+
+ interval1.start <= interval2.start
+ AND interval2.end >= interval1.end
+
+ `interval-covered-by(interval1, interval2)` is true if and only if
+
+ interval2.start <= interval1.start
+ AND interval1.end >= interval2.end
+
+ For both functions, if either input is `null`, `null` is returned.
+
+ * Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("2000-03-01", "2004-09-09")
+ let $itv3 := interval-from-date("2006-08-01", "2007-03-01")
+ let $itv4 := interval-from-date("2004-09-10", "2012-08-01")
+ return {"interval-covers": interval-covers($itv1, $itv2), "interval-covered-by": interval-covered-by($itv3, $itv4)}
+
+ * The expected result is:
+
+ { "interval-covers": true, "interval-covered-by": true }
+
+
+### interval-ends, interval-ended-by ###
+
+* Syntax:
+
+ interval-ends(interval1, interval2)
+ interval-ended-by(interval1, interval2)
+
+ * These two functions check whether one interval ends with the other interval.
+ * Arguments:
+ * `interval1`, `interval2`: two intervals to be compared
+ * Return Value:
+
+ A `boolean` value. Specifically, `interval-ends(interval1, interval2)` returns true if and only if
+
+ interval1.end = interval2.end
+ AND interval1.start >= interval2.start
+
+ `interval-ended-by(interval1, interval2)` returns true if and only if
+
+ interval2.end = interval1.end
+ AND interval2.start >= interval1.start
+
+ For both functions, if either input is `null`, `null` is returned.
+
+* Examples:
+
+ let $itv1 := interval-from-date("2000-01-01", "2005-01-01")
+ let $itv2 := interval-from-date("1998-01-01", "2005-01-01")
+ let $itv3 := interval-from-date("2006-08-01", "2007-03-01")
+ let $itv4 := interval-from-date("2006-09-10", "2007-03-01")
+ return {"interval-ends": interval-ends($itv1, $itv2), "interval-ended-by": interval-ended-by($itv3, $itv4) }
+
+* The expected result is:
+
+ { "interval-ends": true, "interval-ended-by": true }
diff --git a/asterix-doc/src/site/markdown/aql/datamodel.md b/asterix-doc/src/site/markdown/aql/datamodel.md
index 3e54d61..71a5cbb 100644
--- a/asterix-doc/src/site/markdown/aql/datamodel.md
+++ b/asterix-doc/src/site/markdown/aql/datamodel.md
@@ -1,11 +1,33 @@
# Asterix Data Model (ADM) #
+## <a id="toc">Table of Contents</a> ##
+
+* [Primitive Types](#PrimitiveTypes)
+ * [Boolean](#PrimitiveTypesBoolean)
+ * [Int8 / Int16 / Int32 / Int64](#PrimitiveTypesInt)
+ * [Float](#PrimitiveTypesFloat)
+ * [Double](#PrimitiveTypesDouble)
+ * [String](#PrimitiveTypesString)
+ * [Point](#PrimitiveTypesPoint)
+ * [Line](#PrimitiveTypesLine)
+ * [Rectangle](#PrimitiveTypesRectangle)
+ * [Circle](#PrimitiveTypesCircle)
+ * [Polygon](#PrimitiveTypesPolygon)
+ * [Date](#PrimitiveTypesDate)
+ * [Time](#PrimitiveTypesTime)
+ * [Datetime](#PrimitiveTypesDateTime)
+ * [Duration/Year-month-duration/Day-time-duration](#PrimitiveTypesDuration)
+ * [Interval](#PrimitiveTypesInterval)
+* [Derived Types](#DerivedTypes)
+ * [Record](#DerivedTypesRecord)
+ * [OrderedList](#DerivedTypesOrderedList)
+ * [UnorderedList](#DerivedTypesUnorderedList)
An instance of Asterix data model (ADM) can be a _*primitive type*_ (`int32`, `int64`, `string`, `float`, `double`, `date`, `time`, `datetime`, etc. or `null`) or a _*derived type*_.
-## Primitive Types ##
+## <a id="PrimitiveTypes">Primitive Types</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
-### Boolean ###
+### <a id="PrimitiveTypesBoolean">Boolean</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`boolean` data type can have one of the two values: _*true*_ or _*false*_.
* Example:
@@ -21,7 +43,7 @@
-### Int8 / Int16 / Int32 / Int64 ###
+### <a id="PrimitiveTypesInt">Int8 / Int16 / Int32 / Int64</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
Integer types using 8, 16, 32, or 64 bits. The ranges of these types are:
- `int8`: -127 to 127
@@ -43,7 +65,7 @@
{ "int8": 125i8, "int16": 32765i16, "int32": 294967295, "int64": 1700000000000000000i64 }
-### Float ###
+### <a id="PrimitiveTypesFloat">Float</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
+`float` represents approximate numeric data values using 4 bytes. The range of a float value is from 2^(-149) to (2-2^(-23))·2^(127), for both positive and negative values. Values beyond these ranges evaluate to `INF` or `-INF`.
* Example:
@@ -60,7 +82,7 @@
{ "v1": NaNf, "v2": Infinityf, "v3": -Infinityf, "v4": -2013.5f }
-### Double ###
+### <a id="PrimitiveTypesDouble">Double</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
+`double` represents approximate numeric data values using 8 bytes. The range of a double value is from 2^(-1022) to (2-2^(-52))·2^(1023), for both positive and negative values. Values beyond these ranges evaluate to `INF` or `-INF`.
* Example:
@@ -77,7 +99,7 @@
{ "v1": NaNd, "v2": Infinityd, "v3": -Infinityd, "v4": -2013.5938237483274d }
-### String ###
+### <a id="PrimitiveTypesString">String</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`string` represents a sequence of characters.
* Example:
@@ -92,7 +114,7 @@
{ "v1": "This is a string.", "v2": "\"This is a quoted string\"" }
-### Point ###
+### <a id="PrimitiveTypesPoint">Point</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`point` is the fundamental two-dimensional building block for spatial types. It consists of two `double` coordinates x and y.
* Example:
@@ -107,7 +129,7 @@
{ "v1": point("80.1,-1000000.0"), "v2": point("5.1E-10,-1000000.0") }
-### Line ###
+### <a id="PrimitiveTypesLine">Line</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`line` consists of two points that represent the start and the end points of a line segment.
* Example:
@@ -122,7 +144,7 @@
{ "v1": line("10.1234,1.11 0.102,-11.22"), "v2": line("0.1234,-1.0E-10 0.105,-1.02") }
-### Rectangle ###
+### <a id="PrimitiveTypesRectangle">Rectangle</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`rectangle` consists of two points that represent the _*bottom left*_ and _*upper right*_ corners of a rectangle.
* Example:
@@ -137,7 +159,7 @@
{ "v1": rectangle("5.1,11.8 87.6,15.6548"), "v2": rectangle("0.1234,-1.0E-10 5.5487,0.48765") }
-### Circle ###
+### <a id="PrimitiveTypesCircle">Circle</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`circle` consists of one point that represents the center of the circle and a radius of type `double`.
* Example:
@@ -152,7 +174,7 @@
{ "v1": circle("10.1234,1.11 0.102"), "v2": circle("0.1234,-1.0E-10 0.105") }
-### Polygon ###
+### <a id="PrimitiveTypesPolygon">Polygon</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`polygon` consists of _*n*_ points that represent the vertices of a _*simple closed*_ polygon.
* Example:
@@ -167,7 +189,7 @@
{ "v1": polygon("-1.2,130.0 -214000.0,2.15 -350.0,3.6 -0.0046,4.81"), "v2": polygon("-1.0,1050.0 -2.15E50,2.5 -1.0,3300.0 -250000.0,20.15 350.0,3.6 -0.0046,4.75 -2.0,100.0 -200000.0,20.1 30.5,3.25 -0.00433,4.75") }
-### Date ###
+### <a id="PrimitiveTypesDate">Date</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`date` represents a time point along the Gregorian calendar system specified by the year, month and day. ASTERIX supports the date from `-9999-01-01` to `9999-12-31`.
A date value can be represented in two formats, extended format and basic format.
@@ -187,7 +209,7 @@
{ "v1": date("2013-01-01"), "v2": date("-1970-01-01") }
-### Time ###
+### <a id="PrimitiveTypesTime">Time</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`time` type describes the time within the range of a day. It is represented by three fields: hour, minute and second. Millisecond field is optional as the fraction of the second field. Its extended format is as `hh:mm:ss[.mmm]` and the basic format is `hhmmss[mmm]`. The value domain is from `00:00:00.000` to `23:59:59.999`.
Timezone field is optional for a time value. Timezone is represented as `[+|-]hh:mm` for extended format or `[+|-]hhmm` for basic format. Note that the sign designators cannot be omitted. `Z` can also be used to represent the UTC local time. If no timezone information is given, it is UTC by default.
@@ -204,7 +226,7 @@
{ "v1": time("12:12:12.039Z"), "v2": time("08:00:00.000Z") }
-### Datetime ###
+### <a id="PrimitiveTypesDateTime">Datetime</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
A `datetime` value is a combination of an `date` and `time`, representing a fixed time point along the Gregorian calendar system. The value is among `-9999-01-01 00:00:00.000` and `9999-12-31 23:59:59.999`.
A `datetime` value is represented as a combination of the representation of its `date` part and `time` part, separated by a separator `T`. Either extended or basic format can be used, and the two parts should be the same format.
@@ -223,13 +245,15 @@
{ "v1": datetime("2013-01-01T12:12:12.039Z"), "v2": datetime("-1970-01-01T08:00:00.000Z") }
-### Duration ###
+### <a id="PrimitiveTypesDuration">Duration/Year-month-duration/Day-time-duration</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`duration` represents a duration of time. A duration value is specified by integers on at least one of the following fields: year, month, day, hour, minute, second, and millisecond.
A duration value is in the format of `[-]PnYnMnDTnHnMn.mmmS`. The millisecond part (as the fraction of the second field) is optional, and when no millisecond field is used, the decimal point should also be absent.
Negative durations are also supported for the arithmetic operations between time instance types (`date`, `time` and `datetime`), and is used to roll the time back for the given duration. For example `date("2012-01-01") + duration("-P3D")` will return `date("2011-12-29")`.
+There are also two sub-duration types, namely `year-month-duration` and `day-time-duration`. `year-month-duration` represents only the years and months of a duration, while `day-time-duration` represents only the day to millisecond fields. Unlike the `duration` type, both subtypes are totally ordered, so they can be used in comparisons and for index construction.
+
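+For example, since the two subtypes are totally ordered, they can be compared directly (a brief sketch assuming the standard subtype constructors):
+
+ let $d1 := year-month-duration("P3Y2M")
+ let $d2 := year-month-duration("P1Y11M")
+ return $d1 > $d2
+
+The expected result is `true`, since `$d1` spans the longer period.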
Note that a canonical representation of the duration is always returned, regardless whether the duration is in the canonical representation or not from the user's input. More information about canonical representation can be found from [XPath dayTimeDuration Canonical Representation](http://www.w3.org/TR/xpath-functions/#canonical-dayTimeDuration) and [yearMonthDuration Canonical Representation](http://www.w3.org/TR/xpath-functions/#canonical-yearMonthDuration).
* Example:
@@ -244,7 +268,7 @@
{ "v1": duration("P101YT12M"), "v2": duration("-PT20.943S") }
-### Interval ###
+### <a id="PrimitiveTypesInterval">Interval</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
`interval` represents inclusive-exclusive ranges of time. It is defined by two time point values with the same temporal type(`date`, `time` or `datetime`).
* Example:
@@ -260,9 +284,9 @@
{ "v1": interval-date("2013-01-01, 2013-05-05"), "v2": interval-time("00:01:01.000Z, 13:39:01.049Z"), "v3": interval-datetime("2013-01-01T00:01:01.000Z, 2013-05-05T13:39:01.049Z") }
-## Derived Types ##
+## <a id="DerivedTypes">Derived Types</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ##
-### Record ###
+### <a id="DerivedTypesRecord">Record</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
A `record` contains a set of fields, where each field is described by its name and type. A record type is either open or closed. Open records can contain fields that are not part of the type definition, while closed records cannot. Syntactically, record constructors are surrounded by curly braces "{...}".
An example would be
@@ -271,7 +295,7 @@
{ "id": 213508, "name": "Alice Bob" }
-### OrderedList ###
+### <a id="DerivedTypesOrderedList">OrderedList</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
An `orderedList` is a sequence of values for which the order is determined by creation or insertion. OrderedList constructors are denoted by brackets: "[...]".
An example would be
@@ -280,7 +304,7 @@
["alice", 123, "bob", null]
-### UnorderedList ###
+### <a id="DerivedTypesUnorderedList">UnorderedList</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ###
An `unorderedList` is an unordered sequence of values, similar to bags in SQL. UnorderedList constructors are denoted by two opening flower braces followed by data and two closing flower braces, like "{{...}}".
An example would be
diff --git a/asterix-doc/src/site/markdown/aql/externaldata.md b/asterix-doc/src/site/markdown/aql/externaldata.md
index e603954..ca350b9 100644
--- a/asterix-doc/src/site/markdown/aql/externaldata.md
+++ b/asterix-doc/src/site/markdown/aql/externaldata.md
@@ -1,12 +1,19 @@
# Accessing External Data in AsterixDB #
-## Introduction ##
+## <a id="toc">Table of Contents</a> ##
+
+* [Introduction](#Introduction)
+ * [Adapter for an External Dataset](#IntroductionAdapterForAnExternalDataset)
+ * [Creating an External Dataset](#IntroductionCreatingAnExternalDataset)
+* [Writing Queries against an External Dataset](#WritingQueriesAgainstAnExternalDataset)
+
+## <a id="Introduction">Introduction</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
Data that needs to be processed by ASTERIX could be residing outside ASTERIX storage. Examples include data files on a distributed file system such as HDFS or on the local file system of a machine that is part of an ASTERIX cluster. For ASTERIX to process such data, end-user may create a regular dataset in ASTERIX (a.k.a. internal dataset) and load the dataset with the data. ASTERIX supports ''external datasets'' so that it is not necessary to “load” all data prior to using it. This also avoids creating multiple copies of data and the need to keep the copies in sync.
-### Adapter for an External Dataset ###
+### <a id="IntroductionAdapterForAnExternalDataset">Adapter for an External Dataset</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
External data is accessed using wrappers (adapters in ASTERIX) that abstract away the mechanism of connecting with an external service, receiving data and transforming the data into ADM records that are understood by ASTERIX. ASTERIX comes with built-in adapters for common storage systems such as HDFS or the local file system.
-### Creating an External Dataset ###
+### <a id="IntroductionCreatingAnExternalDataset">Creating an External Dataset</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ###
As an example we consider the Lineitem dataset from [TPCH schema](http://www.openlinksw.com/dataspace/doc/dav/wiki/Main/VOSTPCHLinkedData/tpch.sql).
@@ -168,7 +175,7 @@
You may now run the sample query in next section.
-## Writing Queries against an External Dataset ##
+## <a id="WritingQueriesAgainstAnExternalDataset">Writing Queries against an External Dataset</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
You may write AQL queries against an external dataset. Following is an example AQL query that applies a filter and returns an ordered result.
diff --git a/asterix-doc/src/site/markdown/aql/functions.md b/asterix-doc/src/site/markdown/aql/functions.md
index 1f46fca..ca91581 100644
--- a/asterix-doc/src/site/markdown/aql/functions.md
+++ b/asterix-doc/src/site/markdown/aql/functions.md
@@ -1,7 +1,154 @@
# Asterix: Using Functions #
-Asterix provides rich support of various classes of functions to support operations on string, spatial, and temporal data. This document explains how to use these functions.
-## String Functions ##
+## <a id="toc">Table of Contents</a> ##
+
+* [Numeric Functions](#NumericFunctions)
+* [String Functions](#StringFunctions)
+* [Aggregate Functions](#AggregateFunctions)
+* [Spatial Functions](#SpatialFunctions)
+* [Similarity Functions](#SimilarityFunctions)
+* [Tokenizing Functions](#TokenizingFunctions)
+* [Temporal Functions](#TemporalFunctions)
+* [Other Functions](#OtherFunctions)
+
+Asterix provides various classes of functions to support operations on numeric, string, spatial, and temporal data. This document explains how to use these functions.
+
+## <a id="NumericFunctions">Numeric Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+### numeric-abs ###
+ * Syntax:
+
+ numeric-abs(numeric_expression)
+
+ * Computes the absolute value of the argument.
+ * Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64`/`float`/`double` value.
+ * Return Value:
+ * The absolute value of the argument with the same type as the input argument, or `null` if the argument is a `null` value.
+
+ * Example:
+
+ let $v1 := numeric-abs(2013)
+ let $v2 := numeric-abs(-4036)
+ let $v3 := numeric-abs(0)
+ let $v4 := numeric-abs(float("-2013.5"))
+ let $v5 := numeric-abs(double("-2013.593823748327284"))
+ return { "v1": $v1, "v2": $v2, "v3": $v3, "v4": $v4, "v5": $v5 }
+
+
+ * The expected result is:
+
+ { "v1": 2013, "v2": 4036, "v3": 0, "v4": 2013.5f, "v5": 2013.5938237483274d }
+
+
+### numeric-ceiling ###
+ * Syntax:
+
+ numeric-ceiling(numeric_expression)
+
+ * Computes the smallest (closest to negative infinity) number with no fractional part that is not less than the value of the argument. If the argument is already equal to a mathematical integer, then the result is the same as the argument.
+ * Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64`/`float`/`double` value.
+ * Return Value:
+ * The ceiling value for the given number in the same type as the input argument, or `null` if the input is `null`.
+
+ * Example:
+
+ let $v1 := numeric-ceiling(2013)
+ let $v2 := numeric-ceiling(-4036)
+ let $v3 := numeric-ceiling(0.3)
+ let $v4 := numeric-ceiling(float("-2013.2"))
+ let $v5 := numeric-ceiling(double("-2013.893823748327284"))
+ return { "v1": $v1, "v2": $v2, "v3": $v3, "v4": $v4, "v5": $v5 }
+
+
+ * The expected result is:
+
+ { "v1": 2013, "v2": -4036, "v3": 1.0d, "v4": -2013.0f, "v5": -2013.0d }
+
+
+### numeric-floor ###
+ * Syntax:
+
+ numeric-floor(numeric_expression)
+
+ * Computes the largest (closest to positive infinity) number with no fractional part that is not greater than the value of the argument. If the argument is already equal to a mathematical integer, then the result is the same as the argument.
+ * Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64`/`float`/`double` value.
+ * Return Value:
+ * The floor value for the given number in the same type as the input argument, or `null` if the input is `null`.
+
+ * Example:
+
+ let $v1 := numeric-floor(2013)
+ let $v2 := numeric-floor(-4036)
+ let $v3 := numeric-floor(0.8)
+ let $v4 := numeric-floor(float("-2013.2"))
+ let $v5 := numeric-floor(double("-2013.893823748327284"))
+ return { "v1": $v1, "v2": $v2, "v3": $v3, "v4": $v4, "v5": $v5 }
+
+
+ * The expected result is:
+
+ { "v1": 2013, "v2": -4036, "v3": 0.0d, "v4": -2014.0f, "v5": -2014.0d }
+
+
+### numeric-round ###
+ * Syntax:
+
+ numeric-round(numeric_expression)
+
+ * Computes the number with no fractional part that is closest to the argument; if two such numbers are equally close, the result is the one closer to positive infinity.
+ * Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64`/`float`/`double` value.
+ * Return Value:
+ * The rounded value for the given number in the same type as the input argument, or `null` if the input is `null`.
+
+ * Example:
+
+ let $v1 := numeric-round(2013)
+ let $v2 := numeric-round(-4036)
+ let $v3 := numeric-round(0.8)
+ let $v4 := numeric-round(float("-2013.256"))
+ let $v5 := numeric-round(double("-2013.893823748327284"))
+ return { "v1": $v1, "v2": $v2, "v3": $v3, "v4": $v4, "v5": $v5 }
+
+
+ * The expected result is:
+
+ { "v1": 2013, "v2": -4036, "v3": 1.0d, "v4": -2013.0f, "v5": -2014.0d }
+
+
+### numeric-round-half-to-even ###
+ * Syntax:
+
+ numeric-round-half-to-even(numeric_expression, [precision])
+
+ * Computes the closest numeric value to `numeric_expression` that is a multiple of ten to the power of minus `precision`. `precision` is optional; if it is not given, the default value `0` is used.
+ * Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64`/`float`/`double` value.
+ * `precision`: An optional integer representing the number of digits in the fraction of the result.
+ * Return Value:
+ * The rounded value for the given number in the same type as the input argument, or `null` if the input is `null`.
+
+ * Example:
+
+ let $v1 := numeric-round-half-to-even(2013)
+ let $v2 := numeric-round-half-to-even(-4036)
+ let $v3 := numeric-round-half-to-even(0.8)
+ let $v4 := numeric-round-half-to-even(float("-2013.256"))
+ let $v5 := numeric-round-half-to-even(double("-2013.893823748327284"))
+ let $v6 := numeric-round-half-to-even(double("-2013.893823748327284"), 2)
+ let $v7 := numeric-round-half-to-even(2013, 4)
+ let $v8 := numeric-round-half-to-even(float("-2013.256"), 5)
+ return { "v1": $v1, "v2": $v2, "v3": $v3, "v4": $v4, "v5": $v5, "v6": $v6, "v7": $v7, "v8": $v8 }
+
+
+ * The expected result is:
+
+ { "v1": 2013, "v2": -4036, "v3": 1.0d, "v4": -2013.0f, "v5": -2014.0d, "v6": -2013.89d, "v7": 2013, "v8": -2013.256f }
+
+
+## <a id="StringFunctions">String Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
### string-to-codepoint ###
* Syntax:
@@ -9,9 +156,9 @@
* Converts the string `string_expression` to its code-based representation.
* Arguments:
- * `string_expression` : A `string` that will be converted.
+ * `string_expression` : A `string` that will be converted.
* Return Value:
- * An `OrderedList` of the code points for the string `string_expression`.
+ * An `OrderedList` of the code points for the string `string_expression`.
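+
+ * Example (illustrative; the code points shown are the standard ASCII values of the characters):
+
+ let $s := "Hello ASTERIX!"
+ return string-to-codepoint($s)
+
+
+ * The expected result is:
+
+ [ 72, 101, 108, 108, 111, 32, 65, 83, 84, 69, 82, 73, 88, 33 ]
+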
### codepoint-to-string ###
* Syntax:
@@ -20,9 +167,9 @@
* Converts the ordered code-based representation `list_expression` to the corresponding string.
* Arguments:
- * `list_expression` : An `OrderedList` of code-points.
+ * `list_expression` : An `OrderedList` of code-points.
* Return Value:
- * A `string` representation of `list_expression`.
+ * A `string` representation of `list_expression`.
* Example:
@@ -46,10 +193,10 @@
* Checks whether the string `string_expression` contains the string `substring_to_contain`
* Arguments:
- * `string_expression` : A `string` that might contain the given substring.
- * `substring_to_contain` : A target `string` that might be contained.
+ * `string_expression` : A `string` that might contain the given substring.
+ * `substring_to_contain` : A target `string` that might be contained.
* Return Value:
- * A `boolean`, returns `true` if `string_expression` contains `substring_to_contain`, otherwise returns `false`.
+ * A `boolean` value, `true` if `string_expression` contains `substring_to_contain`, and `false` otherwise.
* Example:
@@ -67,41 +214,17 @@
{ "mid": 15, "message": " like iphone the voicemail-service is awesome" }
-### len ###
- * Syntax:
-
- len(list_expression)
-
- * Returns the length of the list `list_expression`.
- * Arguments:
- * `list_expression` : An `OrderedList`, `UnorderedList` or `null`, represents the list need to be checked.
- * Return Value:
- * An `int32` that represents the length of `list_expression`.
-
- * Example:
-
- use dataverse TinySocial;
-
- let $l := ["ASTERIX", "Hyracks"]
- return len($l)
-
-
- * The expected result is:
-
- 2
-
-
### like ###
* Syntax:
like(string_expression, string_pattern)
- * Checks whether the string `string_expression` contains the string pattern `string_pattern`. Compared with `contains` function, `like` function also supports regex keywords.
+ * Checks whether the string `string_expression` contains the string pattern `string_pattern`. Compared to the `contains` function, the `like` function also supports regular expressions.
* Arguments:
- * `string_expression` : A `string` that might contain the pattern or `null`.
- * `string_pattern` : A pattern `string` that might be contained or `null`.
+ * `string_expression` : A `string` that might contain the pattern or `null`.
+ * `string_pattern` : A pattern `string` that might be contained or `null`.
* Return Value:
- * A `boolean`, returns `true` if `string_expression` contains the pattern `string_pattern`, otherwise returns `false`.
+ * A `boolean` value, `true` if `string_expression` contains the pattern `string_pattern`, and `false` otherwise.
* Example:
@@ -126,10 +249,10 @@
* Checks whether the string `string_expression` starts with the string `substring_to_start_with`.
* Arguments:
- * `string_expression` : A `string` that might start with the given string.
- * `substring_to_start_with` : A `string` that might be contained as the starting substring.
+ * `string_expression` : A `string` that might start with the given string.
+ * `substring_to_start_with` : A `string` that might be contained as the starting substring.
* Return Value:
- * A `boolean`, returns `true` if `string_expression` starts with the string `substring_to_start_with`, otherwise returns `false`.
+ * A `boolean` value, `true` if `string_expression` starts with the string `substring_to_start_with`, and `false` otherwise.
* Example:
@@ -155,10 +278,10 @@
* Checks whether the string `string_expression` ends with the string `substring_to_end_with`.
* Arguments:
- * `string_expression` : A `string` that might end with the given string.
- * `substring_to_end_with` : A `string` that might be contained as the ending substring.
+ * `string_expression` : A `string` that might end with the given string.
+ * `substring_to_end_with` : A `string` that might be contained as the ending substring.
* Return Value:
- * A `boolean`, returns `true` if `string_expression` ends with the string `substring_to_end_with`, otherwise returns `false`.
+ * A `boolean` value, `true` if `string_expression` ends with the string `substring_to_end_with`, and `false` otherwise.
* Example:
@@ -183,9 +306,9 @@
* Concatenates a list of strings `list_expression` into a single string.
* Arguments:
- * `list_expression` : An `OrderedList` or `UnorderedList` of `string`s (could be `null`) to be concatenated.
+ * `list_expression` : An `OrderedList` or `UnorderedList` of `string`s (could be `null`) to be concatenated.
* Return Value:
- * Returns the concatenated `string` value.
+ * Returns the concatenated `string` value.
* Example:
@@ -200,31 +323,6 @@
"ASTERIX ROCKS!"
-### string-equal ###
- * Syntax:
-
- string-equal(string_expression1, string_expression2)
-
- * Checks whether the strings `string_expression1` and `string_expression2` are equal.
- * Arguments:
- * `string_expression1` : A `string` to be compared.
- * `string_expression2` : A `string` to be compared with.
- * Return Value:
- * A `boolean`, returns `true` if `string_expression1` and `string_expression2` are equal, otherwise returns `false`.
-
- * Example:
-
- use dataverse TinySocial;
-
- let $i := "Android"
- return {"Equal": string-equal($i, "Android"), "NotEqual": string-equal($i, "iphone")}
-
-
- * The expected result is:
-
- { "Equal": true, "NotEqual": false }
-
-
### string-join ###
* Syntax:
@@ -232,10 +330,10 @@
* Joins a list of strings `list_expression` with the given separator `string_expression` into a single string.
* Arguments:
- * `list_expression` : An `OrderedList` or `UnorderedList` of `string`s (could be `null`) to be joined.
- * `string_expression` : A `string` as the separator.
+ * `list_expression` : An `OrderedList` or `UnorderedList` of strings (could be `null`) to be joined.
+ * `string_expression` : A `string` as the separator.
* Return Value:
- * Returns the joined `String`.
+ * Returns the joined `string`.
* Example:
@@ -257,9 +355,9 @@
* Converts a given string `string_expression` to its lowercase form.
* Arguments:
- * `string_expression` : A `string` to be converted.
+ * `string_expression` : A `string` to be converted.
* Return Value:
- * Returns a `string` as the lowercase form of the given `string_expression`.
+ * Returns a `string` as the lowercase form of the given `string_expression`.
* Example:
@@ -281,10 +379,10 @@
* Checks whether the strings `string_expression` matches the given pattern `string_pattern`.
* Arguments:
- * `string_expression` : A `string` that might contain the pattern.
- * `string_pattern` : A pattern `string` to be matched.
+ * `string_expression` : A `string` that might contain the pattern.
+ * `string_pattern` : A pattern `string` to be matched.
* Return Value:
- * A `boolean`, returns `true` if `string_expression` matches the pattern `string_pattern`, otherwise returns `false`.
+ * A `boolean` value, `true` if `string_expression` matches the pattern `string_pattern`, and `false` otherwise.
* Example:
@@ -306,13 +404,13 @@
replace(string_expression, string_pattern, string_replacement)
- * Checks whether the strings `string_expression` matches the given pattern `string_pattern`, and replace the matched pattern `string_pattern` with the new pattern `string_replacement`.
+ * Checks whether the string `string_expression` matches the given pattern `string_pattern`, and replaces the matched pattern `string_pattern` with the new pattern `string_replacement`.
* Arguments:
- * `string_expression` : A `string` that might contain the pattern.
- * `string_pattern` : A pattern `string` to be matched.
- * `string_replacement` : A pattern `string` to be used as the replacement.
+ * `string_expression` : A `string` that might contain the pattern.
+ * `string_pattern` : A pattern `string` to be matched.
+ * `string_replacement` : A pattern `string` to be used as the replacement.
* Return Value:
- * Returns a `string` that is obtained after the replacements.
+ * Returns a `string` that is obtained after the replacements.
* Example:
@@ -335,9 +433,9 @@
* Returns the length of the string `string_expression`.
* Arguments:
- * `string_expression` : A `string` or `null`, represents the string to be checked.
+ * `string_expression` : A `string` or `null` that represents the string to be checked.
* Return Value:
- * An `int32` that represents the length of `string_expression`.
+ * An `int32` that represents the length of `string_expression`.
* Example:
@@ -373,11 +471,11 @@
* Returns the substring from the given string `string_expression` based on the given start offset `offset` with the optional `length`.
* Arguments:
- * `string_expression` : A `string` as the string to be extracted.
- * `offset` : An `int32` as the starting offset of the substring in `string_expression`.
- * `length` : (Optional) An `int32` as the length of the substring.
+ * `string_expression` : A `string` from which the substring will be extracted.
+ * `offset` : An `int32` as the starting offset of the substring in `string_expression`.
+ * `length` : (Optional) An `int32` as the length of the substring.
* Return Value:
- * A `string` that represents the substring.
+ * A `string` that represents the substring.
* Example:
@@ -400,10 +498,10 @@
* Returns the substring from the given string `string_expression` before the given pattern `string_pattern`.
* Arguments:
- * `string_expression` : A `string` as the string to be extracted.
- * `string_pattern` : A `string` as the string pattern to be searched.
+ * `string_expression` : A `string` from which the substring will be extracted.
+ * `string_pattern` : A `string` pattern to search for.
* Return Value:
- * A `string` that represents the substring.
+ * A `string` that represents the substring.
* Example:
@@ -428,10 +526,10 @@
* Returns the substring from the given string `string_expression` after the given pattern `string_pattern`.
* Arguments:
- * `string_expression` : A `string` as the string to be extracted.
- * `string_pattern` : A `string` as the string pattern to be searched.
+ * `string_expression` : A `string` from which the substring will be extracted.
+ * `string_pattern` : A `string` pattern to search for.
* Return Value:
- * A `string` that represents the substring.
+ * A `string` that represents the substring.
* Example:
@@ -448,19 +546,108 @@
" the voice-command is bad:("
" the voicemail-service is awesome"
+## <a id="AggregateFunctions">Aggregate Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+### count ###
+ * Syntax:
+
+ count(list)
+
+ * Gets the number of items in the given list.
+ * Arguments:
+ * `list`: An `OrderedList` or `UnorderedList` containing the items to be counted, or a `null` value.
+ * Return Value:
+ * An `int64` value representing the number of items in the given list. `0i64` is returned if the input is `null`.
+
+ * Example:
+
+ use dataverse TinySocial;
-## Spatial Functions ##
+ let $l1 := ['hello', 'world', 1, 2, 3]
+ let $l2 := for $i in dataset TwitterUsers return $i
+ return {"count1": count($l1), "count2": count($l2)}
+
+ * The expected result is:
+
+ { "count1": 5i64, "count2": 4i64 }
+
+### avg ###
+ * Syntax:
+
+ avg(num_list)
+
+ * Gets the average value of the items in the given list.
+ * Arguments:
+ * `num_list`: An `OrderedList` or `UnorderedList` containing numeric or `null` values, or a `null` value.
+ * Return Value:
+ * A `double` value representing the average of the numbers in the given list. `null` is returned if the input is `null`, or if the input list contains `null`. Non-numeric types in the input list will cause an error.
+
+ * Example:
+
+ use dataverse TinySocial;
+
+ let $l := for $i in dataset TwitterUsers return $i.friends_count
+ return {"avg_friend_count": avg($l)}
+
+ * The expected result is:
+
+ { "avg_friend_count": 191.5d }
+
+### sum ###
+ * Syntax:
+
+ sum(num_list)
+
+ * Gets the sum of the items in the given list.
+ * Arguments:
+ * `num_list`: An `OrderedList` or `UnorderedList` containing numeric or `null` values, or a `null` value.
+ * Return Value:
+ * The sum of the numbers in the given list. The return type is determined by the item type with the highest order in the numeric type promotion order (`int8`->`int16`->`int32`->`float`->`double`, `int32`->`int64`->`double`) among the items. `null` is returned if the input is `null`, or if the input list contains `null`. Non-numeric types in the input list will cause an error.
+
+ * Example:
+
+ use dataverse TinySocial;
+
+ let $l := for $i in dataset TwitterUsers return $i.friends_count
+ return {"sum_friend_count": sum($l)}
+
+ * The expected result is:
+
+ { "sum_friend_count": 766 }
+
+### min/max ###
+ * Syntax:
+
+ min(num_list), max(num_list)
+
+ * Gets the min/max value of numeric items in the given list.
+ * Arguments:
+ * `num_list`: An `OrderedList` or `UnorderedList` containing the items to be compared, or a `null` value.
+ * Return Value:
+ * The min/max value of the given list. The return type is determined by the item type with the highest order in the numeric type promotion order (`int8`->`int16`->`int32`->`float`->`double`, `int32`->`int64`->`double`) among the items. `null` is returned if the input is `null`, or if the input list contains `null`. Non-numeric types in the input list will cause an error.
+
+ * Example:
+
+ use dataverse TinySocial;
+
+ let $l := for $i in dataset TwitterUsers return $i.friends_count
+ return {"min_friend_count": min($l), "max_friend_count": max($l)}
+
+ * The expected result is:
+
+ { "min_friend_count": 18, "max_friend_count": 445 }
+
+## <a id="SpatialFunctions">Spatial Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
### create-point ###
* Syntax:
- create-point(latitude, longitude)
+ create-point(x, y)
- * Creates the primitive type `point` using `latitude` and `longitude`.
+ * Creates the primitive type `point` using an `x` and `y` value.
* Arguments:
- * `latitude` : A `double` that represents the latitude.
- * `longitude` : A `double` that represents the longitude.
+ * `x` : A `double` that represents the x-coordinate.
+ * `y` : A `double` that represents the y-coordinate.
* Return Value:
- * A `point`, represents a spatial point created using the latitude and longitude provided in `latitude` and `longitude`.
+ * A `point` representing the ordered pair (`x`, `y`).
* Example:
@@ -482,10 +669,10 @@
* Creates the primitive type `line` using `point_expression1` and `point_expression2`.
* Arguments:
- * `point_expression1` : A `point` that represents the start point of the line.
- * `point_expression2` : A `point` that represents the end point of the line.
+ * `point_expression1` : A `point` that represents the start point of the line.
+ * `point_expression2` : A `point` that represents the end point of the line.
* Return Value:
- * A `line`, represents a spatial line created using the points provided in `point_expression1` and `point_expression2`.
+ * A spatial `line` created using the points provided in `point_expression1` and `point_expression2`.
* Example:
@@ -507,10 +694,10 @@
* Creates the primitive type `rectangle` using `point_expression1` and `point_expression2`.
* Arguments:
- * `point_expression1` : A `point` that represents the lower-left point of the rectangle.
- * `point_expression2` : A `point` that represents the upper-right point of the rectangle.
+ * `point_expression1` : A `point` that represents the lower-left point of the rectangle.
+ * `point_expression2` : A `point` that represents the upper-right point of the rectangle.
* Return Value:
- * A `rectangle`, represents a spatial rectangle created using the points provided in `point_expression1` and `point_expression2`.
+ * A spatial `rectangle` created using the points provided in `point_expression1` and `point_expression2`.
* Example:
@@ -532,10 +719,10 @@
* Creates the primitive type `circle` using `point_expression` and `radius`.
* Arguments:
- * `point_expression` : A `point` that represents the center of the circle.
- * `radius` : A `double` that represents the radius of the circle.
+ * `point_expression` : A `point` that represents the center of the circle.
+ * `radius` : A `double` that represents the radius of the circle.
* Return Value:
- * A `circle`, represents a spatial circle created using the center point and the radius provided in `point_expression` and `radius`.
+ * A spatial `circle` created using the center point and the radius provided in `point_expression` and `radius`.
* Example:
@@ -553,19 +740,19 @@
### create-polygon ###
* Syntax:
- create-polygon(point_expression1, point_expression2, point_expression3, […, point_expressionn])
+ create-polygon(list_expression)
- * Creates the primitive type `polygon` using unlimited number of arguments `point_expression1`, `point_expression2`, ..., `point_expressionn`. Note that at least three points should be specified.
+ * Creates the primitive type `polygon` using the double values provided in the argument `list_expression`. Every two consecutive double values represent a point, starting from the first double value in the list. Note that at least six double values should be specified, meaning a total of three points.
* Arguments:
- * `point_expression1`/.../`point_expressionn` : A `point` that represents a vertex of the polygon.
+ * `list_expression` : An `OrderedList` of `double` values representing the points of the polygon.
* Return Value:
- * A `polygon`, represents a spatial simple polygon created using the points provided in `point_expression1`, `point_expression2`, ..., `point_expressionn`.
+ * A `polygon` representing a simple spatial polygon created using the points provided in `list_expression`.
* Example:
use dataverse TinySocial;
- let $c := create-polygon(create-point(1.0,1.0), create-point(2.0,2.0), create-point(3.0,3.0), create-point(4.0,4.0))
+ let $c := create-polygon([1.0,1.0,2.0,2.0,3.0,3.0,4.0,4.0])
return {"polygon": $c}
@@ -579,15 +766,14 @@
point(string_expression)
- * Constructor function for `point` type by parsing a point string `string_expression`
+ * Constructor function for the `point` type by parsing a point string `string_expression`.
* Arguments:
- * `string_expression` : The `string` value representing a point value.
+ * `string_expression` : The `string` value representing a point value.
* Return Value:
- * A `point` value represented by the given string.
+ * A `point` value represented by the given string.
* Example:
-
use dataverse TinySocial;
let $c := point("55.05,-138.04")
@@ -606,13 +792,12 @@
* Constructor function for `line` type by parsing a line string `string_expression`
* Arguments:
- * `string_expression` : The `string` value representing a line value.
+ * `string_expression` : The `string` value representing a line value.
* Return Value:
- * A `line` value represented by the given string.
+ * A `line` value represented by the given string.
* Example:
-
use dataverse TinySocial;
let $c := line("55.05,-138.04 13.54,-138.04")
@@ -631,13 +816,12 @@
* Constructor function for `rectangle` type by parsing a rectangle string `string_expression`
* Arguments:
- * `string_expression` : The `string` value representing a rectangle value.
+ * `string_expression` : The `string` value representing a rectangle value.
* Return Value:
- * A `rectangle` value represented by the given string.
+ * A `rectangle` value represented by the given string.
* Example:
-
use dataverse TinySocial;
let $c := rectangle("20.05,-125.0 40.67,-100.87")
@@ -656,13 +840,12 @@
* Constructor function for `circle` type by parsing a circle string `string_expression`
* Arguments:
- * `string_expression` : The `string` value representing a circle value.
+ * `string_expression` : The `string` value representing a circle value.
* Return Value:
* A `circle` value represented by the given string.
* Example:
-
use dataverse TinySocial;
let $c := circle("55.05,-138.04 10.0")
@@ -681,13 +864,12 @@
* Constructor function for `polygon` type by parsing a polygon string `string_expression`
* Arguments:
- * `string_expression` : The `string` value representing a polygon value.
+ * `string_expression` : The `string` value representing a polygon value.
* Return Value:
- * A `polygon` value represented by the given string.
+ * A `polygon` value represented by the given string.
* Example:
-
use dataverse TinySocial;
let $c := polygon("55.05,-138.04 13.54,-138.04 13.54,-53.31 55.05,-53.31")
@@ -706,9 +888,9 @@
* Returns the x or y coordinates of a point `point_expression`.
* Arguments:
- * `point_expression` : A `point`.
+ * `point_expression` : A `point`.
* Return Value:
- * A `double`, represents the x or y coordinates of the point `point_expression`.
+ * A `double` representing the x or y coordinates of the point `point_expression`.
* Example:
@@ -730,9 +912,9 @@
* Returns an ordered list of the points forming the spatial object `spatial_expression`.
* Arguments:
- * `spatial_expression` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
+ * `spatial_expression` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
* Return Value:
- * An `OrderedList` of the points forming the spatial object `spatial_expression`.
+ * An `OrderedList` of the points forming the spatial object `spatial_expression`.
* Example:
@@ -740,7 +922,7 @@
let $line := create-line(create-point(100.6,99.4), create-point(-72.0,-76.9))
let $rectangle := create-rectangle(create-point(9.2,49.0), create-point(77.8,111.1))
- let $polygon := create-polygon(create-point(1.0,1.0), create-point(2.0,2.0), create-point(3.0,3.0), create-point(4.0,4.0))
+ let $polygon := create-polygon([1.0,1.0,2.0,2.0,3.0,3.0,4.0,4.0])
let $line_list := get-points($line)
let $rectangle_list := get-points($rectangle)
let $polygon_list := get-points($polygon)
@@ -757,11 +939,11 @@
get-center(circle_expression) or get-radius(circle_expression)
- * Returns the center and the radius of a circle `circle_expression`.
+ * Returns the center and the radius of a circle `circle_expression`, respectively.
* Arguments:
- * `circle_expression` : A `circle`.
+ * `circle_expression` : A `circle`.
* Return Value:
- * A `point` or `double`, represent the center or radius of the circle `circle_expression`.
+ * A `point` or `double` value representing the center or the radius of the circle `circle_expression`, respectively.
* Example:
@@ -783,12 +965,12 @@
spatial-distance(point_expression1, point_expression2)
- * Returns the euclidean distance between `point_expression1` and `point_expression2`.
+ * Returns the Euclidean distance between `point_expression1` and `point_expression2`.
* Arguments:
- * `point_expression1` : A `point`.
- * `point_expression2` : A `point`.
+ * `point_expression1` : A `point`.
+ * `point_expression2` : A `point`.
* Return Value:
- * A `double`, represents the euclidean distance between `point_expression1` and `point_expression2`.
+ * A `double` as the Euclidean distance between `point_expression1` and `point_expression2`.
* Example:
@@ -819,13 +1001,13 @@
### spatial-area ###
* Syntax:
- spatial-distance(spatial_2d_expression)
+ spatial-area(spatial_2d_expression)
* Returns the spatial area of `spatial_2d_expression`.
* Arguments:
- * `spatial_2d_expression` : A `rectangle`, `circle`, or `polygon`.
+ * `spatial_2d_expression` : A `rectangle`, `circle`, or `polygon`.
* Return Value:
- * A `double`, represents the area of `spatial_2d_expression`.
+ * A `double` representing the area of `spatial_2d_expression`.
* Example:
@@ -848,10 +1030,10 @@
 * Checks whether `spatial_expression1` and `spatial_expression2` spatially intersect each other.
* Arguments:
- * `spatial_expression1` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
- * `spatial_expression2` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
+ * `spatial_expression1` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
+ * `spatial_expression2` : A `point`, `line`, `rectangle`, `circle`, or `polygon`.
* Return Value:
- * A `boolean`, represents whether `spatial_expression1` and `spatial_expression2` spatially intersect each other.
+ * A `boolean` representing whether `spatial_expression1` and `spatial_expression2` spatially overlap each other.
* Example:
@@ -876,12 +1058,12 @@
* Returns the grid cell that `point_expression1` belongs to.
* Arguments:
- * `point_expression1` : A `point`, represents the point of interest that its grid cell will be returned.
- * `point_expression2` : A `point`, represents the origin of the grid.
- * `x_increment` : A `double`, represents X increments.
- * `y_increment` : A `double`, represents Y increments.
+ * `point_expression1` : A `point` representing the point of interest whose grid cell will be returned.
+ * `point_expression2` : A `point` representing the origin of the grid.
+ * `x_increment` : A `double` representing the increment along the x-axis.
+ * `y_increment` : A `double` representing the increment along the y-axis.
* Return Value:
- * A `rectangle`, represents the grid cell that `point_expression1` belongs to.
+ * A `rectangle` representing the grid cell that `point_expression1` belongs to.
* Example:
@@ -895,21 +1077,21 @@
* The expected result is:
- { "cell": rectangle("20.0,92.0 25.5,98.0"), "count": 1 }
- { "cell": rectangle("25.5,74.0 31.0,80.0"), "count": 2 }
- { "cell": rectangle("31.0,62.0 36.5,68.0"), "count": 1 }
- { "cell": rectangle("31.0,68.0 36.5,74.0"), "count": 1 }
- { "cell": rectangle("36.5,68.0 42.0,74.0"), "count": 2 }
- { "cell": rectangle("36.5,74.0 42.0,80.0"), "count": 1 }
- { "cell": rectangle("36.5,92.0 42.0,98.0"), "count": 1 }
- { "cell": rectangle("42.0,80.0 47.5,86.0"), "count": 1 }
- { "cell": rectangle("42.0,92.0 47.5,98.0"), "count": 1 }
- { "cell": rectangle("47.5,80.0 53.0,86.0"), "count": 1 }
+ { "cell": rectangle("20.0,92.0 25.5,98.0"), "count": 1i64 }
+ { "cell": rectangle("25.5,74.0 31.0,80.0"), "count": 2i64 }
+ { "cell": rectangle("31.0,62.0 36.5,68.0"), "count": 1i64 }
+ { "cell": rectangle("31.0,68.0 36.5,74.0"), "count": 1i64 }
+ { "cell": rectangle("36.5,68.0 42.0,74.0"), "count": 2i64 }
+ { "cell": rectangle("36.5,74.0 42.0,80.0"), "count": 1i64 }
+ { "cell": rectangle("36.5,92.0 42.0,98.0"), "count": 1i64 }
+ { "cell": rectangle("42.0,80.0 47.5,86.0"), "count": 1i64 }
+ { "cell": rectangle("42.0,92.0 47.5,98.0"), "count": 1i64 }
+ { "cell": rectangle("47.5,80.0 53.0,86.0"), "count": 1i64 }
-## Similarity Functions ##
+## <a id="SimilarityFunctions">Similarity Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
AsterixDB supports queries with different similarity functions, including edit distance and Jaccard.
@@ -920,10 +1102,10 @@
* Returns the [edit distance](http://en.wikipedia.org/wiki/Levenshtein_distance) of `expression1` and `expression2`.
* Arguments:
- * `expression1` : A `string` or a homogeneous `OrderedList` of a comparable item type.
- * `expression2` : The same type as `expression1`.
+ * `expression1` : A `string` or a homogeneous `OrderedList` of a comparable item type.
+ * `expression2` : The same type as `expression1`.
* Return Value:
- * An `int32` that represents the edit-distance similarity between `expression1` and `expression2`.
+ * An `int32` that represents the edit distance between `expression1` and `expression2`.
* Example:
@@ -948,16 +1130,16 @@
edit-distance-check(expression1, expression2, threshold)
- * Checks whether `expression1` and `expression2` have a [edit distance](http://en.wikipedia.org/wiki/Levenshtein_distance) `<= threshold`. The “check” version of edit distance is faster than the "non-check" version because the former can detect whether two items satisfy a given similarity threshold using early-termination techniques, as opposed to computing their real distance. Although possible, it is not necessary for the user to write queries using the “check” versions explicitly, since a rewrite rule can perform an appropriate transformation from a “non-check” version to a “check” version.
+ * Checks whether `expression1` and `expression2` have an [edit distance](http://en.wikipedia.org/wiki/Levenshtein_distance) within a given threshold. The “check” version of edit distance is faster than the "non-check" version because the former can detect whether two items satisfy a given threshold using early-termination techniques, as opposed to computing their real distance. Although possible, it is not necessary for the user to write queries using the “check” versions explicitly, since a rewrite rule can perform an appropriate transformation from a “non-check” version to a “check” version.
* Arguments:
- * `expression1` : A `string` or a homogeneous `OrderedList` of a comparable item type.
- * `expression2` : The same type as `expression1`.
- * `threshold` : An `int32` that represents the distance threshold.
+ * `expression1` : A `string` or a homogeneous `OrderedList` of a comparable item type.
+ * `expression2` : The same type as `expression1`.
+ * `threshold` : An `int32` that represents the distance threshold.
* Return Value:
- * An `OrderedList` with two items:
- * The first item contains a `boolean` value representing whether `expression1` and `expression2` are similar.
- * The second item contains an `int32` that represents the edit distance of `expression1` and `expression2` if it is `<= `threshold`, or 0 otherwise.
+ * An `OrderedList` with two items:
+ * The first item contains a `boolean` value representing whether `expression1` and `expression2` are similar.
+ * The second item contains an `int32` that represents the edit distance of `expression1` and `expression2` if it is within the threshold, or 0 otherwise.
* Example:
@@ -981,10 +1163,10 @@
* Returns the [Jaccard similarity](http://en.wikipedia.org/wiki/Jaccard_index) of `list_expression1` and `list_expression2`.
* Arguments:
- * `list_expression1` : An `UnorderedList` or `OrderedList`.
- * `list_expression2` : An `UnorderedList` or `OrderedList`.
+ * `list_expression1` : An `UnorderedList` or `OrderedList`.
+ * `list_expression2` : An `UnorderedList` or `OrderedList`.
* Return Value:
- * A `float` that represents the Jaccard similarity of `list_expression1` and `list_expression2`.
+ * A `float` that represents the Jaccard similarity of `list_expression1` and `list_expression2`.
* Example:
@@ -1013,16 +1195,16 @@
similarity-jaccard-check(list_expression1, list_expression2, threshold)
- * Checks whether `list_expression1` and `list_expression2` have a [Jaccard similarity](http://en.wikipedia.org/wiki/Jaccard_index) `>= threshold`. Again, the “check” version of Jaccard is faster than the "non-check" version.
+ * Checks whether `list_expression1` and `list_expression2` have a [Jaccard similarity](http://en.wikipedia.org/wiki/Jaccard_index) greater than or equal to `threshold`. Again, the “check” version of Jaccard is faster than the "non-check" version.
* Arguments:
- * `list_expression1` : An `UnorderedList` or `OrderedList`.
- * `list_expression2` : An `UnorderedList` or `OrderedList`.
- * `threshold` : A `float` that represents the similarity threshold.
+ * `list_expression1` : An `UnorderedList` or `OrderedList`.
+ * `list_expression2` : An `UnorderedList` or `OrderedList`.
+ * `threshold` : A `float` that represents the similarity threshold.
* Return Value:
- * An `OrderedList` with two items:
+ * An `OrderedList` with two items:
* The first item contains a `boolean` value representing whether `list_expression1` and `list_expression2` are similar.
- * The second item contains a `float` that represents the Jaccard similarity of `list_expression1` and `list_expression2` if it is >`= `threshold`, or 0 otherwise.
+ * The second item contains a `float` that represents the Jaccard similarity of `list_expression1` and `list_expression2` if it is greater than or equal to the threshold, or 0 otherwise.
* Example:
@@ -1043,7 +1225,7 @@
### Similarity Operator ~= ###
* "`~=`" is syntactic sugar for expressing a similarity condition with a given similarity threshold.
* The similarity function and threshold for "`~=`" are controlled via "set" directives.
- * The "`~=`" operator returns a `boolean` that represents whether the operands are similar.
+ * The "`~=`" operator returns a `boolean` value that represents whether the operands are similar.
* Example for Jaccard similarity:
@@ -1089,7 +1271,7 @@
}
-## Tokenizing Functions ##
+## <a id="TokenizingFunctions">Tokenizing Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
### word-tokens ###
* Syntax:
@@ -1097,9 +1279,9 @@
* Returns a list of word tokens of `string_expression`.
* Arguments:
- * `string_expression` : A `string` that will be tokenized.
+ * `string_expression` : A `string` that will be tokenized.
* Return Value:
- * An `OrderedList` of `string` word tokens.
+ * An `OrderedList` of `string` word tokens.
* Example:
@@ -1126,7 +1308,7 @@
* Returns a list of hashed word tokens of `string_expression`.
* Arguments:
- * `string_expression` : A `string` that will be tokenized.
+ * `string_expression` : A `string` that will be tokenized.
* Return Value:
* An `OrderedList` of `int32` hashed tokens.
@@ -1155,9 +1337,9 @@
* Returns a list of hashed word tokens of `string_expression`. The hashing mechanism gives duplicate tokens different hash values, based on the occurrence count of that token.
* Arguments:
- * `string_expression` : A `String` that will be tokenized.
+ * `string_expression` : A `string` that will be tokenized.
* Return Value:
- * An `OrderedList` of `Int32` hashed tokens.
+ * An `OrderedList` of `int32` hashed tokens.
* Example:
use dataverse TinySocial;
@@ -1183,11 +1365,11 @@
* Returns a list of gram tokens of `string_expression`, which can be obtained by scanning the characters using a sliding window of a fixed length.
* Arguments:
- * `string_expression` : A `String` that will be tokenized.
- * `gram_length` : An `Int32` as the length of grams.
+ * `string_expression` : A `string` that will be tokenized.
+ * `gram_length` : An `int32` as the length of grams.
 * `boolean_expression` : A `boolean` value to indicate whether to generate additional grams by pre- and postfixing `string_expression` with special characters.
* Return Value:
- * An `OrderedList` of String gram tokens.
+ * An `OrderedList` of `string` gram tokens.
* Example:
@@ -1218,11 +1400,11 @@
* Returns a list of hashed gram tokens of `string_expression`.
* Arguments:
- * `string_expression` : A `String` that will be tokenized.
- * `gram_length` : An `Int32` as the length of grams.
- * `boolean_expression` : A `Boolean` to indicate whether to generate additional grams by pre- and postfixing `string_expression` with special characters.
+ * `string_expression` : A `string` that will be tokenized.
+ * `gram_length` : An `int32` as the length of grams.
+ * `boolean_expression` : A `boolean` to indicate whether to generate additional grams by pre- and postfixing `string_expression` with special characters.
* Return Value:
- * An `OrderedList` of `Int32` hashed gram tokens.
+ * An `OrderedList` of `int32` hashed gram tokens.
* Example:
@@ -1255,11 +1437,11 @@
* Returns a list of hashed gram tokens of `string_expression`. The hashing mechanism gives duplicate tokens different hash values, based on the occurrence count of that token.
* Arguments:
- * `string_expression` : A `String` that will be tokenized.
- * `gram_length` : An `Int32`, length of grams to generate.
- * `boolean_expression` : A `Boolean`, whether to generate additional grams by pre- and postfixing `string_expression` with special characters.
+ * `string_expression` : A `string` that will be tokenized.
+ * `gram_length` : An `int32`, the length of grams to generate.
+ * `boolean_expression` : A `boolean` indicating whether to generate additional grams by pre- and postfixing `string_expression` with special characters.
* Return Value:
- * An `OrderedList` of `Int32` hashed gram tokens.
+ * An `OrderedList` of `int32` hashed gram tokens.
* Example:
@@ -1285,18 +1467,18 @@
}
-->
-## Temporal Functions ##
+## <a id="TemporalFunctions">Temporal Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
### date ###
* Syntax:
date(string_expression)
- * Constructor function for `date` type by parsing a date string `string_expression`
+ * Constructor function for the `date` type by parsing a date string `string_expression`.
* Arguments:
- * `string_expression` : The `string` value representing a date value.
+ * `string_expression` : The `string` value representing a date value.
* Return Value:
- * A `date` value represented by the given string.
+ * A `date` value represented by the given string.
* Example:
@@ -1319,11 +1501,11 @@
time(string_expression)
- * Constructor function for `time` type by parsing a time string `string_expression`
+ * Constructor function for the `time` type by parsing a time string `string_expression`.
* Arguments:
- * `string_expression` : The `string` value representing a time value.
+ * `string_expression` : The `string` value representing a time value.
* Return Value:
- * A `time` value represented by the given string.
+ * A `time` value represented by the given string.
* Example:
@@ -1346,11 +1528,11 @@
datetime(string_expression)
- * Constructor function for `datetime` type by parsing a datetime string `string_expression`
+ * Constructor function for the `datetime` type by parsing a datetime string `string_expression`.
* Arguments:
- * `string_expression` : The `string` value representing a datetime value.
+ * `string_expression` : The `string` value representing a datetime value.
* Return Value:
- * A `datetime` value represented by the given string.
+ * A `datetime` value represented by the given string.
* Example:
@@ -1373,12 +1555,12 @@
interval-from-date(string_expression1, string_expression2)
- * Constructor function for `interval` type by parsing two date strings.
+ * Constructor function for the `interval` type by parsing two date strings.
* Arguments:
- * `string_expression1` : The `string` value representing the starting date.
- * `string_expression2` : The `string` value representing the ending date.
+ * `string_expression1` : The `string` value representing the starting date.
+ * `string_expression2` : The `string` value representing the ending date.
* Return Value:
- * An `interval` value between the two dates.
+ * An `interval` value between the two dates.
* Example:
@@ -1395,12 +1577,12 @@
interval-from-time(string_expression1, string_expression2)
- * Constructor function for `interval` type by parsing two time strings.
+ * Constructor function for the `interval` type by parsing two time strings.
* Arguments:
- * `string_expression1` : The `string` value representing the starting time.
- * `string_expression2` : The `string` value representing the ending time.
+ * `string_expression1` : The `string` value representing the starting time.
+ * `string_expression2` : The `string` value representing the ending time.
* Return Value:
- * An `interval` value between the two times.
+ * An `interval` value between the two times.
* Example:
@@ -1419,10 +1601,10 @@
 * Constructor function for the `interval` type by parsing two datetime strings.
* Arguments:
- * `string_expression1` : The `string` value representing the starting datetime.
- * `string_expression2` : The `string` value representing the ending datetime.
+ * `string_expression1` : The `string` value representing the starting datetime.
+ * `string_expression2` : The `string` value representing the ending datetime.
* Return Value:
- * An `interval` value between the two datetimes.
+ * An `interval` value between the two datetimes.
* Example:
@@ -1441,9 +1623,9 @@
 * Accessor functions for the fields of a temporal value
* Arguments:
- * `temporal_expression` : a temporal value represented as one of the following types: `date`, `datetime`, `time`, `duration`.
+ * `temporal_expression` : A temporal value of one of the following types: `date`, `datetime`, `time`, or `duration`.
* Return Value:
- * An `int32` value representing the field to be extracted.
+ * An `int32` value representing the extracted field.
* Example:
@@ -1460,121 +1642,23 @@
{ "year": 2010, "month": 11, "day": 30, "hour": 5, "min": 28, "second": 23, "ms": 94 }
-
-### add-date-duration ###
- * Syntax:
-
- add-date-duration(date_expression, duration_expression)
-
- * Create a new date by adding the duration `duration_expression` to the given date `date_expression`.
- * Arguments:
- * `date_expression` : The `date` value to be added onto.
- * `duration_expression` : The `duration` to be added.
- * Return Value:
- * A `date` value represents the new date after being adjusted by the duration.
-
- * Example:
-
- use dataverse TinySocial;
-
- let $startdate := date('2011-03-01')
- for $i in dataset('TweetMessage')
- where date-from-datetime($i.send-time) > $startdate
- and date-from-datetime($i.send-time) < add-date-duration($startdate, duration('P2Y'))
- return {"send-time": $i.send-time, "message": $i.message-text}
-
-
- * The expected result is:
-
- { "send-time": datetime("2011-12-26T10:10:00.000Z"), "message": " like sprint the voice-command is mind-blowing:)" }
- { "send-time": datetime("2011-08-25T10:10:00.000Z"), "message": " like samsung the platform is good" }
- { "send-time": datetime("2012-07-21T10:10:00.000Z"), "message": " love verizon its voicemail-service is awesome" }
-
-
-### add-datetime-duration ###
- * Syntax:
-
- add-date-duration(datetime_expression, duration_expression)
-
- * Create a new datetime by adding the duration `duration_expression` to the given datetime `datetime_expression`.
- * Arguments:
- * `datetime_expression` : The `datetime` value to be added onto.
- * `duration_expression` : The `duration` to be added.
- * Return Value:
- * A `datetime` value represents the new datetime after being adjusted by the duration.
-
- * Example:
-
- use dataverse TinySocial;
-
- let $startdt := datetime('2011-03-01T00:00:00')
- for $i in dataset('TweetMessage')
- where $i.send-time > $startdt and $i.send-time < add-datetime-duration($startdt, duration('P2Y'))
- return {"send-time": $i.send-time, "message": $i.message-text}
-
-
- * The expected result is:
-
- { "send-time": datetime("2011-12-26T10:10:00.000Z"), "message": " like sprint the voice-command is mind-blowing:)" }
- { "send-time": datetime("2011-08-25T10:10:00.000Z"), "message": " like samsung the platform is good" }
- { "send-time": datetime("2012-07-21T10:10:00.000Z"), "message": " love verizon its voicemail-service is awesome" }
-
-
-### add-time-duration ###
- * Syntax:
-
- add-time-duration(time_expression, duration_expression)
-
- * Create a new time by adding the duration `duration_expression` to the given time `time_expression`.
- * Arguments:
- * `time_expression` : The `time` value to be added onto.
- * `duration_expression` : The `duration` to be added.
- * Return Value:
- * A `time` value represents the new time after being adjusted by the duration.
-
- * Example:
-
- use dataverse TinySocial;
-
- let $starttime := time('08:00:00')
- for $i in dataset('TweetMessage')
- where time-from-datetime($i.send-time) > $starttime and time-from-datetime($i.send-time) < add-time-duration($starttime, duration('PT5H'))
- return {"send-time": $i.send-time, "message": $i.message-text}
-
-
- * The expected result is:
-
- { "send-time": datetime("2008-04-26T10:10:00.000Z"), "message": " love t-mobile its customization is good:)" }
- { "send-time": datetime("2010-05-13T10:10:00.000Z"), "message": " like verizon its shortcut-menu is awesome:)" }
- { "send-time": datetime("2006-11-04T10:10:00.000Z"), "message": " like motorola the speed is good:)" }
- { "send-time": datetime("2011-12-26T10:10:00.000Z"), "message": " like sprint the voice-command is mind-blowing:)" }
- { "send-time": datetime("2006-08-04T10:10:00.000Z"), "message": " can't stand motorola its speed is terrible:(" }
- { "send-time": datetime("2010-05-07T10:10:00.000Z"), "message": " like iphone the voice-clarity is good:)" }
- { "send-time": datetime("2011-08-25T10:10:00.000Z"), "message": " like samsung the platform is good" }
- { "send-time": datetime("2005-10-14T10:10:00.000Z"), "message": " like t-mobile the shortcut-menu is awesome:)" }
- { "send-time": datetime("2012-07-21T10:10:00.000Z"), "message": " love verizon its voicemail-service is awesome" }
- { "send-time": datetime("2008-01-26T10:10:00.000Z"), "message": " hate verizon its voice-clarity is OMG:(" }
- { "send-time": datetime("2008-03-09T10:10:00.000Z"), "message": " can't stand iphone its platform is terrible" }
- { "send-time": datetime("2010-02-13T10:10:00.000Z"), "message": " like samsung the voice-command is amazing:)" }
-
-
### adjust-datetime-for-timezone ###
* Syntax:
adjust-datetime-for-timezone(datetime_expression, string_expression)
- * Adjust the given datetime `datetime_expression` by applying the timezone information `string_expression`
+ * Adjusts the given datetime `datetime_expression` by applying the timezone information `string_expression`.
* Arguments:
- * `datetime_expression` : A `datetime` value to be adjusted.
- * `string_expression` : A `string` representing the timezone information.
+ * `datetime_expression` : A `datetime` value to be adjusted.
+ * `string_expression` : A `string` representing the timezone information.
* Return Value:
- * A `string` value represents the new datetime after being adjusted by the timezone information.
+ * A `string` value representing the new datetime after being adjusted by the timezone information.
* Example:
use dataverse TinySocial;
- for $i in dataset('TweetMessage')
+ for $i in dataset('TweetMessages')
return {"adjusted-send-time": adjust-datetime-for-timezone($i.send-time, "+08:00"), "message": $i.message-text}
@@ -1599,18 +1683,18 @@
adjust-time-for-timezone(time_expression, string_expression)
- * Adjust the given time `time_expression` by applying the timezone information `string_expression`
+ * Adjusts the given time `time_expression` by applying the timezone information `string_expression`.
* Arguments:
- * `time_expression` : A `time` value to be adjusted.
- * `string_expression` : A `string` representing the timezone information.
+ * `time_expression` : A `time` value to be adjusted.
+ * `string_expression` : A `string` representing the timezone information.
* Return Value:
- * A `string` value represents the new time after being adjusted by the timezone information.
+ * A `string` value representing the new time after being adjusted by the timezone information.
* Example:
use dataverse TinySocial;
- for $i in dataset('TweetMessage')
+ for $i in dataset('TweetMessages')
return {"adjusted-send-time": adjust-time-for-timezone(time-from-datetime($i.send-time), "+08:00"), "message": $i.message-text}
@@ -1635,18 +1719,18 @@
calendar-duration-from-datetime(datetime_expression, duration_expression)
- * Get a user-friendly representation of the duration `duration_expression` based on the given datetime `datetime_expression`
+ * Gets a user-friendly representation of the duration `duration_expression` based on the given datetime `datetime_expression`.
* Arguments:
- * `datetime_expression` : A `datetime` value to be used as the reference time point.
- * `duration_expression` : A `duration` value to be converted
+ * `datetime_expression` : A `datetime` value to be used as the reference time point.
+ * `duration_expression` : A `duration` value to be converted.
* Return Value:
- * A `duration` value with the duration as `duration_expression` but with a user-friendly representation.
+ * A `duration` value equal to `duration_expression`, but with a user-friendly representation.
* Example:
use dataverse TinySocial;
- for $i in dataset('TweetMessage')
+ for $i in dataset('TweetMessages')
where $i.send-time > datetime("2011-01-01T00:00:00")
return {"since-2011": subtract-datetime($i.send-time, datetime("2011-01-01T00:00:00")), "since-2011-user-friendly": calendar-duration-from-datetime($i.send-time, subtract-datetime($i.send-time, datetime("2011-01-01T00:00:00")))}
@@ -1663,18 +1747,18 @@
calendar-duration-from-date(date_expression, duration_expression)
- * Get a user-friendly representation of the duration `duration_expression` based on the given date `date_expression`
+ * Gets a user-friendly representation of the duration `duration_expression` based on the given date `date_expression`.
* Arguments:
- * `date_expression` : A `date` value to be used as the reference time point.
- * `duration_expression` : A `duration` value to be converted
+ * `date_expression` : A `date` value to be used as the reference time point.
+ * `duration_expression` : A `duration` value to be converted.
* Return Value:
- * A `duration` value with the duration as `duration_expression` but with a user-friendly representation.
+ * A `duration` value equal to `duration_expression`, but with a user-friendly representation.
* Example:
use dataverse TinySocial;
- for $i in dataset('TweetMessage')
+ for $i in dataset('TweetMessages')
where $i.send-time > datetime("2011-01-01T00:00:00")
return {"since-2011": subtract-datetime($i.send-time, datetime("2011-01-01T00:00:00")),
"since-2011-user-friendly": calendar-duration-from-date(date-from-datetime($i.send-time), subtract-datetime($i.send-time, datetime("2011-01-01T00:00:00")))}
@@ -1692,10 +1776,10 @@
current-date()
- * Get the current date
- * Arguments:None
+ * Gets the current date.
+ * Arguments: None
* Return Value:
- * A `date` value of the date when the function is called.
+ * A `date` value of the date when the function is called.
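+
+ * Example (illustrative; the result depends on the date at evaluation time):
+
+ let $d := current-date()
+ return $d
+
+ * A possible result (for a query run on April 5, 2013) is:
+
+ date("2013-04-05")
+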
### current-time ###
* Syntax:
@@ -1703,9 +1787,9 @@
current-time()
 * Gets the current time.
- * Arguments:None
+ * Arguments: None
* Return Value:
- * A `time` value of the time when the function is called.
+ * A `time` value of the time when the function is called.
### current-datetime ###
* Syntax:
@@ -1713,9 +1797,9 @@
current-datetime()
 * Gets the current datetime.
- * Arguments:None
+ * Arguments: None
* Return Value:
- * A `datetime` value of the datetime when the function is called.
+ * A `datetime` value of the datetime when the function is called.
* Example:
@@ -1738,11 +1822,11 @@
date-from-datetime(datetime_expression)
- * Get the date value from the given datetime value `datetime_expression`
+ * Gets the date value from the given datetime value `datetime_expression`.
* Arguments:
- * `datetime_expression`: A `datetime` value to be extracted from
+ * `datetime_expression`: A `datetime` value to be extracted from.
* Return Value:
- * A `date` value from the datetime.
+ * A `date` value from the datetime.
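+
+ * Example (a minimal illustration):
+
+ let $dt := datetime("2011-12-26T10:10:00.000Z")
+ return date-from-datetime($dt)
+
+ * The expected result is:
+
+ date("2011-12-26")
+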
### time-from-datetime ###
* Syntax:
@@ -1751,15 +1835,15 @@
 * Gets the time value from the given datetime value `datetime_expression`.
* Arguments:
- * `datetime_expression`: A `datetime` value to be extracted from
+ * `datetime_expression`: A `datetime` value to be extracted from.
* Return Value:
- * A `time` value from the datetime.
+ * A `time` value from the datetime.
* Example:
use dataverse TinySocial;
- for $i in dataset('TweetMessage')
+ for $i in dataset('TweetMessages')
where $i.send-time > datetime("2011-01-01T00:00:00")
return {"send-date": date-from-datetime($i.send-time), "send-time": time-from-datetime($i.send-time)}
@@ -1776,33 +1860,33 @@
date-from-unix-time-in-days(numeric_expression)
- * Get date representing the time after `numeric_expression` days since 1970-01-01
+ * Gets a date representing the time after `numeric_expression` days since 1970-01-01.
* Arguments:
- * `numeric_expression`: A `int8`/`int16`/`int32` value representing the number of days
+ * `numeric_expression`: An `int8`/`int16`/`int32` value representing the number of days.
* Return Value:
- * A `date` value as the time after `numeric_expression` days since 1970-01-01
+ * A `date` value as the time after `numeric_expression` days since 1970-01-01.
### datetime-from-unix-time-in-ms ###
* Syntax:
datetime-from-unix-time-in-ms(numeric_expression)
- * Get datetime representing the time after `numeric_expression` milliseconds since 1970-01-01T00:00:00Z
+ * Gets a datetime representing the time after `numeric_expression` milliseconds since 1970-01-01T00:00:00Z.
* Arguments:
- * `numeric_expression`: A `int8`/`int16`/`int32`/`int64` value representing the number of milliseconds
+ * `numeric_expression`: An `int8`/`int16`/`int32`/`int64` value representing the number of milliseconds.
* Return Value:
- * A `datetime` value as the time after `numeric_expression` milliseconds since 1970-01-01T00:00:00Z
+ * A `datetime` value as the time after `numeric_expression` milliseconds since 1970-01-01T00:00:00Z.
### time-from-unix-time-in-ms ###
* Syntax:
time-from-unix-time-in-ms(numeric_expression)
- * Get time representing the time after `numeric_expression` milliseconds since 00:00:00.000Z
+ * Gets a time representing the time after `numeric_expression` milliseconds since 00:00:00.000Z.
* Arguments:
+ * `numeric_expression`: An `int8`/`int16`/`int32` value representing the number of milliseconds.
+ * `numeric_expression`: A `int8`/`int16`/`int32` value representing the number of milliseconds.
* Return Value:
- * A `time` value as the time after `numeric_expression` milliseconds since 00:00:00.000Z
+ * A `time` value as the time after `numeric_expression` milliseconds since 00:00:00.000Z.
* Example:
@@ -1818,7 +1902,6 @@
{ "date": date("2013-04-05"), "datetime": datetime("2013-04-05T05:28:20.000Z"), "time": time("00:00:03.748Z") }
-
### subtract-date ###
* Syntax:
@@ -1826,10 +1909,10 @@
* Get the duration between two dates `date_start` and `date_end`
* Arguments:
- * `date_start`: the starting `date`
- * `date_end`: the ending `date`
+ * `date_start`: the starting `date`.
+ * `date_end`: the ending `date`.
* Return Value:
- * A `duration` value between `date_start` and `date_end`
+ * A `duration` value between `date_start` and `date_end`.
* Example:
@@ -1855,10 +1938,10 @@
* Get the duration between two times `time_start` and `time_end`
* Arguments:
- * `time_start`: the starting `time`
- * `time_end`: the ending `time`
+ * `time_start`: the starting `time`.
+ * `time_end`: the ending `time`.
* Return Value:
- * A `duration` value between `time_start` and `time_end`
+ * A `duration` value between `time_start` and `time_end`.
* Example:
@@ -1884,10 +1967,10 @@
* Get the duration between two datetimes `datetime_start` and `datetime_end`
* Arguments:
- * `datetime_start`: the starting `datetime`
- * `datetime_end`: the ending `datetime`
+ * `datetime_start`: the starting `datetime`.
+ * `datetime_end`: the ending `datetime`.
* Return Value:
- * A `duration` value between `datetime_start` and `datetime_end`
+ * A `duration` value between `datetime_start` and `datetime_end`.
* Example:
@@ -1908,16 +1991,39 @@
{ "id1": 3, "id2": 7, "diff": duration("P28D") }
{ "id1": 7, "id2": 1, "diff": duration("P13D") }
+### interval-start-from-date/time/datetime ###
+ * Syntax:
+
+ interval-start-from-date/time/datetime(date/time/datetime, duration)
+
+ * Constructs an `interval` value from the given starting `date`/`time`/`datetime` and the `duration` that the interval lasts.
+ * Arguments:
+ * `date/time/datetime`: a `string` representing a `date`, `time`, or `datetime`, or an actual `date`/`time`/`datetime` value, giving the starting time point.
+ * `duration`: a `string` or `duration` value representing the duration of the interval. Note that the duration cannot be negative.
+ * Return Value:
+ * An `interval` value starting at the given time point and lasting for the given duration.
+
+ * Example:
+
+ let $itv1 := interval-start-from-date("1984-01-01", "P1Y")
+ let $itv2 := interval-start-from-time(time("02:23:28.394"), "PT3H24M")
+ let $itv3 := interval-start-from-datetime("1999-09-09T09:09:09.999", duration("P2M30D"))
+ return {"interval1": $itv1, "interval2": $itv2, "interval3": $itv3}
+
+ * The expected result is:
+
+ { "interval1": interval-date("1984-01-01, 1985-01-01"), "interval2": interval-time("02:23:28.394Z, 05:47:28.394Z"), "interval3": interval-datetime("1999-09-09T09:09:09.999Z, 1999-12-09T09:09:09.999Z") }
+
### get-interval-start, get-interval-end ###
* Syntax:
get-interval-start/get-interval-end(interval)
- * Get the start/end of the given interval
+ * Gets the start/end of the given interval.
* Arguments:
- * `interval`: the interval to be accessed
+ * `interval`: the interval to be accessed.
* Return Value:
- * A `time`, `date` or `datetime` (depending on the time instances of the interval) representing the starting or ending time.
+ * A `time`, `date`, or `datetime` (depending on the time instances of the interval) representing the starting or ending time.
* Example:
@@ -1927,4 +2033,66 @@
* The expected result is:
- { "start": date("1984-01-01"), "end": date("1985-01-01") }
\ No newline at end of file
+ { "start": date("1984-01-01"), "end": date("1985-01-01") }
+
+### interval-bin ###
+ * Syntax:
+
+ interval-bin(time-to-bin, time-bin-anchor, duration-bin-size)
+
+ * Returns the `interval` value representing the bin containing the `time-to-bin` value.
+ * Arguments:
+ * `time-to-bin`: a date/time/datetime value representing the time to be binned.
+ * `time-bin-anchor`: a date/time/datetime value representing where a bin starts (the anchor). The type of this argument must be the same as that of the `time-to-bin` argument.
+ * `duration-bin-size`: a duration value representing the size of the bin; its type must be `year-month-duration`, `day-time-duration`, or `null`. The duration type must be compatible with the type of `time-to-bin`, i.e., the arithmetic operation between the two types must be defined. Specifically, one of the following arithmetic operations must be supported:
+ * `datetime` +|- `year-month-duration`
+ * `datetime` +|- `day-time-duration`
+ * `date` +|- `year-month-duration`
+ * `date` +|- `day-time-duration`
+ * `time` +|- `day-time-duration`
+ * Return Value:
+ * An `interval` value representing the bin containing the `time-to-bin` value. Note that the internal type of this interval value is the same as the type of `time-to-bin`.
+
+ * Example:
+
+ let $c1 := date("2010-10-30")
+ let $c2 := datetime("-1987-11-19T23:49:23.938")
+ let $c3 := time("12:23:34.930+07:00")
+
+ return { "bin1": interval-bin($c1, date("1990-01-01"), year-month-duration("P1Y")),
+ "bin2": interval-bin($c2, datetime("1990-01-01T00:00:00.000Z"), year-month-duration("P6M")),
+ "bin3": interval-bin($c3, time("00:00:00"), day-time-duration("PD1M")),
+ "bin4": interval-bin($c2, datetime("2013-01-01T00:00:00.000"), day-time-duration("PT24H"))
+ }
+
+ * The expected result is:
+
+ { "bin1": interval-date("2010-01-01, 2011-01-01"),
+ "bin2": interval-datetime("-1987-07-01T00:00:00.000Z, -1986-01-01T00:00:00.000Z"),
+ "bin3": interval-time("05:23:00.000Z, 05:24:00.000Z"),
+ "bin4": interval-datetime("-1987-11-19T00:00:00.000Z, -1987-11-20T00:00:00.000Z")}
+
+## <a id="OtherFunctions">Other Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+### is-null ###
+ * Syntax:
+
+ is-null(var)
+
+ * Checks whether the given variable is a `null` value.
+ * Arguments:
+ * `var`: A variable (any type is allowed).
+ * Return Value:
+ * A `boolean` value: `true` if the variable is `null`, and `false` otherwise.
+
+ * Example:
+
+ for $m in ['hello', 'world', null]
+ where not(is-null($m))
+ return $m
+
+
+ * The expected result is:
+
+ "hello"
+ "world"
diff --git a/asterix-doc/src/site/markdown/aql/manual.md b/asterix-doc/src/site/markdown/aql/manual.md
index f1c3fbd..882d331 100644
--- a/asterix-doc/src/site/markdown/aql/manual.md
+++ b/asterix-doc/src/site/markdown/aql/manual.md
@@ -1,5 +1,12 @@
# The Asterix Query Language, Version 1.0
-## 1. Introduction
+
+## <a id="toc">Table of Contents</a> ##
+
+* [1. Introduction](#Introduction)
+* [2. Expressions](#Expressions)
+* [3. Statements](#Statements)
+
+## <a id="Introduction">1. Introduction</a><font size="4"> <a href="#toc">[Back to TOC]</a></font>
This document is intended as a reference guide to the full syntax
and semantics of the Asterix Query Language (AQL), the language for talking to AsterixDB.
@@ -14,7 +21,7 @@
We list and briefly explain each of the productions in the AQL grammar, offering
examples for clarity in cases where doing so seems needed or helpful.
-## 2. Expressions
+## <a id="Expressions">2. Expressions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font>
Query ::= Expression
@@ -53,14 +60,25 @@
#### Literals
- Literal ::= StringLiteral
- | <INTEGER_LITERAL>
- | <FLOAT_LITERAL>
- | <DOUBLE_LITERAL>
- | "null"
- | "true"
- | "false"
- StringLiteral ::= <STRING_LITERAL>
+ Literal ::= StringLiteral
+ | IntegerLiteral
+ | FloatLiteral
+ | DoubleLiteral
+ | "null"
+ | "true"
+ | "false"
+ StringLiteral ::= ("\"" (<ESCAPE_QUOT> | ~["\""])* "\"")
+ | ("\'" (<ESCAPE_APOS> | ~["\'"])* "\'")
+ <ESCAPE_QUOT> ::= "\\\""
+ <ESCAPE_APOS> ::= "\\\'"
+ IntegerLiteral ::= <DIGITS>
+ <DIGITS> ::= ["0" - "9"]+
+ FloatLiteral ::= <DIGITS> ( "f" | "F" )
+ | <DIGITS> ( "." <DIGITS> ( "f" | "F" ) )?
+ | "." <DIGITS> ( "f" | "F" )
+ DoubleLiteral ::= <DIGITS>
+ | <DIGITS> ( "." <DIGITS> )?
+ | "." <DIGITS>
Literals (constants) in AQL can be strings, integers, floating point values,
double values, boolean constants, or the constant value null.
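+
+For illustration, the following (hypothetical) record constructor uses one literal of each kind:
+
+        { "s": "a string", "i": 42, "f": 1.5f, "d": 2.5, "b": true, "n": null }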
@@ -78,6 +96,8 @@
#### Variable References
VariableRef ::= <VARIABLE>
+ <VARIABLE> ::= "$" <LETTER> (<LETTER> | <DIGIT> | "_")*
+ <LETTER> ::= ["A" - "Z", "a" - "z"]
A variable in AQL can be bound to any legal ADM value.
A variable reference refers to the value to which an in-scope variable is bound.
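+
+For example, in the following (illustrative) expression, the reference `$x` evaluates to the record bound by the enclosing `let` clause:
+
+        let $x := { "name": "jane" }
+        return $x.name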
@@ -125,6 +145,8 @@
DatasetAccessExpression ::= "dataset" ( ( Identifier ( "." Identifier )? )
| ( "(" Expression ")" ) )
Identifier ::= <IDENTIFIER> | StringLiteral
+ <IDENTIFIER> ::= <LETTER> (<LETTER> | <DIGIT> | <SPECIALCHARS>)*
+ <SPECIALCHARS> ::= ["$", "_", "-"]
Querying Big Data is the main point of AsterixDB and AQL.
Data in AsterixDB reside in datasets (collections of ADM records),
@@ -133,6 +155,8 @@
Dataset access expressions are most commonly used in FLWOR expressions, where variables
are bound to their contents.
+Note that the Identifier that identifies a dataset (or any other Identifier in AQL) can also be a StringLiteral.
+This is especially useful to avoid conflicts with AQL keywords (e.g. "dataset", "null", or "type").
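+
+For example, assuming a dataset that happens to be named "type" (a hypothetical name chosen because it collides with an AQL keyword), it can still be accessed by quoting its name:
+
+        for $x in dataset("type")
+        return $x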
The following are three examples of legal dataset access expressions.
The first one accesses a dataset called Customers in the dataverse called SalesDV.
@@ -179,6 +203,11 @@
"project members": {{ "vinayakb", "dtabass", "chenli" }}
}
+##### Note
+
+When constructing nested records, there needs to be a space between the closing braces to avoid confusion with the `}}` token that ends an unordered list constructor:
+`{ "a" : { "b" : "c" }}` will fail to parse while `{ "a" : { "b" : "c" } }` will work.
+
### Path Expressions
ValueExpr ::= PrimaryExpr ( Field | Index )*
@@ -235,7 +264,7 @@
### Arithmetic Expressions
AddExpr ::= MultExpr ( ( "+" | "-" ) MultExpr )*
- MultExpr ::= UnaryExpr ( ( "*" | "/" | "%" | <CARET> | "idiv" ) UnaryExpr )*
+ MultExpr ::= UnaryExpr ( ( "*" | "/" | "%" | "^" | "idiv" ) UnaryExpr )*
UnaryExpr ::= ( ( "+" | "-" ) )? ValueExpr
AQL also supports the usual cast of characters for arithmetic expressions.
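+
+For instance, the following illustrative query exercises the operators above, including `^` (exponentiation) and `idiv` (integer division):
+
+        { "sum": 1 + 2, "power": 2 ^ 3, "integer-division": 7 idiv 2, "modulo": 7 % 2 }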
@@ -406,7 +435,7 @@
every $x in [ 1, 2, 3 ] satisfies $x < 3
some $x in [ 1, 2, 3 ] satisfies $x < 3
-## 3. Statements
+## <a id="Statements">3. Statements</a> <font size="4"><a href="#toc">[Back to TOC]</a></font>
Statement ::= ( SingleStatement ( ";" )? )* <EOF>
SingleStatement ::= DataverseDeclaration
@@ -523,12 +552,12 @@
##### Example
create type FacebookUserType as closed {
- id: int32,
- alias: string,
- name: string,
- user-since: datetime,
- friend-ids: {{ int32 }},
- employment: [ EmploymentType ]
+ "id" : int32,
+ "alias" : string,
+ "name" : string,
+ "user-since" : datetime,
+ "friend-ids" : {{ int32 }},
+ "employment" : [ EmploymentType ]
}
#### Datasets
@@ -541,8 +570,8 @@
Configuration ::= "(" ( KeyValuePair ( "," KeyValuePair )* )? ")"
KeyValuePair ::= "(" StringLiteral "=" StringLiteral ")"
Properties ::= ( "(" Property ( "," Property )* ")" )?
- Property ::= Identifier "=" ( StringLiteral | <INTEGER_LITERAL> )
- FunctionSignature ::= FunctionOrTypeName "@" <INTEGER_LITERAL>
+ Property ::= Identifier "=" ( StringLiteral | IntegerLiteral )
+ FunctionSignature ::= FunctionOrTypeName "@" IntegerLiteral
PrimaryKey ::= "primary" "key" Identifier ( "," Identifier )*
The create dataset statement is used to create a new dataset.
@@ -552,7 +581,7 @@
An Internal dataset (the default) is a dataset that is stored in and managed by AsterixDB.
It must have a specified unique primary key that can be used to partition data across nodes of an AsterixDB cluster.
The primary key is also used in secondary indexes to uniquely identify the indexed primary data records.
-An External dataset is stored outside of AsterixDB, e.g., in HDFS or in the local filesystem(s) of the cluster's nodes.
+An External dataset is stored outside of AsterixDB (currently datasets in HDFS or on the local filesystem(s) of the cluster's nodes are supported).
External dataset support allows AQL queries to treat external data as though it were stored in AsterixDB,
making it possible to query "legacy" file data (e.g., Hive data) without having to physically import it into AsterixDB.
For an external dataset, an appropriate adaptor must be selected to handle the nature of the desired external data.
@@ -565,14 +594,15 @@
create internal dataset FacebookUsers(FacebookUserType) primary key id;
The next example creates an external dataset for storing LineitemType records.
-The choice of the `localfs` adaptor means that its data will reside in the local filesystem of the cluster nodes.
-The create statement provides several parameters used by the localfs adaptor;
-e.g., the file format is delimited text with vertical bar being the field delimiter.
+The choice of the `hdfs` adaptor means that its data will reside in HDFS.
+The create statement provides parameters used by the hdfs adaptor:
+the URL and path needed to locate the data in HDFS and a description of the data format.
##### Example
-
- create external dataset Lineitem(LineitemType) using localfs (
- ("path"="127.0.0.1://SOURCE_PATH"),
+ create external dataset Lineitem(LineitemType) using hdfs (
+ ("hdfs"="hdfs://HOST:PORT"),
+ ("path"="HDFS_PATH"),
+ ("input-format"="text-input-format"),
("format"="delimited-text"),
("delimiter"="|"));
@@ -583,7 +613,7 @@
IndexType ::= "btree"
| "rtree"
| "keyword"
- | "ngram" "(" <INTEGER_LITERAL> ")"
+ | "ngram" "(" IntegerLiteral ")"
The create index statement creates a secondary index on one or more fields of a specified dataset.
Supported index types include `btree` for totally ordered datatypes,
@@ -672,6 +702,7 @@
The load statement is used to initially populate a dataset via bulk loading of data from an external file.
An appropriate adaptor must be selected to handle the nature of the desired external data.
+The load statement accepts the same adaptors and the same parameters as external datasets.
(See the [guide to external data](externaldata.html) for more information on the available adaptors.)
The following example shows how to bulk load the FacebookUsers dataset from an external file containing
@@ -730,5 +761,6 @@
for $praise in {{ "great", "brilliant", "awesome" }}
return
- string-concat(["AsterixDB is ", $praise]
+ string-concat(["AsterixDB is ", $praise])
+
diff --git a/asterix-doc/src/site/markdown/aql/primer.md b/asterix-doc/src/site/markdown/aql/primer.md
index fc8ea9a..92fcb01 100644
--- a/asterix-doc/src/site/markdown/aql/primer.md
+++ b/asterix-doc/src/site/markdown/aql/primer.md
@@ -346,11 +346,9 @@
In this section we introduce AQL via a set of example queries, along with their expected results,
based on the data above, to help you get started.
Many of the most important features of AQL are presented in this set of representative queries.
-You can find a BNF description of the current AQL grammar at [wiki:AsterixDBGrammar], and someday
-in the not-too-distant future we will also provide a complete reference manual for the language.
-In the meantime, this will get you started down the path of using AsterixDB.
-A more complete list of the supported AsterixDB primitive types and built-in functions can be
-found at [Asterix Data Model (ADM)](datamodel.html) and [Asterix Functions](functions.html).
+You can find more details in the documents on the [Asterix Data Model (ADM)](datamodel.html)
+and the [AQL Reference Manual](manual.html); a complete list of built-in functions is available
+in the [Asterix Functions](functions.html) document.
AQL is an expression language.
Even the expression 1+1 is a valid AQL query that evaluates to 2.
diff --git a/asterix-doc/src/site/markdown/aql/similarity.md b/asterix-doc/src/site/markdown/aql/similarity.md
index 244103c..12cfb10 100644
--- a/asterix-doc/src/site/markdown/aql/similarity.md
+++ b/asterix-doc/src/site/markdown/aql/similarity.md
@@ -1,7 +1,15 @@
# AsterixDB Support of Similarity Queries #
-## Motivation ##
+## <a id="toc">Table of Contents</a> ##
+
+* [Motivation](#Motivation)
+* [Data Types and Similarity Functions](#DataTypesAndSimilarityFunctions)
+* [Similarity Selection Queries](#SimilaritySelectionQueries)
+* [Similarity Join Queries](#SimilarityJoinQueries)
+* [Using Indexes to Support Similarity Queries](#UsingIndexesToSupportSimilarityQueries)
+
+## <a id="Motivation">Motivation</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
Similarity queries are widely used in applications where users need to
find records that satisfy a similarity predicate, while exact matching
@@ -14,7 +22,7 @@
users who have similar friends. To meet this type of needs, AsterixDB
supports similarity queries using efficient indexes and algorithms.
-## Data Types and Similarity Functions ##
+## <a id="DataTypesAndSimilarityFunctions">Data Types and Similarity Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
AsterixDB supports [edit distance](http://en.wikipedia.org/wiki/Levenshtein_distance) (on strings) and
[Jaccard](http://en.wikipedia.org/wiki/Jaccard_index) (on sets). For
@@ -33,7 +41,7 @@
to convert strings to sets, and the
[similarity functions](functions.html#Similarity_Functions).
-## Similarity Selection Queries ##
+## <a id="SimilaritySelectionQueries">Similarity Selection Queries</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
The following [query](functions.html#edit-distance)
asks for all the Facebook users whose name is similar to
@@ -78,7 +86,7 @@
using `simfunction` and then specify the threshold `0.6f` using
`simthreshold`.
-## Similarity Join Queries ##
+## <a id="SimilarityJoinQueries">Similarity Join Queries</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
AsterixDB supports fuzzy joins between two sets. The following
[query](primer.html#Query_5_-_Fuzzy_Join)
@@ -103,7 +111,7 @@
}
};
-## Using Indexes to Support Similarity Queries ##
+## <a id="UsingIndexesToSupportSimilarityQueries">Using Indexes to Support Similarity Queries</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
AsterixDB uses two types of indexes to support similarity queries, namely
"ngram index" and "keyword index".
diff --git a/asterix-doc/src/site/markdown/index.md b/asterix-doc/src/site/markdown/index.md
index 4ee2a5f..4d864bc 100644
--- a/asterix-doc/src/site/markdown/index.md
+++ b/asterix-doc/src/site/markdown/index.md
@@ -1,19 +1,12 @@
# AsterixDB: A Big Data Management System #
-## What Is AsterixDB? ##
+## <a id="toc">Table of Contents</a> ##
+* [What Is AsterixDB?](#WhatIsAsterixDB)
+* [Getting and Using AsterixDB](#GettingAndUsingAsterixDB)
-Welcome to the new home of the AsterixDB Big Data Management System (BDMS).
-The AsterixDB BDMS is the result of about 3.5 years of R&D involving researchers at UC Irvine, UC Riverside, and UC San Diego.
-The AsterixDB code base now consists of roughly 250K lines of Java code that has been co-developed at UC Irvine and UC Riverside.
+## <a id="WhatIsAsterixDB">What Is AsterixDB?</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
-Initiated in 2009, the NSF-sponsored ASTERIX project has been developing new technologies for ingesting, storing, managing, indexing, querying, and analyzing vast quantities of semi-structured information.
-The project has been combining ideas from three distinct areas---semi-structured data, parallel databases, and data-intensive computing (a.k.a. today's Big Data platforms)---in order to create a next-generation, open-source software platform that scales by running on large, shared-nothing commodity computing clusters.
-The ASTERIX effort has been targeting a wide range of semi-structured information, ranging from "data" use cases---where information is well-typed and highly regular---to "content" use cases---where data tends to be irregular, much of each datum may be textual, and the ultimate schema for the various data types involved may be hard to anticipate up front.
-The ASTERIX project has been addressing technical issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging time-tested parallel database techniques with modern data-intensive computing techniques to support performant yet declarative solutions to the problem of storing and analyzing semi-structured information effectively.
-The first fruits of this labor have been captured in the AsterixDB system that is now being released in preliminary or "Beta" release form.
-We are hoping that the arrival of AsterixDB will mark the beginning of the "BDMS era", and we hope that both the Big Data community and the database community will find the AsterixDB system to be interesting and useful for a much broader class of problems than can be addressed with any one of today's current Big Data platforms and related technologies (e.g., Hadoop, Pig, Hive, HBase, MongoDB, and so on). One of our project mottos has been "one size fits a bunch"---at least that has been our aim. For more information about the research effort that led to the birth of AsterixDB, please refer to our NSF project web site: [http://asterix.ics.uci.edu/](http://asterix.ics.uci.edu/).
-
-In a nutshell, AsterixDB is a full-function BDMS with a rich feature set that distinguishes it from pretty much any other Big Data platform that's out and available today. We believe that its feature set makes it well-suited to modern needs such as web data warehousing and social data storage and analysis. AsterixDB has:
+In a nutshell, AsterixDB is a full-function BDMS (Big Data Management System) with a rich feature set that distinguishes it from pretty much any other Big Data platform that's out and available today. We believe that its feature set makes it well-suited to modern needs such as web data warehousing and social data storage and analysis. AsterixDB has:
* A semistructured NoSQL style data model (ADM) resulting from extending JSON with object database ideas
* An expressive and declarative query language (AQL) that supports a broad range of queries and analysis over semistructured data
@@ -25,16 +18,14 @@
* Support for fuzzy and spatial queries as well as for more traditional parametric queries
* Basic transactional (concurrency and recovery) capabilities akin to those of a NoSQL store
-## Getting and Using AsterixDB ##
+## <a id="GettingAndUsingAsterixDB">Getting and Using AsterixDB</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
You are most likely here because you are interested in getting your hands on AsterixDB---so you would like to know how to get it, how to set it up, and how to use it.
-Someday our plan is to have comprehensive documentation for AsterixDB and its data model (ADM) and query language (AQL) here on this wiki.
-For the Beta release, we've got a start; for the Beta release a month or so from now, we will hopefully have much more.
-The following is a list of the wiki pages and supporting documents that we have available today:
+The following is a list of the supporting documents that we have available today:
1. [Installing AsterixDB using Managix](install.html) :
This is our installation guide, and it is where you should start.
-This document will tell you how to obtain, install, and manage instances of [AsterixDB](https://asterixdb.googlecode.com/files/asterix-installer-0.0.4-binary-assembly.zip), including both single-machine setup (for developers) as well as cluster installations (for deployment in its intended form).
+This document will tell you how to obtain, install, and manage instances of [AsterixDB](http://asterixdb.ics.uci.edu/download/0.8.3/asterix-installer-0.8.3-binary-assembly.zip), including both single-machine setup (for developers) as well as cluster installations (for deployment in its intended form).
2. [AsterixDB 101: An ADM and AQL Primer](aql/primer.html) :
This is a first-timers introduction to the user model of the AsterixDB BDMS, by which we mean the view of AsterixDB as seen from the perspective of an "average user" or Big Data application developer.
@@ -42,7 +33,7 @@
This document presents a tiny "social data warehousing" example and uses it as a backdrop for describing, by example, the key features of AsterixDB.
By working through this document, you will learn how to define the artifacts needed to manage data in AsterixDB, how to load data into the system, how to use most of the basic features of its query language, and how to insert and delete data dynamically.
-3. [Asterix Data Model (ADM)](aql/datamodel.html), [Asterix Functions](aql/functions.html), and [Asterix Query Language (AQL)](aql/manual.html) :
+3. [Asterix Data Model (ADM)](aql/datamodel.html), [Asterix Functions](aql/functions.html), [Asterix functions for Allen's Relations](aql/allens.html), and [Asterix Query Language (AQL)](aql/manual.html) :
These are reference documents that catalog the primitive data types and built-in functions available in AQL and the reference manual for AQL itself.
5. [REST API to AsterixDB](api.html) :
diff --git a/asterix-doc/src/site/markdown/install.md b/asterix-doc/src/site/markdown/install.md
index 9ba8ea3..0b0d364 100644
--- a/asterix-doc/src/site/markdown/install.md
+++ b/asterix-doc/src/site/markdown/install.md
@@ -1,7 +1,17 @@
# Introduction #
-This is a quickstart guide for getting ASTERIX running in a distributed environment. This guide also introduces the ASTERIX installer (nicknamed _*Managix*_) and describes how it can be used to create/manage an ASTERIX instance. By following the simple steps described in this guide, you will get a running instance of ASTERIX. You shall be able to use ASTERIX from its Web interface and manage its lifecycle using Managix. This document assumes that you are running some version of _*Linux*_ or _*MacOS X*_.
-## Prerequisites for Installing ASTERIX ##
+## <a id="toc">Table of Contents</a> ##
+
+* [Prerequisites for Installing AsterixDB](#PrerequisitesForInstallingAsterixDB)
+* [Section 1: Single-Machine AsterixDB installation](#Section1SingleMachineAsterixDBInstallation)
+* [Section 2: Single-Machine AsterixDB installation (Advanced)](#Section2SingleMachineAsterixDBInstallationAdvanced)
+* [Section 3: Installing AsterixDB on a Cluster of Multiple Machines](#Section3InstallingAsterixDBOnAClusterOfMultipleMachines)
+* [Section 4: Managing the Lifecycle of an AsterixDB Instance](#Section4ManagingTheLifecycleOfAnAsterixDBInstance)
+* [Section 5: Frequently Asked Questions](#Section5FAQ)
+
+This is a quickstart guide for getting AsterixDB running in a distributed environment. This guide also introduces the AsterixDB installer (nicknamed _*Managix*_) and describes how it can be used to create and manage an AsterixDB instance. By following the simple steps described in this guide, you will get a running instance of AsterixDB. You shall be able to use AsterixDB from its Web interface and manage its lifecycle using Managix. This document assumes that you are running some version of _*Linux*_ or _*MacOS X*_.
+
+## <a id="PrerequisitesForInstallingAsterixDB">Prerequisites for Installing AsterixDB</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
Prerequisite:
* [JDK7](http://www.oracle.com/technetwork/java/javase/downloads/index.html) (Otherwise known as JDK 1.7).
@@ -25,7 +35,7 @@
* For Mac: [JDK 7 Mac Install](http://docs.oracle.com/javase/7/docs/webnotes/install/mac/mac-jdk.html)
JDK would be installed at /Library/Java/JavaVirtualMachines/jdk-version/Contents/Home .
-The java installation directory is referred as JAVA_HOME. Since we upgraded/installed Java, we need to ensure JAVA_HOME points to the installation directory of JDK 7. Modify your ~/.bash_profile (or ~/.bashrc) and define JAVA_HOME accordingly. After modifying, execute the following:
+The Java installation directory is referred to as JAVA_HOME. Since we upgraded/installed Java, we need to ensure JAVA_HOME points to the installation directory of JDK 7. Modify your ~/.bash_profile (or ~/.bashrc) and define JAVA_HOME accordingly. After the modification, execute the following:
$ java -version
@@ -36,36 +46,27 @@
$ echo "PATH=$JAVA_HOME/bin:$PATH" >> ~/.bash_profile (or ~/.bashrc)
$ source ~/.bash_profile (or ~/.bashrc)
-We also need to ensure that $JAVA_HOME/bin is in the PATH. $JAVA_HOME/bin should be included in the PATH value. We need to change the if $JAVA_HOME/bin is already in the PATH, we shall simply execute the following:
-
- $ java
-
-If you get the following message, you need to alter the PATH variable in your ~/.bash_profile or ~/.bashrc (whichever you use).
-
-
- -bash: java: command not found
-
-## Section 1: Single-Machine ASTERIX installation ##
-We assume a user Joe with a home directory as /home/joe. Please note that on Mac, the home directory for user Joe would be /Users/joe.
+## <a id="Section1SingleMachineAsterixDBInstallation">Section 1: Single-Machine AsterixDB installation</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+We assume a user called "Joe" whose home directory is /home/joe. On a Mac, the home directory for user Joe would be /Users/joe.
### Configuring Environment ###
-Ensure that JAVA_HOME variable is defined and points to the the java installation directory on your machine. To verify, execute the following.
+Ensure that the JAVA_HOME variable is defined and points to the Java installation directory on your machine. To verify, execute the following:
$ echo $JAVA_HOME
-If you do not see any output, JAVA_HOME is not defined. We need to add the following line to your profile located at /home/joe/.bash_profile or /home/joe/.bashrc, whichever you are using. If you do not any of these files, create a ~/.bash_profile.
+If you do not see any output, JAVA_HOME is not defined. We need to add the following line to your profile located at /home/joe/.bash_profile or /home/joe/.bashrc, whichever you are using. If you do not have any of these files, create a ~/.bash_profile file.
export JAVA_HOME=<Path to Java installation directory>
-After you have edited ~/.bash_profile (or ~/.bashrc), execute the following to make the changes effective in current shell.
+After you have edited ~/.bash_profile (or ~/.bashrc), execute the following to make the changes effective in current shell:
$ source /home/joe/.bash_profile (or /home/joe/.bashrc)
-Before proceeding, verify that JAVA_HOME is defined by executing the following.
+Before proceeding, verify that JAVA_HOME is defined by executing the following:
$ echo $JAVA_HOME
@@ -74,7 +75,7 @@
If SSH is not enabled on your system, please follow the instruction below to enable/install it or else skip to the section [Configuring Password-less SSH](#Configuring_Password-less_SSH).
#### Enabling SSH on Mac ####
-The Apple Mac OS X operating system has SSH installed by default but the SSH daemon is not enabled. This means you can’t login remotely or do remote copies until you enable it. To enable it, go to ‘System Preferences’. Under ‘Internet & Networking’ there is a ‘Sharing’ icon. Run that. In the list that appears, check the ‘Remote Login’ option. Also check the "All users" radio button for "Allow access for". This starts the SSH daemon immediately and you can remotely login using your username. The ‘Sharing’ window shows at the bottom the name and IP address to use. You can also find this out using ‘whoami’ and ‘ifconfig’ from the Terminal application.
+The Apple Mac OS X operating system has SSH installed by default but the SSH daemon is not enabled. This means you can't log in remotely or do remote copies until you enable it. To enable it, go to 'System Preferences'. Under 'Internet & Networking' there is a 'Sharing' icon. Run that. In the list that appears, check the 'Remote Login' option. Also check the "All users" radio button for "Allow access for". This starts the SSH daemon immediately and you can remotely log in using your username. The 'Sharing' window shows at the bottom the name and IP address to use. You can also find this out using 'whoami' and 'ifconfig' from the Terminal application.
#### Enabling SSH on Linux ####
@@ -84,7 +85,7 @@
#### Configuring Password-less SSH ####
-For our single-machine setup of ASTERIX, we need to configure password-less SSH access to localhost. We assume that you are on the machine where you want to install ASTERIX. To verify if you already have password-less SSH configured, execute the following.
+For our single-machine setup of AsterixDB, we need to configure password-less SSH access to localhost. We assume that you are on the machine where you want to install AsterixDB. To verify if you already have password-less SSH configured, execute the following:
$ ssh 127.0.0.1
@@ -103,7 +104,6 @@
$ ssh 127.0.0.1
Last login: Sat Mar 23 22:52:49 2013
-
[Important: Password-less SSH requires the use of a (public,private) key-pair. The key-pair is located as a pair of files under
$HOME/.ssh directory. It is required that the (public,private) key-pair files have default names (id_rsa.pub, id_rsa) respectively.
If you are using different names, please rename the files to use the default names]
@@ -129,7 +129,7 @@
/home/joe/.ssh/id_rsa already exists.
Overwrite (y/n)?
-You should see an output similar to one shown below.
+You should see an output similar to the one shown below:
The key fingerprint is:
@@ -158,14 +158,14 @@
$ ssh 127.0.0.1
-You may see an output similar to one shown below.
+You may see an output similar to the one shown below:
The authenticity of host '127.0.0.1 (127.0.0.1)' can't be established.
- RSA key fingerprint is aa:7b:51:90:74:39:c4:f6:28:a2:9d:47:c2:8d:33:31.
+ RSA key fingerprint is aa:7b:51:90:74:39:c4:f6:28:a2:9d:47:c2:8d:33:31.
Are you sure you want to continue connecting (yes/no)?
-Type 'yes' and press the enter key. You should see an output similar to one shown below.
+Type 'yes' and press the Enter key. You should see an output similar to the one shown below:
Warning: Permanently added '127.0.0.1' (RSA) to the list of known hosts.
@@ -185,9 +185,9 @@
Connection to 127.0.0.1 closed.
### Configuring Managix ###
-You will need the ASTERIX installer (a.k.a Managix). Download Managix from [here](https://asterixdb.googlecode.com/files/asterix-installer-0.0.5-binary-assembly.zip); this includes the bits for Managix as well as ASTERIX.
+You will need the AsterixDB installer (a.k.a. Managix). Download Managix from [here](http://asterixdb.ics.uci.edu/download/0.8.3/asterix-installer-0.8.3-binary-assembly.zip); this includes the bits for Managix as well as AsterixDB.
-Unzip the Managix zip bundle to an appropriate location. You may create a sub-directory: asterix-mgmt (short for asterix-management) under your home directory. We shall refer to this location as MANAGIX_HOME.
+Unzip the Managix zip bundle to an appropriate location. You may create a sub-directory called "asterix-mgmt" (short for asterix-management) under your home directory. We shall refer to this location as MANAGIX_HOME.
$ cd ~
@@ -197,7 +197,7 @@
/home/joe/asterix-mgmt> $ export MANAGIX_HOME=`pwd`
/home/joe/asterix-mgmt> $ export PATH=$PATH:$MANAGIX_HOME/bin
-It is recommended that you add $MANAGIX_HOME/bin to your PATH variable in your bash profile . This can be done by executing the following.
+It is recommended that you add $MANAGIX_HOME/bin to your PATH variable in your bash profile. This step can be done by executing the following:
currentDir=`pwd`
@@ -206,12 +206,12 @@
Above, use ~/.bashrc instead of ~/.bash_profile if you are using ~/.bashrc .
-To be able to create an ASTERIX instance and manage its lifecycle, the Managix requires you to configure a set of configuration files namely:
+To be able to create an AsterixDB instance and manage its lifecycle, Managix requires you to configure a set of configuration files, namely:
* `conf/managix-conf.xml`: A configuration XML file that contains configuration settings for Managix.
* A configuration XML file that describes the nodes in the cluster, e.g., `$MANAGIX_HOME/clusters/local/local.xml`.
-Since we intend to run ASTERIX on a single node, Managix can auto-configure itself and populate the above mentioned configuration files. To auto-configure Managix, execute the following in the MANAGIX_HOME directory:
+Since we intend to run AsterixDB on a single node, Managix can auto-configure itself and populate the above configuration files. To auto-configure Managix, execute the following in the MANAGIX_HOME directory:
/home/joe/asterix-mgmt> $ managix configure
@@ -228,18 +228,18 @@
INFO: Environment [OK]
INFO: Cluster configuration [OK]
-### Creating an ASTERIX instance ###
-Now that we have configured Managix, we shall next create an ASTERIX instance. An ASTERIX instance is identified by a unique name and is created using the `create` command. The usage description for the `create` command can be obtained by executing the following.
+### Creating an AsterixDB instance ###
+Now that we have configured Managix, we shall next create an AsterixDB instance. An AsterixDB instance is identified by a unique name and is created using the `create` command. The usage description for the `create` command can be obtained by executing the following:
$ managix help -cmd create
- Creates an ASTERIX instance with a specified name. Post creation, the instance is in ACTIVE state,
+ Creates an AsterixDB instance with a specified name. Post creation, the instance is in ACTIVE state,
indicating its availability for executing statements/queries.
Usage arguments/options:
- -n Name of the ASTERIX instance.
+ -n Name of the AsterixDB instance.
-c Path to the cluster configuration file
-We shall now use the create command to create an ASTERIX instance by the name "my_asterix". In doing so, we shall use the cluster configuration file that was auto-generated by managix.
+We shall now use the `create` command to create an AsterixDB instance by the name "my_asterix". In doing so, we shall use the cluster configuration file that was auto-generated by Managix.
$ managix create -n my_asterix -c $MANAGIX_HOME/clusters/local/local.xml
@@ -252,7 +252,7 @@
Web-Url:http://127.0.0.1:19001
State:ACTIVE
-The third line above shows the web-url http://127.0.0.1:19001 for ASTERIX's web-interface. The ASTERIX instance is in the 'ACTIVE' state indicating that you may access the web-interface by navigating to the web-url.
+The third line above shows the web url http://127.0.0.1:19001 for the AsterixDB instance's web interface. The AsterixDB instance is in the 'ACTIVE' state, indicating that you may access the web interface by navigating to the web url.
Type in the following "Hello World" query in the box:
@@ -260,23 +260,23 @@
let $message := 'Hello World!'
return $message
-Press the "Execute" button. If the query result shows on the output box, then Congratulations! You have successfully created an ASTERIX instance!
+Press the "Run" button. If the query result shows on the output box, then Congratulations! You have successfully created an AsterixDB instance!
-## Section 2: Single-Machine ASTERIX installation (Advanced) ##
-We assume that you have successfully completed the single-machine ASTERIX installation by following the instructions above in section [ASTERIX installation](#Section_1:_Single-Machine_ASTERIX_installation Single Machine). In this section, we shall cover advanced topics related to ASTERIX configuration. Before we proceed, it is imperative to go through some preliminary concepts related to ASTERIX runtime.
+## <a id="Section2SingleMachineAsterixDBInstallationAdvanced">Section 2: Single-Machine AsterixDB installation (Advanced)</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+We assume that you have successfully completed the single-machine AsterixDB installation by following the instructions above in [Section 1: Single-Machine AsterixDB installation](#Section1SingleMachineAsterixDBInstallation). In this section, we shall cover advanced topics related to AsterixDB configuration. Before we proceed, it is imperative to go through some preliminary concepts related to the AsterixDB runtime.
-### ASTERIX Runtime ###
-An ASTERIX runtime comprises of a ''master node'' and a set of ''worker nodes'', each identified by a unique id. The master node runs a ''Cluster Controller'' service (a.k.a. ''CC''), while each worker node runs a ''Node Controller'' service (a.k.a. ''NC''). Please note that a node in an ASTERIX cluster is a logical concept in the sense that multiple nodes may map to a single physical machine, which is the case for a single-machine ASTERIX installation. This association or mapping between an ASTERIX node and a physical machine is captured in a cluster configuration XML file. In addition, the XML file contains properties and parameters associated with each node.
+### AsterixDB Runtime ###
+An AsterixDB runtime consists of a ''master node'' and a set of ''worker nodes'', each identified by a unique id. The master node runs a ''Cluster Controller'' service (a.k.a. ''CC''), while each worker node runs a ''Node Controller'' service (a.k.a. ''NC''). Please note that a node in an AsterixDB cluster is a logical concept in the sense that multiple nodes may map to a single physical machine, which is the case for a single-machine AsterixDB installation. This association or mapping between an AsterixDB node and a physical machine is captured in a cluster configuration XML file. In addition, the XML file contains properties and parameters associated with each node.
-#### ASTERIX Runtime Configuration ####
-As observed earlier, Managix can auto-configure itself for a single-machine setup. As part of auto-configuration, Managix generated the cluster XML file. Let us understand the components of the generated cluster XML file. If you have configured Managix (via the "configure" command), you can find a similar cluster XML file as $MANAGIX_HOME/clusters/local/local.xml. The following is a sample XML file generated on a Ubuntu (Linux) setup:
+#### AsterixDB Runtime Configuration ####
+As observed earlier, Managix can auto-configure itself for a single-machine setup. As part of auto-configuration, Managix generated the cluster XML file. Let us understand the components of the generated cluster XML file. If you have configured Managix (via the `configure` command), you can find a similar cluster XML file at $MANAGIX_HOME/clusters/local/local.xml. The following is a sample XML file generated on an Ubuntu (Linux) setup:
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<cluster xmlns="cluster">
<name>local</name>
<java_home>/usr/lib/jvm/jdk1.7.0</java_home>
- <log_dir>/home/joe/asterix-mgmt/clusters/local/working_dir/logs</logdir>
+ <log_dir>/home/joe/asterix-mgmt/clusters/local/working_dir/logs</log_dir>
<txn_log_dir>/home/joe/asterix-mgmt/clusters/local/working_dir/logs</txn_log_dir>
<iodevices>/home/joe/asterix-mgmt/clusters/local/working_dir</iodevices>
<store>storage</store>
@@ -286,8 +286,11 @@
</working_dir>
<master_node>
<id>master</id>
- <client_ip>127.0.0.1</client_ip>
- <cluster_ip>127.0.0.1</cluster_ip>
+ <client-ip>127.0.0.1</client-ip>
+ <cluster-ip>127.0.0.1</cluster-ip>
+ <client_port>1098</client_port>
+ <cluster_port>1099</cluster_port>
+ <http_port>8888</http_port>
</master_node>
<node>
<id>node1</id>
@@ -297,14 +300,17 @@
We shall next explain the components of the cluster configuration XML file.
-#### (1) Defining nodes in ASTERIX runtime ####
-The single-machine ASTERIX instance configuration that is auto-generated by Managix (using the "configure" command) involves a master node (CC) and a worker node (NC). Each node is assigned a unique id and provided with an ip address (called ''cluster_ip'') that maps a node to a physical machine. The following snippet from the above XML file captures the master/worker nodes in our ASTERIX installation.
+#### (1) Defining nodes in AsterixDB runtime ####
+The single-machine AsterixDB instance configuration that is auto-generated by Managix (using the `configure` command) involves a master node (CC) and a worker node (NC). Each node is assigned a unique id and provided with an ip address (called ''cluster-ip'') that maps a node to a physical machine. The following snippet from the above XML file captures the master/worker nodes in our AsterixDB installation.
<master_node>
<id>master</id>
- <client_ip>127.0.0.1</client_ip>
- <cluster_ip>127.0.0.1</cluster_ip>
+ <client-ip>127.0.0.1</client-ip>
+ <cluster-ip>127.0.0.1</cluster-ip>
+ <client_port>1098</client_port>
+ <cluster_port>1099</cluster_port>
+ <http_port>8888</http_port>
</master_node>
<node>
<id>node1</id>
@@ -328,13 +334,26 @@
<td>IP address of the machine to which a node maps to. This address is used for all internal communication between the nodes.</td>
</tr>
<tr>
- <td>client_ip</td>
- <td>Provided for the master node. This IP should be reachable from clients that want to connect with ASTERIX via its web interface.</td>
+ <td>client-ip</td>
+ <td>Provided for the master node. This IP should be reachable from clients that want to connect with AsterixDB via its web interface.</td>
</tr>
+<tr>
+ <td>client_port</td>
+ <td>Provided for the master node. This is the port at which the Cluster Controller (CC) service listens for connections from clients.</td>
+</tr>
+<tr>
+ <td>cluster_port</td>
+ <td>Provided for the master node. This is the port used by the Cluster Controller (CC) service to listen for connections from Node Controllers (NCs). </td>
+</tr>
+<tr>
+ <td>http_port</td>
+ <td>Provided for the master node. This is the http port used by the Cluster Controller (CC) service. </td>
+</tr>
+
</table>
-#### (2) Properties associated with a worker node (NC) in ASTERIX ####
-The following is a list of properties associated with each worker node in an ASTERIX configuration.
+#### (2) Properties associated with a worker node (NC) in AsterixDB ####
+The following is a list of properties associated with each worker node in an AsterixDB configuration.
<table>
<tr>
@@ -347,11 +366,11 @@
</tr>
<tr>
<td>log_dir</td>
- <td>A directory where worker node may write logs.</td>
+ <td>A directory where the worker node JVM may write logs.</td>
</tr>
<tr>
<td>txn_log_dir</td>
- <td>A directory where worker node may write transaction logs.</td>
+ <td>A directory where the worker node writes transaction logs.</td>
</tr>
<tr>
<td>iodevices</td>
@@ -359,13 +378,13 @@
</tr>
<tr>
<td>store</td>
- <td>A data directory (under each iodevice) that ASTERIX uses to store data belonging to dataset(s).</td>
+ <td>A data directory (under each iodevice) that AsterixDB uses to store data belonging to dataset(s).</td>
</tr>
</table>
-All the above properties can be defined at the global level or a local level. In the former case, these properties apply to all the nodes in an ASTERIX configuration. In the latter case, these properties apply only to the node(s) under which they are defined. A property defined at the local level overrides the definition at the global level.
+All the above properties can be defined at the global level or a local level. In the former case, these properties apply to all the nodes in an AsterixDB configuration. In the latter case, these properties apply only to the node(s) under which they are defined. A property defined at the local level overrides the definition at the global level.
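+
+For example, a (hypothetical) cluster file could define `store` globally and override it for a single node (element values here are illustrative only):
+
+        <cluster xmlns="cluster">
+          <store>storage</store>              <!-- global: applies to every node -->
+          <node>
+            <id>node1</id>
+            <store>fast_storage</store>       <!-- local: overrides the global value for node1 -->
+          </node>
+        </cluster>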
-#### (3) Working directory of an ASTERIX instance ####
+#### (3) Working directory of an AsterixDB instance ####
Next we explain the following setting in the file $MANAGIX_HOME/clusters/local/local.xml.
@@ -375,10 +394,10 @@
</working_dir>
-Managix associates a working directory with an ASTERIX instance and uses this directory for transferring binaries to each node. If there exists a directory that is readable by each node, Managix can use it to place binaries that can be accessed and used by all the nodes in the ASTERIX set up. A network file system (NFS) provides such a functionality for a cluster of physical machines such that a path on NFS is accessible from each machine in the cluster. In the single-machine set up described above, all nodes correspond to a single physical machine. Each path on the local file system is accessible to all the nodes in the ASTERIX setup and the boolean value for NFS above is thus set to `true`.
+Managix associates a working directory with an AsterixDB instance and uses this directory for transferring binaries to each node. If there is a directory that is readable by each node, Managix can use it to place binaries that can be accessed and used by all the nodes in the AsterixDB setup. A network file system (NFS) provides such functionality for a cluster of physical machines, so that a path on NFS is accessible from each machine in the cluster. In the single-machine setup described above, all nodes correspond to a single physical machine. Each path on the local file system is accessible to all the nodes in the AsterixDB setup, and the boolean value for NFS above is thus set to `true`.
### Managix Configuration ###
-Managix allows creation and management of multiple ASTERIX instances and uses Zookeeper as its back-end database to keep track of information related to each instance. We need to provide a set of one or more hosts that Managix can use to run a Zookeeper instance. Zookeeper runs as a daemon process on each of the specified hosts. At each host, Zookeeper stores data under the Zookeeper home directory specified as part of the configuration. The following is an example configuration `$MANAGIX_HOME/conf/managix-conf.xml` that has Zookeeper running on the localhost (127.0.0.1) :
+Managix allows creation and management of multiple AsterixDB instances and uses Zookeeper as its back-end database to keep track of information related to each instance. We need to provide a set of one or more hosts that Managix can use to run a Zookeeper instance. Zookeeper runs as a daemon process on each of the specified hosts. At each host, Zookeeper stores data under the Zookeeper home directory specified as part of the configuration. The following is an example configuration `$MANAGIX_HOME/conf/managix-conf.xml` that has Zookeeper running on the localhost (127.0.0.1):
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
@@ -392,20 +411,20 @@
</zookeeper>
</configuration>
-It is possible to have a single host for Zookeeper. A larger number of hosts would use Zookeeper's replication and fault-tolerance feature such that a failure of a host running Zookeeper would not result in loss of information about existing ASTERIX instances.
+It is possible to have a single host for Zookeeper. A larger number of hosts would use Zookeeper's replication and fault-tolerance feature such that a failure of a host running Zookeeper would not result in loss of information about existing AsterixDB instances.
-## Section 3: Installing ASTERIX on a Cluster of Multiple Machines ##
-We assume that you have read the two sections above on single-machine ASTERIX setup. Next we explain how to install ASTERIX in a cluster of multiple machines. As an example, we assume we want to setup ASTERIX on a cluster of three machines, in which we use one machine (called machine A) as the master node and two other machines (called machine B and machine C) as the worker nodes, as shown in the following diagram:
+## <a id="Section3InstallingAsterixDBOnAClusterOfMultipleMachines">Section 3: Installing AsterixDB on a Cluster of Multiple Machines</a><font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+We assume that you have read the two sections above on single-machine AsterixDB setup. Next we explain how to install AsterixDB in a cluster of multiple machines. As an example, we assume we want to setup AsterixDB on a cluster of three machines, in which we use one machine (called machine A) as the master node and two other machines (called machine B and machine C) as the worker nodes, as shown in the following diagram:
![AsterixCluster](https://asterixdb.googlecode.com/files/AsterixCluster.png)
Notice that each machine has a ''cluster_ip'' address, which is used by these machines for their intra-cluster communication. Meanwhile, the master machine also has a ''client_ip'' address, using which an end-user outside the cluster can communicate with this machine. The reason we differentiate between these two types of IP addresses is that we can have a cluster of machines using a private network. In this case they have internal ip addresses that cannot be used outside the network. In the case all the machines are on a public network, the "client_ip" and "cluster_ip" of the master machine can share the same address.
-Next we describe how to set up ASTERIX in this cluster, assuming no Managix has been installed on these machines.
+Next we describe how to set up AsterixDB in this cluster, assuming no Managix has been installed on these machines.
-### Step (1): Define the ASTERIX cluster ###
+### Step (1): Define the AsterixDB cluster ###
-We first log into the master machine as the user "joe". On this machine, download Managix from [here](https://asterixdb.googlecode.com/files/asterix-installer-0.0.5-binary-assembly.zip) (same as above), then do the following steps similar to the single-machine case described above:
+We first log into the master machine as the user "joe". On this machine, download Managix from [here](http://asterixdb.ics.uci.edu/download/0.8.3/asterix-installer-0.8.3-binary-assembly.zip) (same as above), then do the following steps similar to the single-machine case described above:
machineA> cd ~
@@ -415,8 +434,10 @@
machineA> export MANAGIX_HOME=`pwd`
machineA> export PATH=$PATH:$MANAGIX_HOME/bin
+Note that it is recommended that MANAGIX_HOME not be located on a network file system (NFS): Managix creates artifacts/logs that do not need to be shared, so the overhead
+associated with creating them on the NFS should be avoided.
-We also need an ASTERIX configuration XML file for the cluster. We give the name to the cluster, say, "rainbow". We create a folder for the configuration of this cluster:
+We also need an AsterixDB configuration XML file for the cluster. We give the cluster a name, say "rainbow", and create a folder for its configuration:
machineA> mkdir $MANAGIX_HOME/rainbow_cluster
@@ -432,30 +453,32 @@
<!-- username, which should be valid for all the three machines -->
<username>joe</username>
- <!-- The working directory of Managix. It should be on a network file system (NFS) that
- can accessed by all the machine. -->
+ <!-- The working directory of Managix. It is recommended for the working
+ directory to be on a network file system (NFS) that can be accessed by
+ all machines.
+ Managix creates the directory if it doesn't exist. -->
<working_dir>
<dir>/home/joe/managix-workingDir</dir>
<NFS>true</NFS>
</working_dir>
- <!-- Directory for Asterix to store log information for each node. Needs
- to be on the local file system. -->
- <log_dir>/mnt/joe/logs</log_dir>
+ <!-- Directory for Asterix to store log information for each machine.
+ Needs to be on the local file system of each machine.
+ Managix creates the directory if it doesn't exist.
+ This property can be overridden for a node by redefining it at the node level. -->
+ <log_dir>/mnt/joe/logs</log_dir>
- <!-- Directory for Asterix to store transaction logs information for each node. Needs
- to be on the local file system. -->
- <txn_log_dir>/mnt/joe/txn-logs</txn_log_dir>
-
+ <!-- Mount point of an iodevice. Use a comma-separated list for a machine that
+ has multiple iodevices (disks).
+ This property can be overridden for a node by redefining it at the node level. -->
<iodevices>/mnt/joe</iodevices>
- <!-- Directory named (under each iodevice) that used by each worker node to store data files. Needs
- to be on the local file system. -->
+ <!-- Path on each iodevice where Asterix will store its data -->
<store>storage</store>
- <!-- Java home for each node. Can be overriden at node level. -->
+ <!-- Java home for each machine -->
<java_home>/usr/lib/jvm/jdk1.7.0</java_home>
-
+
<!-- IP addresses of the master machine A -->
<master_node>
<id>master</id>
@@ -524,7 +547,6 @@
machineA> ssh-keygen -t rsa -P ""
machineA> cat $HOME/.ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys
machineA> chmod 700 $HOME/.ssh/authorized_keys
-
If $HOME is not on the NFS, copy the id_rsa.pub to the directory ~/.ssh (login with the same account) on each machine, and then do the following on each machine. (Notice that this step is not needed if the folder ".ssh" is on the NFS and can be accessed by all the nodes.)
@@ -533,7 +555,7 @@
cd ~/.ssh
cat id_rsa.pub >> authorized_keys
chmod 700 $HOME/.ssh/authorized_keys
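+
+At this point you can verify that passwordless SSH works (a hypothetical check, not part of the original steps; "machineB" stands for any worker machine):
+
+        machineA> ssh joe@machineB hostname
+
+This should print the remote hostname without prompting for a password.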
-
+
Then run the following step again and type "yes" if prompted:
@@ -542,13 +564,13 @@
### Step (3): Configuring Managix ###
-Managix is using a configuration XML file at `$MANAGIX_HOME/conf/managix-conf.xml` to configure its own properties, such as its Zookeeper service. We can use the `configure` command to auto-generate this configuration file:
+Managix uses a configuration XML file at `$MANAGIX_HOME/conf/managix-conf.xml` to configure its own properties, such as its Zookeeper service. We can use the `configure` command to auto-generate this configuration file:
machineA> managix configure
-We use the validate command to validate managix configuration. To do so, execute the following.
+We use the `validate` command to validate the Managix configuration. To do so, execute the following.
machineA> managix validate
INFO: Environment [OK]
@@ -557,49 +579,49 @@
Note that the `configure` command also generates a cluster configuration XML file at $MANAGIX_HOME/conf/clusters/local.xml. This file is not needed in the case of a cluster of machines.
-### Step (4): Creating an ASTERIX instance ###
+### Step (4): Creating an AsterixDB instance ###
-Now that we have configured Managix, we shall next create an ASTERIX instance. An ASTERIX instance is identified by a unique name and is created using the create command. The usage description for the create command can be obtained by executing the following:
+Now that we have configured Managix, we shall next create an AsterixDB instance, which is identified by a unique name and is created using the `create` command. The usage description for the `create` command can be obtained by executing the following:
machineA> managix help -cmd create
- Creates an ASTERIX instance with a specified name. Post creation, the instance is in ACTIVE state,
+ Creates an AsterixDB instance with a specified name. Post creation, the instance is in ACTIVE state,
indicating its availability for executing statements/queries.
Usage arguments/options:
- -n Name of the ASTERIX instance.
+ -n Name of the AsterixDB instance.
-c Path to the cluster configuration file
-We shall now use the `create` command to create an ASTERIX instance called "rainbow_asterix". In doing so, we shall use the cluster configuration file that was auto-generated by Managix.
+We shall now use the `create` command to create an AsterixDB instance called "rainbow_asterix". In doing so, we shall use the cluster configuration file that was auto-generated by Managix.
machineA> managix create -n rainbow_asterix -c $MANAGIX_HOME/clusters/rainbow.xml
-If the response message does not have warning, then Congratulations! You have successfully installed Asterix on this cluster of machines!
+If the response message does not contain a warning, then congratulations! You have successfully installed AsterixDB on this cluster of machines!
-Please refer to the section [Managing the Lifecycle of an ASTERIX Instance](#Section_4:_Managing_the_Lifecycle_of_an_ASTERIX_Instance) for a detailed description on the set of available commands/operations that let you manage the lifecycle of an ASTERIX instance. Note that the output of the commands varies with the cluster definition and may not apply to the cluster specification you built above.
+Please refer to the section [Managing the Lifecycle of an AsterixDB Instance](#Section4ManagingTheLifecycleOfAnAsterixDBInstance) for a detailed description of the set of available commands/operations that let you manage the lifecycle of an AsterixDB instance. Note that the output of the commands varies with the cluster definition and may not apply to the cluster specification you built above.
-## Section 4: Managing the Lifecycle of an ASTERIX Instance ##
+## <a id="Section4ManagingTheLifecycleOfAnAsterixDBInstance">Section 4: Managing the Lifecycle of an AsterixDB Instance</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
-Now that we have an ASTERIX instance running, let us use Managix to manage the instance's lifecycle. Managix provides the following set of commands/operations:
+Now that we have an AsterixDB instance running, let us use Managix to manage the instance's lifecycle. Managix provides the following set of commands/operations:
#### Managix Commands ####
<table>
<tr><td>Command</td> <td>Description</td></tr>
-<tr><td><a href="#Creating_an_ASTERIX_instance">create</a></td> <td>Creates a new asterix instance.</td></tr>
+<tr><td><a href="#Creating_an_AsterixDB_instance">create</a></td> <td>Creates a new asterix instance.</td></tr>
<tr><td><a href="#Describe_Command" >describe</a></td> <td>Describes an existing asterix instance.</td></tr>
<tr><td><a href="#Stop_Command" >stop</a></td> <td>Stops an asterix instance that is in the ACTIVE state.</td></tr>
-<tr><td><a href="#Start_Command" >start</a></td> <td>Starts an Asterix instance.</td></tr>
-<tr><td><a href="#Backup_Command" >backup</a></td> <td>Creates a backup for an existing Asterix instance.</td></tr>
-<tr><td><a href="#Restore_Command" >restore</a></td> <td>Restores an Asterix instance.</td></tr>
-<tr><td><a href="#Delete_Command" >delete</a></td> <td>Deletes an Asterix instance.</td></tr>
+<tr><td><a href="#Start_Command" >start</a></td> <td>Starts an AsterixDB instance.</td></tr>
+<tr><td><a href="#Backup_Command" >backup</a></td> <td>Creates a backup for an existing AsterixDB instance.</td></tr>
+<tr><td><a href="#Restore_Command" >restore</a></td> <td>Restores an AsterixDB instance.</td></tr>
+<tr><td><a href="#Delete_Command" >delete</a></td> <td>Deletes an AsterixDB instance.</td></tr>
<tr><td><a href="#Configuring_Managix" >validate</a></td> <td>Validates the installer/cluster configuration.</td></tr>
-<tr><td><a href="#Configuring_Managix" >configure</a></td><td>Auto generate configuration for an Asterix instance.</td></tr>
+<tr><td><a href="#Configuring_Managix" >configure</a></td><td>Auto generates a configuration for an AsterixDB instance.</td></tr>
<tr><td><a href="#Log_Command" >log</a></td><td>Produces a zip archive containing log files from each node in an AsterixDB instance.</td></tr>
-<tr><td><a href="#Shutdown_Command" >shutdown</a></td> <td>Shutdown the installer service.</td></tr>
+<tr><td><a href="#Shutdown_Command" >shutdown</a></td> <td>Shuts down the installer service.</td></tr>
</table>
You may obtain the above listing by simply executing `managix`:
@@ -607,22 +629,22 @@
$ managix
-We already talked about create and validate commands. We shall next explain the rest of the commands listed above. We also provide sample output messages of these commands assuming we are running an ASTERIX instance on a single machine.
+We already talked about the `create` and `validate` commands. We shall next explain the rest of the commands listed above. We also provide sample output messages of these commands, assuming we are running an AsterixDB instance on a single machine.
##### Describe Command #####
-The `describe` command provides information about an ASTERIX instance. The usage can be looked up by executing the following:
+The `describe` command provides information about an AsterixDB instance. The usage can be looked up by executing the following:
$ managix help -cmd describe
- Provides information about an ASTERIX instance.
+ Provides information about an AsterixDB instance.
The following options are available:
- [-n] Name of the ASTERIX instance.
+ [-n] Name of the AsterixDB instance.
[-admin] Provides a detailed description
The brackets indicate optional flags.
-The output of the `describe` command when used without the `admin` flag contains minimal information and is similar to the output of the create command. Let us try running the describe command in "admin" mode.
+The output of the `describe` command when used without the `admin` flag contains minimal information and is similar to the output of the `create` command. Let us try running the `describe` command in "admin" mode.
$ managix describe -n my_asterix -admin
@@ -640,26 +662,56 @@
Processes
NC at 127.0.0.1 [ 22195 ]
CC at 127.0.0.1 [ 22161 ]
+
+ Asterix Configuration
+ nc.java.opts :-Xmx1024m
+ cc.java.opts :-Xmx1024m
+ storage.buffercache.pagesize :32768
+ storage.buffercache.size :33554432
+ storage.buffercache.maxopenfiles :214748364
+ storage.memorycomponent.pagesize :32768
+ storage.memorycomponent.numpages :1024
+ storage.memorycomponent.globalbudget :536870192
+ storage.lsm.mergethreshold :3
+ storage.lsm.bloomfilter.falsepositiverate:0.01
+ txn.log.buffer.numpages :8
+ txn.log.buffer.pagesize :131072
+ txn.log.partitionsize :2147483648
+ txn.log.disksectorsize :4096
+ txn.log.groupcommitinterval :1
+ txn.log.checkpoint.lsnthreshold :67108864
+ txn.log.checkpoint.pollfrequency :120
+ txn.log.checkpoint.history :0
+ txn.lock.escalationthreshold :1000
+ txn.lock.shrinktimer :5000
+ txn.lock.timeout.waitthreshold :60000
+ txn.lock.timeout.sweepthreshold :10000
+ compiler.sortmemory :33554432
+ compiler.joinmemory :33554432
+ compiler.framesize :32768
+ web.port :19001
+ api.port :19002
+ log.level :INFO
As seen above, the instance 'my_asterix' is configured such that all processes run on the localhost (127.0.0.1). The process id for each process (JVM) is shown next to it.
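+
+One way to cross-check a listed process id (a sketch using standard Unix tools; 22161 is just the sample CC pid from the output above):
+
+        $ ps -p 22161 -o pid,comm,args
+
+This should show the JVM hosting the CC process.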
##### Stop Command #####
-The `stop` command can be used for shutting down an ASTERIX instance. After that, the instance is unavailable for executing queries. The usage can be looked up by executing the following:
+The `stop` command can be used for shutting down an AsterixDB instance. After that, the instance is unavailable for executing queries. The usage can be looked up by executing the following.
$ managix help -cmd stop
- Shuts an ASTERIX instance that is in ACTIVE state. After executing the stop command, the ASTERIX instance transits
+ Shuts an AsterixDB instance that is in ACTIVE state. After executing the stop command, the AsterixDB instance transits
to the INACTIVE state, indicating that it is no longer available for executing queries.
Available arguments/options
- -n name of the ASTERIX instance.
+ -n name of the AsterixDB instance.
-To stop the ASTERIX instance.
+To stop the AsterixDB instance, execute the following:
$ managix stop -n my_asterix
- INFO: Stopped Asterix instance: my_asterix
+ INFO: Stopped AsterixDB instance: my_asterix
$ managix describe -n my_asterix
@@ -670,17 +722,17 @@
##### Start Command #####
-The `start` command starts an ASTERIX instance that is in the INACTIVE state. The usage can be looked up by executing the following:
+The `start` command starts an AsterixDB instance that is in the INACTIVE state. The usage can be looked up by executing the following:
$ managix help -cmd start
- Starts an ASTERIX instance that is in INACTIVE state. After executing the start command, the ASTERIX instance transits to the ACTIVE state, indicating that it is now available for executing statements/queries.
+ Starts an AsterixDB instance that is in INACTIVE state. After executing the start command, the AsterixDB instance transits to the ACTIVE state, indicating that it is now available for executing statements/queries.
Available arguments/options
- -n name of the ASTERIX instance.
+ -n name of the AsterixDB instance.
-Let us now start the ASTERIX instance.
+Let us now start the AsterixDB instance.
$ managix start -n my_asterix
@@ -692,22 +744,22 @@
##### Backup Command #####
-In an undesirable event of data loss either due to a disk/system failure or accidental execution of a DDL statement (drop dataverse/dataset), you may need to recover the lost data. The backup command allows you to take a backup of the data stored with an ASTERIX instance. The backup can be taken on the local file system or on an HDFS instance. In either case, the snapshots are stored under a backup directory. You need to make sure the backup directory has appropriate read/write permissions. Configuring settings for backup can be found inside the Managix's configuration file located at `$MANAGIX_HOME/conf/managix-conf.xml`.
+The backup command allows you to take a backup of the data stored within an AsterixDB instance. The backup can be taken on the local file system or on an HDFS instance. In either case, the snapshots are stored under a backup directory. You need to make sure the backup directory has appropriate read/write permissions. Settings for backup can be configured in Managix's configuration file, located at `$MANAGIX_HOME/conf/managix-conf.xml`.
*Configuring backup on the local file system*
-We need to provide path to a backup directory on the local file system. The backup directory can be configured be editing the Managix configuration XML, found at `$MANAGIX_HOME/conf/managix-conf.xml`.
+We need to provide a path to a backup directory on the local file system. The backup directory can be configured by editing the Managix configuration XML, found at `$MANAGIX_HOME/conf/managix-conf.xml`.
<backup>
<backupDir>Provide path to the backup directory here</backupDir>
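        <!-- e.g. (hypothetical path): <backupDir>/home/joe/asterix-backups</backupDir> -->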
</backup>
-Prior to taking a backup of an ASTERIX instance, it is required for the instance to be in the INACTIVE state. We do so by using the `stop` command, as shown below:
+Prior to taking a backup of an AsterixDB instance, the instance must be in the INACTIVE state. We ensure this by using the `stop` command, as shown below:
$ managix stop -n my_asterix
- INFO: Stopped Asterix instance: my_asterix
+ INFO: Stopped AsterixDB instance: my_asterix
We can now take the backup by executing the following:
@@ -718,7 +770,7 @@
*Configuring backup on an HDFS instance*
-To configure a backups to be taken on an HDFS instance, we need to provide required information about the running HDFS instance. This information includes the HDFS version and the HDFS url. Simply edit the Managix configuration file and provide the required information.
+To configure a backup to be taken on an HDFS instance, we need to provide the required information about the running HDFS instance. This information includes the HDFS version and the HDFS URL. Simply edit the Managix configuration file and provide the required information.
<backup>
@@ -756,29 +808,29 @@
Processes
-The above output shows the available backup identified by it's id (0). We shall next describe the method for restoring an ASTERIX instance from a backup snapshot.
+The above output shows the available backup identified by its id (0). We shall next describe the method for restoring an AsterixDB instance from a backup snapshot.
##### Restore Command #####
-The `restore` command allows you to restore an ASTERIX instance's data from a previously taken backup. The usage description can be obtained as follows:
+The `restore` command allows you to restore an AsterixDB instance's data from a previously taken backup. The usage description can be obtained as follows:
$ managix help -cmd restore
- Restores an ASTERIX instance's data from a previously taken backup.
+ Restores an AsterixDB instance's data from a previously taken backup.
Available arguments/options
- -n name of the ASTERIX instance
+ -n name of the AsterixDB instance
-b id of the backup snapshot
-The following command restores our ASTERIX instance from the backup snapshot identified by the id (0). Prior to restoring an instance from a backup, it is required that the instance is in the INACTIVE state.
+The following command restores our AsterixDB instance from the backup snapshot identified by the id (0). Prior to restoring an instance from a backup, the instance must be in the INACTIVE state.
$ managix restore -n my_asterix -b 0
- INFO: Asterix instance: my_asterix has been restored from backup
+ INFO: AsterixDB instance: my_asterix has been restored from backup
-You can start the ASTERIX instance by using the start command.
+You can start the AsterixDB instance by using the start command.
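+For example:
+
+        $ managix start -n my_asterix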
##### Log Command #####
@@ -800,22 +852,22 @@
##### Delete Command #####
-As the name suggests, the `delete` command permanently removes an ASTERIX instance by cleaning up all associated data/artifacts. The usage can be looked up by executing the following:
+As the name suggests, the `delete` command permanently removes an AsterixDB instance by cleaning up all associated data/artifacts. The usage can be looked up by executing the following:
$ managix help -cmd delete
- Permanently deletes an ASTERIX instance. The instance must be in the INACTIVE state.
+ Permanently deletes an AsterixDB instance. The instance must be in the INACTIVE state.
Available arguments/options
- -n name of the ASTERIX instance.
+ -n name of the AsterixDB instance.
$ managix delete -n my_asterix
- INFO: Asterix instance my_asterix deleted.
+ INFO: AsterixDB instance my_asterix deleted.
##### Shutdown Command #####
-Managix uses Zookeeper service for storing all information about created ASTERIX instances. The Zookeeper service runs in the background and can be shut down using the `shutdown` command.
+Managix uses the Zookeeper service for storing all information about created AsterixDB instances. The Zookeeper service runs in the background and can be shut down using the `shutdown` command.
$ managix shutdown
@@ -832,11 +884,11 @@
$ managix help -cmd configure
- Auto-generates the ASTERIX installer configruation settings and ASTERIX cluster
+ Auto-generates the AsterixDB installer configuration settings and AsterixDB cluster
configuration settings for a single node setup.
-## Section 5: Frequently Asked Questions ##
+## <a id="Section5FAQ">Section 5: Frequently Asked Questions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
##### Question #####
@@ -868,29 +920,28 @@
##### Question #####
Do I need to create all the directories/paths I put into the cluster configuration XML?
-##### Answer #####
+##### Answer #####
Managix will create a path if it does not exist. It does so using the user account specified in the cluster configuration XML.
Please ensure that the user account has appropriate permissions to create the missing paths.
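
A quick way to confirm this up front (a hypothetical check; "machineB" stands for any worker machine and the path is the example value from the cluster configuration above):

        machineA> ssh joe@machineB 'mkdir -p /mnt/joe/logs && echo OK'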
-##### Question #####
-
+##### Question #####
Should MANAGIX_HOME be on the network file system (NFS)?
##### Answer #####
It is recommended that MANAGIX_HOME not be on the NFS. Managix produces artifacts/logs on disk that do not need to be shared.
As such, the overhead of creating the artifacts/logs on the NFS should be avoided.
-##### Question #####
+##### Question #####
-Question: How do we change the underlying code (apply a code patch) for an 'active' asterix instance?
+How do we change the underlying code (apply a code patch) for an 'active' asterix instance?
##### Answer #####
At times, an end user (particularly an AsterixDB developer) may need to alter the underlying code that is being run by an asterix instance. In the current version of Managix, this can be achieved as follows:
-Assume that you have an 'active' instance by the name a1 that is running version v1 of asterix.
-You have a revised version of asterix - v2 that fixes some bug(s).
+Assume that you have an 'active' instance by the name a1 that is running version v1 of asterix.
+You have a revised version of asterix, v2, that fixes some bug(s).
To upgrade asterix from v1 to v2:
@@ -900,12 +951,12 @@
step 3) copy asterix-server zip (version v2) to $MANAGIX_HOME/asterix/
-step 4) managix start -n a1
+step 4) managix start -n a1
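+
+For example, steps (3) and (4) as shell commands (the zip file name here is hypothetical):
+
+        $ cp asterix-server-v2-binary-assembly.zip $MANAGIX_HOME/asterix/
+        $ managix start -n a1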
-a1 now is running on version v2.
+a1 is now running version v2.
Limitations:
-a) Obviously this wont work in a situation where v2 has made a change that is incompatible with earlier version, such altering schema.
+a) Obviously this won't work in a situation where v2 has made a change that is incompatible with the earlier version, such as altering the schema.
-b) A change in asterix zip applies to all existing instances (after a restart) and subsequent instances that user creates.
+b) A change in the asterix zip applies to all existing instances (after a restart) and to subsequent instances that the user creates.
diff --git a/asterix-doc/src/site/site.xml b/asterix-doc/src/site/site.xml
index 150544e..a9794ed 100644
--- a/asterix-doc/src/site/site.xml
+++ b/asterix-doc/src/site/site.xml
@@ -21,7 +21,7 @@
<bannerLeft>
<name>AsterixDB</name>
<src>images/asterixlogo.png</src>
- <href>/index.html</href>
+ <href>http://asterixdb.ics.uci.edu/</href>
</bannerLeft>
<version position="right"/>
@@ -54,16 +54,28 @@
</custom>
<body>
+ <head>
+ <script>
+ (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+ m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+ })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+ ga('create', 'UA-41536543-1', 'uci.edu');
+ ga('send', 'pageview');
+ </script>
+ </head>
<links>
- <item name="Home" href="index.html"/>
+ <item name="Documentation Home" href="index.html"/>
</links>
<menu name="Documentation">
<item name="Installing and Managing AsterixDB using Managix" href="install.html"/>
<item name="AsterixDB 101: An ADM and AQL Primer" href="aql/primer.html"/>
<item name="Asterix Data Model (ADM)" href="aql/datamodel.html"/>
- <item name="Asterix Functions" href="aql/functions.html"/>
<item name="Asterix Query Language (AQL)" href="aql/manual.html"/>
+ <item name="AQL Functions" href="aql/functions.html"/>
+ <item name="AQL Allen's Relations Functions" href="aql/allens.html"/>
<item name="AQL Support of Similarity Queries" href="aql/similarity.html"/>
<item name="Accessing External Data" href="aql/externaldata.html"/>
<item name="REST API to AsterixDB" href="api.html"/>
diff --git a/asterix-events/pom.xml b/asterix-events/pom.xml
index f221df2..00996f4 100644
--- a/asterix-events/pom.xml
+++ b/asterix-events/pom.xml
@@ -1,196 +1,206 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!-- ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License"); ! you may
+ not use this file except in compliance with the License. ! you may obtain
+ a copy of the License from ! ! http://www.apache.org/licenses/LICENSE-2.0
+ ! ! Unless required by applicable law or agreed to in writing, software !
+ distributed under the License is distributed on an "AS IS" BASIS, ! WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ! See the
+ License for the specific language governing permissions and ! limitations
+ under the License. ! -->
+
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>asterix</artifactId>
- <groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
- </parent>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-events</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>asterix</artifactId>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <version>0.8.4-SNAPSHOT</version>
+ </parent>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-events</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- </properties>
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>2.0.2</version>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jvnet.jaxb2.maven2</groupId>
- <artifactId>maven-jaxb2-plugin</artifactId>
- <executions>
- <execution>
- <id>event</id>
- <goals>
- <goal>generate</goal>
- </goals>
- <configuration>
- <args>
- <arg>-Xsetters</arg>
- <arg>-Xvalue-constructor</arg>
- </args>
- <plugins>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-basics</artifactId>
- <version>0.6.2</version>
- </plugin>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-value-constructor</artifactId>
- <version>3.0</version>
- </plugin>
- </plugins>
- <schemaDirectory>src/main/resources/schema</schemaDirectory>
- <schemaIncludes>
- <include>event.xsd</include>
- </schemaIncludes>
- <generatePackage>edu.uci.ics.asterix.event.schema.event</generatePackage>
- <generateDirectory>${project.build.directory}/generated-sources/event</generateDirectory>
- </configuration>
- </execution>
- <execution>
- <id>pattern</id>
- <goals>
- <goal>generate</goal>
- </goals>
- <configuration>
- <args>
- <arg>-Xsetters</arg>
- <arg>-Xvalue-constructor</arg>
- </args>
- <plugins>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-basics</artifactId>
- <version>0.6.2</version>
- </plugin>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-value-constructor</artifactId>
- <version>3.0</version>
- </plugin>
- </plugins>
- <schemaDirectory>src/main/resources/schema</schemaDirectory>
- <schemaIncludes>
- <include>pattern.xsd</include>
- </schemaIncludes>
- <generatePackage>edu.uci.ics.asterix.event.schema.pattern</generatePackage>
- <generateDirectory>${project.build.directory}/generated-sources/pattern</generateDirectory>
- </configuration>
- </execution>
- <execution>
- <id>cluster</id>
- <goals>
- <goal>generate</goal>
- </goals>
- <configuration>
- <args>
- <arg>-Xsetters</arg>
- <arg>-Xvalue-constructor</arg>
- </args>
- <plugins>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-basics</artifactId>
- <version>0.6.2</version>
- </plugin>
- <plugin>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-value-constructor</artifactId>
- <version>3.0</version>
- </plugin>
- </plugins>
- <schemaDirectory>src/main/resources/schema</schemaDirectory>
- <schemaIncludes>
- <include>cluster.xsd</include>
- </schemaIncludes>
- <generatePackage>edu.uci.ics.asterix.event.schema.cluster</generatePackage>
- <generateDirectory>${project.build.directory}/generated-sources/cluster</generateDirectory>
- <bindingDirectory>src/main/resources/schema</bindingDirectory>
- <bindingIncludes>
- <bindingInclude>jaxb-bindings.xjb</bindingInclude>
- </bindingIncludes>
- </configuration>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.2-beta-2</version>
- <executions>
- <execution>
- <configuration>
- <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
- </configuration>
- <phase>package</phase>
- <goals>
- <goal>attached</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>args4j</groupId>
- <artifactId>args4j</artifactId>
- <version>2.0.12</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.jvnet.jaxb2_commons</groupId>
- <artifactId>jaxb2-value-constructor</artifactId>
- <version>3.0</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>1.4</version>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.9</version>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>event</id>
+ <goals>
+ <goal>generate</goal>
+ </goals>
+ <configuration>
+ <args>
+ <arg>-Xsetters</arg>
+ <arg>-Xvalue-constructor</arg>
+ </args>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-basics</artifactId>
+ <version>0.6.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-value-constructor</artifactId>
+ <version>3.0</version>
+ </plugin>
+ </plugins>
+ <schemaDirectory>src/main/resources/schema</schemaDirectory>
+ <schemaIncludes>
+ <include>event.xsd</include>
+ </schemaIncludes>
+ <generatePackage>edu.uci.ics.asterix.event.schema.event</generatePackage>
+ <generateDirectory>${project.build.directory}/generated-sources/event</generateDirectory>
+ </configuration>
+ </execution>
+ <execution>
+ <id>configuration</id>
+ <goals>
+ <goal>generate</goal>
+ </goals>
+ <configuration>
+ <args>
+ <arg>-Xsetters</arg>
+ <arg>-Xvalue-constructor</arg>
+ </args>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-basics</artifactId>
+ <version>0.6.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-value-constructor</artifactId>
+ <version>3.0</version>
+ </plugin>
+ </plugins>
+ <schemaDirectory>src/main/resources/schema</schemaDirectory>
+ <schemaIncludes>
+ <include>installer-conf.xsd</include>
+ </schemaIncludes>
+ <generatePackage>edu.uci.ics.asterix.installer.schema.conf</generatePackage>
+ <generateDirectory>${project.build.directory}/generated-sources/configuration</generateDirectory>
+ </configuration>
+ </execution>
+ <execution>
+ <id>pattern</id>
+ <goals>
+ <goal>generate</goal>
+ </goals>
+ <configuration>
+ <args>
+ <arg>-Xsetters</arg>
+ <arg>-Xvalue-constructor</arg>
+ </args>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-basics</artifactId>
+ <version>0.6.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-value-constructor</artifactId>
+ <version>3.0</version>
+ </plugin>
+ </plugins>
+ <schemaDirectory>src/main/resources/schema</schemaDirectory>
+ <schemaIncludes>
+ <include>pattern.xsd</include>
+ </schemaIncludes>
+ <generatePackage>edu.uci.ics.asterix.event.schema.pattern</generatePackage>
+ <generateDirectory>${project.build.directory}/generated-sources/pattern</generateDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-2</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>args4j</groupId>
+ <artifactId>args4j</artifactId>
+ <version>2.0.12</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.jvnet.jaxb2_commons</groupId>
+ <artifactId>jaxb2-value-constructor</artifactId>
+ <version>3.0</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>1.4</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>3.4.5</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.sun.jmx</groupId>
+ <artifactId>jmxri</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jdmk</groupId>
+ <artifactId>jmxtools</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.jms</groupId>
+ <artifactId>jms</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-common</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.9</version>
+ </dependency>
+ </dependencies>
</project>
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/ClusterInfo.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/ClusterInfo.java
index 5a3649f..6b2ee32 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/ClusterInfo.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/ClusterInfo.java
@@ -18,9 +18,9 @@
public class ClusterInfo {
- List<NodeInfo> nodes;
+ List<NodeInfo> nodes;
- public ClusterInfo(List<NodeInfo> nodes) {
- this.nodes = nodes;
- }
+ public ClusterInfo(List<NodeInfo> nodes) {
+ this.nodes = nodes;
+ }
}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/NodeInfo.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/NodeInfo.java
index c51bae3..b2992b6 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/NodeInfo.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/api/NodeInfo.java
@@ -16,19 +16,19 @@
public class NodeInfo {
- private final String id;
- private final String ip;
+ private final String id;
+ private final String ip;
- public NodeInfo(String id, String ip) {
- this.id = id;
- this.ip = ip;
- }
+ public NodeInfo(String id, String ip) {
+ this.id = id;
+ this.ip = ip;
+ }
- public String getId() {
- return id;
- }
+ public String getId() {
+ return id;
+ }
- public String getIp() {
- return ip;
- }
+ public String getIp() {
+ return ip;
+ }
}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventConfig.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventConfig.java
index c86dfae..f9590c0 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventConfig.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventConfig.java
@@ -18,19 +18,19 @@
public class EventConfig {
- @Option(name = "-h", required = false, usage = "Help")
- public boolean help = false;
+ @Option(name = "-h", required = false, usage = "Help")
+ public boolean help = false;
- @Option(name = "-d", required = false, usage = "Show the execution on a timeline")
- public boolean dryRun = false;
+ @Option(name = "-d", required = false, usage = "Show the execution on a timeline")
+ public boolean dryRun = false;
- @Option(name = "-s", required = false, usage = "Seed for randomization")
- public int seed = -1;
+ @Option(name = "-s", required = false, usage = "Seed for randomization")
+ public int seed = -1;
- @Option(name = "-c", required = true, usage = "Path to cluster configuration (REQUIRED)")
- public String clusterPath;
+ @Option(name = "-c", required = true, usage = "Path to cluster configuration (REQUIRED)")
+ public String clusterPath;
- @Option(name = "-p", required = true, usage = "Path to pattern configuration (REQUIRED)")
- public String patternPath;
+ @Option(name = "-p", required = true, usage = "Path to pattern configuration (REQUIRED)")
+ public String patternPath;
}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
index 641b37b..2317cfa 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
@@ -29,7 +29,7 @@
import edu.uci.ics.asterix.event.management.DefaultOutputHandler;
import edu.uci.ics.asterix.event.management.EventUtil;
-import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
import edu.uci.ics.asterix.event.management.IOutputHandler;
import edu.uci.ics.asterix.event.management.Randomizer;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
@@ -41,7 +41,7 @@
public class EventDriver {
public static final String CLIENT_NODE_ID = "client_node";
- public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID, "127.0.0.1", null, null, null, null, null);
+ public static final Node CLIENT_NODE = new Node(CLIENT_NODE_ID, "127.0.0.1", null, null, null, null, null, null);
private static String eventsDir;
private static Events events;
@@ -87,15 +87,6 @@
}
- public static EventrixClient getClient(String eventsDir, Cluster cluster, boolean dryRun) throws Exception {
- return new EventrixClient(eventsDir, cluster, dryRun, new DefaultOutputHandler());
- }
-
- public static EventrixClient getClient(String eventsDir, Cluster cluster, boolean dryRun,
- IOutputHandler outputHandler) throws Exception {
- return new EventrixClient(eventsDir, cluster, dryRun, outputHandler);
- }
-
public static void main(String[] args) throws Exception {
String eventsHome = System.getenv("EVENT_HOME");
if (eventsHome == null) {
@@ -119,9 +110,9 @@
if (!eventConfig.dryRun) {
prepare(cluster);
}
- EventrixClient client = new EventrixClient(eventsDir, cluster, eventConfig.dryRun,
- new DefaultOutputHandler());
- client.submit(patterns);
+ //AsterixEventServiceClient client = new AsterixEventServiceClient(eventsDir, cluster, eventConfig.dryRun,
+ // new DefaultOutputHandler());
+ // client.submit(patterns);
if (!eventConfig.dryRun) {
cleanup(cluster);
}
@@ -156,4 +147,5 @@
}
EventUtil.executeLocalScript(clientNode, eventsDir + "/" + "events" + "/" + "cleanup.sh", args);
}
+
}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/EventException.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/EventException.java
new file mode 100644
index 0000000..83ed5f4
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/EventException.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.error;
+
+public class EventException extends Exception {
+
+ private static final long serialVersionUID = 1L;
+
+ public EventException(String message) {
+ super(message);
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/OutputHandler.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/OutputHandler.java
new file mode 100644
index 0000000..3bc795a
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/OutputHandler.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.error;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.Properties;
+
+import edu.uci.ics.asterix.event.management.IOutputHandler;
+import edu.uci.ics.asterix.event.management.OutputAnalysis;
+import edu.uci.ics.asterix.event.model.EventList.EventType;
+import edu.uci.ics.asterix.event.schema.pattern.Event;
+
+public class OutputHandler implements IOutputHandler {
+
+ public static IOutputHandler INSTANCE = new OutputHandler();
+
+ private OutputHandler() {
+
+ }
+
+ public OutputAnalysis reportEventOutput(Event event, String output) {
+
+ EventType eventType = EventType.valueOf(event.getType().toUpperCase());
+ boolean ignore = true;
+ String trimmedOutput = output.trim();
+ StringBuffer errorMessage = new StringBuffer();
+ switch (eventType) {
+ case FILE_TRANSFER:
+ if (trimmedOutput.length() > 0) {
+ if (output.contains("Permission denied") || output.contains("cannot find or open")) {
+ ignore = false;
+ break;
+ }
+ }
+ break;
+
+ case BACKUP:
+ case RESTORE:
+ if (trimmedOutput.length() > 0) {
+ if (trimmedOutput.contains("AccessControlException")) {
+ errorMessage.append("Insufficient permissions on back up directory");
+ ignore = false;
+ }
+ if (output.contains("does not exist") || output.contains("File exist")
+ || (output.contains("No such file or directory"))) {
+ ignore = true;
+ } else {
+ ignore = false;
+ }
+ }
+ break;
+
+ case NODE_INFO:
+ Properties p = new Properties();
+ try {
+ p.load(new ByteArrayInputStream(trimmedOutput.getBytes()));
+ } catch (IOException e) {
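+ // parse failures are intentionally ignored: an unreadable NODE_INFO
+ // payload falls through to the null check on java_version below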
+ }
+ String javaVersion = (String) p.get("java_version");
+ if (p.get("java_version") == null) {
+ errorMessage.append("Java not installed on " + event.getNodeid().getValue().getAbsvalue());
+ ignore = false;
+ } else if (!javaVersion.contains("1.7")) {
+ errorMessage.append("Asterix requires Java 1.7.x. Incompatible version found on "
+ + event.getNodeid().getValue().getAbsvalue() + "\n");
+ ignore = false;
+ }
+ break;
+ }
+ if (ignore) {
+ return new OutputAnalysis(true, null);
+ } else {
+ return new OutputAnalysis(false, errorMessage.toString());
+ }
+ }
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/VerificationUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/VerificationUtil.java
new file mode 100644
index 0000000..3ab2809
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/error/VerificationUtil.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.error;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.model.ProcessInfo;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+
+public class VerificationUtil {
+
+ private static final String VERIFY_SCRIPT_PATH = AsterixEventService.getEventHome() + File.separator + "scripts"
+ + File.separator + "verify.sh";
+
+ public static AsterixRuntimeState getAsterixRuntimeState(AsterixInstance instance) throws Exception {
+
+ Cluster cluster = instance.getCluster();
+ List<String> args = new ArrayList<String>();
+ args.add(instance.getName());
+ args.add(instance.getCluster().getMasterNode().getClusterIp());
+ for (Node node : cluster.getNode()) {
+ args.add(node.getClusterIp());
+ args.add(instance.getName() + "_" + node.getId());
+ }
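+ // brief fixed delay so the processes being probed have time to settle
+ // before the verify script inspects them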
+ Thread.sleep(2000);
+ String output = AsterixEventServiceUtil.executeLocalScript(VERIFY_SCRIPT_PATH, args);
+ boolean ccRunning = true;
+ List<String> failedNCs = new ArrayList<String>();
+ String[] infoFields;
+ ProcessInfo pInfo;
+ List<ProcessInfo> processes = new ArrayList<ProcessInfo>();
+
+ for (String line : output.split("\n")) {
+ String nodeid = null;
+ infoFields = line.split(":");
+ try {
+ int pid = Integer.parseInt(infoFields[3]);
+ if (infoFields[0].equals("NC")) {
+ nodeid = infoFields[2].split("_")[1];
+ } else {
+ nodeid = instance.getCluster().getMasterNode().getId();
+ }
+ pInfo = new ProcessInfo(infoFields[0], infoFields[1], nodeid, pid);
+ processes.add(pInfo);
+ } catch (Exception e) {
+ if (infoFields[0].equalsIgnoreCase("CC")) {
+ ccRunning = false;
+ } else {
+ failedNCs.add(infoFields[1]);
+ }
+ }
+ }
+ return new AsterixRuntimeState(processes, failedNCs, ccRunning);
+ }
+
+ public static void updateInstanceWithRuntimeDescription(AsterixInstance instance, AsterixRuntimeState state,
+ boolean expectedRunning) {
+ StringBuffer summary = new StringBuffer();
+ if (expectedRunning) {
+ if (!state.isCcRunning()) {
+ summary.append("Cluster Controller not running at " + instance.getCluster().getMasterNode().getId()
+ + "\n");
+ instance.setState(State.UNUSABLE);
+ }
+ if (state.getFailedNCs() != null && !state.getFailedNCs().isEmpty()) {
+ summary.append("Node Controller not running at the following nodes" + "\n");
+ for (String failedNC : state.getFailedNCs()) {
+ summary.append(failedNC + "\n");
+ }
+ // instance.setState(State.UNUSABLE);
+ }
+ if (!(instance.getState().equals(State.UNUSABLE))) {
+ instance.setState(State.ACTIVE);
+ }
+ } else {
+ if (state.getProcesses() != null && state.getProcesses().size() > 0) {
+ summary.append("Following process still running " + "\n");
+ for (ProcessInfo pInfo : state.getProcesses()) {
+ summary.append(pInfo + "\n");
+ }
+ // instance.setState(State.UNUSABLE);
+ } else {
+ // instance.setState(State.INACTIVE);
+ }
+ }
+ state.setSummary(summary.toString());
+ instance.setAsterixRuntimeStates(state);
+ }
+
+ public static void verifyBackupRestoreConfiguration(String hdfsUrl, String hadoopVersion, String hdfsBackupDir)
+ throws Exception {
+ StringBuffer errorCheck = new StringBuffer();
+ if (hdfsUrl == null || hdfsUrl.length() == 0) {
+ errorCheck.append("\n HDFS Url not configured");
+ }
+ if (hadoopVersion == null || hadoopVersion.length() == 0) {
+ errorCheck.append("\n HDFS version not configured");
+ }
+ if (hdfsBackupDir == null || hdfsBackupDir.length() == 0) {
+ errorCheck.append("\n HDFS backup directory not configured");
+ }
+ if (errorCheck.length() > 0) {
+ throw new Exception("Incomplete hdfs configuration" + errorCheck);
+ }
+ }
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/AsterixEventServiceClient.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/AsterixEventServiceClient.java
new file mode 100644
index 0000000..8947247
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/AsterixEventServiceClient.java
@@ -0,0 +1,229 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.management;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.event.driver.EventDriver;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.event.Events;
+import edu.uci.ics.asterix.event.schema.pattern.Event;
+import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
+import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.schema.pattern.Value;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.installer.schema.conf.Configuration;
+
+public class AsterixEventServiceClient {
+
+ private static final Logger LOGGER = Logger.getLogger(AsterixEventServiceClient.class.getName());
+
+ private EventTask[] tasks;
+ private boolean dryRun = false;
+ private LinkedBlockingQueue<EventTaskReport> msgInbox = new LinkedBlockingQueue<EventTaskReport>();
+ private AtomicInteger pendingTasks = new AtomicInteger(0);
+ private final Cluster cluster;
+ private IPatternListener listener;
+ private IOutputHandler outputHandler;
+ private Events events;
+ private String eventsHomeDir;
+ private Configuration configuration;
+
+ public AsterixEventServiceClient(Configuration configuration, String eventsHomeDir, Cluster cluster,
+ boolean transferArtifacts, boolean dryRun, IOutputHandler outputHandler) throws Exception {
+ this.eventsHomeDir = eventsHomeDir;
+ this.events = initializeEvents();
+ this.cluster = cluster;
+ this.dryRun = dryRun;
+ this.configuration = configuration;
+ this.outputHandler = outputHandler;
+ if (!dryRun && transferArtifacts) {
+ initializeCluster(getEventsDir());
+ }
+ }
+
+ public void submit(Patterns patterns) throws Exception {
+ if (patterns.getPattern().isEmpty()) {
+ return;
+ }
+ initTasks(patterns);
+ try {
+ waitForCompletion();
+ } catch (InterruptedException ie) {
+ LOGGER.info("Interrupted exception :" + ie);
+ } catch (Exception e) {
+ throw e;
+ }
+
+ }
+
+ public void submit(Patterns patterns, IPatternListener listener) throws Exception {
+ this.listener = listener;
+ initTasks(patterns);
+ }
+
+ private void initTasks(Patterns patterns) {
+ tasks = new EventTask[patterns.getPattern().size()];
+ pendingTasks.set(tasks.length);
+ int index = 0;
+ for (Pattern pattern : patterns.getPattern()) {
+ tasks[index] = new EventTask(pattern, this);
+ tasks[index].start();
+ index++;
+ }
+ }
+
+ public Cluster getCluster() {
+ return cluster;
+ }
+
+ public boolean isDryRun() {
+ return dryRun;
+ }
+
+ public Events getEvents() {
+ return events;
+ }
+
+ public String getEventsDir() {
+ return eventsHomeDir + File.separator + AsterixEventServiceUtil.EVENT_DIR;
+ }
+
+ public synchronized void notifyCompletion(EventTaskReport report) {
+
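+ // A successful report is delivered to the registered listener (async mode)
+ // or queued for waitForCompletion() (blocking mode); a failed report first
+ // cancels all tasks that have not completed yet.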
+ if (report.isSuccess()) {
+ if (listener != null) {
+ pendingTasks.decrementAndGet();
+ listener.eventCompleted(report);
+ if (pendingTasks.get() == 0) {
+ listener.jobCompleted();
+ }
+ } else {
+ try {
+ msgInbox.put(report);
+ } catch (InterruptedException e) {
+ }
+ }
+ } else {
+ for (EventTask t : tasks) {
+ if (t.getState() == EventTask.State.INITIALIZED || t.getState() == EventTask.State.IN_PROGRESS) {
+ t.cancel();
+ }
+ }
+ if (listener != null) {
+ listener.jobFailed(report);
+ } else {
+ try {
+ msgInbox.put(report);
+ } catch (InterruptedException e) {
+ }
+ }
+ }
+ }
+
+ private void waitForCompletion() throws Exception {
+ while (true) {
+ EventTaskReport report = msgInbox.take();
+ if (report.isSuccess()) {
+ if (pendingTasks.decrementAndGet() == 0) {
+ break;
+ }
+ } else {
+ throw new RuntimeException(report.getException().getMessage());
+ }
+ }
+ }
+
+ private void initializeCluster(String eventsDir) throws Exception {
+ Patterns patterns = initPattern(eventsDir);
+ submit(patterns);
+ }
+
+ private Patterns initPattern(String eventsDir) throws Exception {
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ String workingDir = cluster.getWorkingDir().getDir();
+ String username = cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername();
+ patternList.add(getDirectoryTransferPattern(username, eventsDir, nodeid,
+ cluster.getMasterNode().getClusterIp(), workingDir));
+
+ JAXBContext ctx = JAXBContext.newInstance(Configuration.class);
+ Marshaller marshaller = ctx.createMarshaller();
+ marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+ String outputPathDir = System.getProperty("java.io.tmpdir") + File.separator + "conf-"
+ + System.getProperty("user.name");
+ new File(outputPathDir).mkdirs();
+ String outputPath = outputPathDir + File.separator + "configuration.xml";
+ marshaller.marshal(configuration, new FileOutputStream(outputPath));
+
+ patternList.add(getFileTransferPattern(username, outputPath, nodeid, cluster.getMasterNode().getClusterIp(),
+ workingDir));
+
+ if (!cluster.getWorkingDir().isNFS()) {
+ for (Node node : cluster.getNode()) {
+ patternList.add(getDirectoryTransferPattern(username, eventsDir, nodeid, node.getClusterIp(),
+ workingDir));
+
+ }
+ }
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Pattern getDirectoryTransferPattern(String username, String src, Nodeid srcNode, String destNodeIp,
+ String destDir) {
+ String pargs = username + " " + src + " " + destNodeIp + " " + destDir;
+ Event event = new Event("directory_transfer", srcNode, pargs);
+ return new Pattern(null, 1, null, event);
+ }
+
+ private Pattern getFileTransferPattern(String username, String src, Nodeid srcNode, String destNodeIp,
+ String destDir) {
+ String pargs = username + " " + src + " " + destNodeIp + " " + destDir;
+ Event event = new Event("file_transfer", srcNode, pargs);
+ return new Pattern(null, 1, null, event);
+ }
+
+ public IOutputHandler getErrorHandler() {
+ return outputHandler;
+ }
+
+ private Events initializeEvents() throws JAXBException, FileNotFoundException {
+ File file = new File(getEventsDir() + File.separator + "events.xml");
+ JAXBContext eventCtx = JAXBContext.newInstance(Events.class);
+ Unmarshaller unmarshaller = eventCtx.createUnmarshaller();
+ events = (Events) unmarshaller.unmarshal(file);
+ return events;
+ }
+
+ public String getEventsHomeDir() {
+ return eventsHomeDir;
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
index 45499f4..82b6e9e 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventExecutor.java
@@ -23,11 +23,13 @@
import org.apache.commons.io.IOUtils;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
import edu.uci.ics.asterix.event.driver.EventDriver;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.Node;
import edu.uci.ics.asterix.event.schema.cluster.Property;
import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
public class EventExecutor {
@@ -40,10 +42,11 @@
private static final String DAEMON = "DAEMON";
public void executeEvent(Node node, String script, List<String> args, boolean isDaemon, Cluster cluster,
- Pattern pattern, IOutputHandler outputHandler, EventrixClient client) throws IOException {
+ Pattern pattern, IOutputHandler outputHandler, AsterixEventServiceClient client) throws IOException {
List<String> pargs = new ArrayList<String>();
pargs.add("/bin/bash");
- pargs.add(client.getEventsDir() + File.separator + "scripts" + File.separator + EXECUTE_SCRIPT);
+ pargs.add(client.getEventsHomeDir() + File.separator + AsterixEventServiceUtil.EVENT_DIR + File.separator
+ + EXECUTE_SCRIPT);
StringBuffer envBuffer = new StringBuffer(IP_LOCATION + "=" + node.getClusterIp() + " ");
boolean isMasterNode = node.getId().equals(cluster.getMasterNode().getId());
@@ -60,6 +63,13 @@
if (javaOpts != null) {
builder.append(javaOpts);
}
+ if (node.getDebugPort() != null) {
+ int debugPort = node.getDebugPort().intValue();
+ if (javaOpts == null || !javaOpts.contains("-Xdebug")) {
+ builder.append(" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=" + debugPort);
+ }
+ }
builder.append("\"");
envBuffer.append("JAVA_OPTS" + "=" + builder + " ");
}
@@ -71,6 +81,13 @@
if (javaOpts != null) {
builder.append(javaOpts);
}
+ if (node.getDebugPort() != null) {
+ int debugPort = node.getDebugPort().intValue();
+ if (javaOpts == null || !javaOpts.contains("-Xdebug")) {
+ builder.append(" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=" + debugPort);
+ }
+ }
builder.append("\"");
envBuffer.append("JAVA_OPTS" + "=" + builder + " ");
}
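
Editor's note: both hunks above append a JDWP agent to JAVA_OPTS when a node declares a debug port; as corrected above, the -Xdebug membership test must tolerate a null javaOpts. A sketch of the null-safe assembly follows; buildJavaOpts is a hypothetical helper, not part of EventExecutor.

public class JavaOptsSketch {

    // Appends a JDWP agent only when a debug port is set and not already present.
    static String buildJavaOpts(String javaOpts, Integer debugPort) {
        StringBuilder builder = new StringBuilder();
        if (javaOpts != null) {
            builder.append(javaOpts);
        }
        if (debugPort != null && (javaOpts == null || !javaOpts.contains("-Xdebug"))) {
            builder.append(" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=").append(debugPort);
        }
        return builder.toString().trim();
    }

    public static void main(String[] args) {
        System.out.println(buildJavaOpts("-Xmx1g", 8701));
        System.out.println(buildJavaOpts(null, 8701)); // no NPE; agent string still emitted
    }
}
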
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
index a764f9a..f98230b 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventTask.java
@@ -20,6 +20,7 @@
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
+
import org.apache.log4j.Logger;
import edu.uci.ics.asterix.event.driver.EventDriver;
@@ -30,134 +31,124 @@
public class EventTask extends TimerTask {
- public static enum State {
- INITIALIZED, IN_PROGRESS, COMPLETED, FAILED
- }
+ public static enum State {
+ INITIALIZED,
+ IN_PROGRESS,
+ COMPLETED,
+ FAILED
+ }
- private static final Logger logger = Logger.getLogger(EventTask.class
- .getName());
+ private static final Logger logger = Logger.getLogger(EventTask.class.getName());
- private Pattern pattern;
- private Event event;
- private long interval = 0;
- private long initialDelay = 0;
- private int maxOccurs = Integer.MAX_VALUE;
- private int occurrenceCount = 0;
- private Timer timer;
- private String taskScript;
- private Node location;
- private List<String> taskArgs;
- private EventrixClient client;
- private List<Node> candidateLocations;
- private boolean dynamicLocation = false;
- private boolean reuseLocation = false;
- private State state;
+ private Pattern pattern;
+ private Event event;
+ private long interval = 0;
+ private long initialDelay = 0;
+ private int maxOccurs = Integer.MAX_VALUE;
+ private int occurrenceCount = 0;
+ private Timer timer;
+ private String taskScript;
+ private Node location;
+ private List<String> taskArgs;
+ private AsterixEventServiceClient client;
+ private List<Node> candidateLocations;
+ private boolean dynamicLocation = false;
+ private boolean reuseLocation = false;
+ private State state;
-
+ public EventTask(Pattern pattern, AsterixEventServiceClient client) {
+ this.pattern = pattern;
+ this.client = client;
+ Period period = pattern.getPeriod();
+ if (period != null && period.getAbsvalue() != null) {
+ this.interval = EventUtil.parseTimeInterval(period.getAbsvalue(), period.getUnit());
+ }
+ if (pattern.getDelay() != null) {
+ this.initialDelay = EventUtil.parseTimeInterval(new ValueType(pattern.getDelay().getValue()), pattern
+ .getDelay().getUnit());
+ }
+ if (pattern.getMaxOccurs() != null) {
+ this.maxOccurs = pattern.getMaxOccurs();
+ }
+ this.timer = new Timer();
+ taskArgs = EventUtil.getEventArgs(pattern);
+ candidateLocations = EventUtil.getCandidateLocations(pattern, client.getCluster());
+ if (pattern.getEvent().getNodeid().getValue().getRandom() != null && period != null && maxOccurs > 1) {
+ dynamicLocation = true;
+ reuseLocation = pattern.getEvent().getNodeid().getValue().getRandom().getRange().isReuse();
+ } else {
+ location = EventUtil.getEventLocation(pattern, candidateLocations, client.getCluster());
+ }
+ String scriptsDir;
+ // location is null when it is resolved dynamically per run; dynamic candidates
+ // are always cluster nodes, so fall through to the cluster working dir.
+ if (location != null && location.getId().equals(EventDriver.CLIENT_NODE_ID)) {
+ scriptsDir = client.getEventsDir();
+ } else {
+ scriptsDir = client.getCluster().getWorkingDir().getDir() + File.separator + "events";
+ }
+ event = EventUtil.getEvent(pattern, client.getEvents());
+ taskScript = scriptsDir + File.separator + event.getScript();
+ state = State.INITIALIZED;
+ }
- public EventTask(Pattern pattern, EventrixClient client) {
- this.pattern = pattern;
- this.client = client;
- Period period = pattern.getPeriod();
- if (period != null && period.getAbsvalue() != null) {
- this.interval = EventUtil.parseTimeInterval(period.getAbsvalue(),
- period.getUnit());
- }
- if (pattern.getDelay() != null) {
- this.initialDelay = EventUtil.parseTimeInterval(new ValueType(
- pattern.getDelay().getValue()), pattern.getDelay()
- .getUnit());
- }
- if (pattern.getMaxOccurs() != null) {
- this.maxOccurs = pattern.getMaxOccurs();
- }
- this.timer = new Timer();
- taskArgs = EventUtil.getEventArgs(pattern);
- candidateLocations = EventUtil.getCandidateLocations(pattern,
- client.getCluster());
- if (pattern.getEvent().getNodeid().getValue().getRandom() != null
- && period != null && maxOccurs > 1) {
- dynamicLocation = true;
- reuseLocation = pattern.getEvent().getNodeid().getValue()
- .getRandom().getRange().isReuse();
- } else {
- location = EventUtil.getEventLocation(pattern, candidateLocations,
- client.getCluster());
- }
- String scriptsDir;
- if (location.getId().equals(EventDriver.CLIENT_NODE_ID)) {
- scriptsDir = client.getEventsDir() + File.separator + "events";
- } else {
- scriptsDir = client.getCluster().getWorkingDir().getDir()
- + File.separator + "eventrix" + File.separator + "events";
- }
- event = EventUtil.getEvent(pattern, client.getEvents());
- taskScript = scriptsDir + File.separator + event.getScript();
- state = State.INITIALIZED;
- }
+ public void start() {
+ if (interval > 0) {
+ timer.schedule(this, initialDelay, interval);
+ } else {
+ timer.schedule(this, initialDelay);
+ }
+ }
- public void start() {
- if (interval > 0) {
- timer.schedule(this, initialDelay, interval);
- } else {
- timer.schedule(this, initialDelay);
- }
- }
+ @Override
+ public void run() {
+ if (candidateLocations.size() == 0) {
+ timer.cancel();
+ client.notifyCompletion(new EventTaskReport(this));
+ } else {
+ if (dynamicLocation) {
+ location = EventUtil.getEventLocation(pattern, candidateLocations, client.getCluster());
+ if (!reuseLocation) {
+ candidateLocations.remove(location);
+ }
+ }
- @Override
- public void run() {
- if (candidateLocations.size() == 0) {
- timer.cancel();
- client.notifyCompletion(new EventTaskReport(this));
- } else {
- if (dynamicLocation) {
- location = EventUtil.getEventLocation(pattern,
- candidateLocations, client.getCluster());
- if (!reuseLocation) {
- candidateLocations.remove(location);
- }
- }
+ logger.debug(EventUtil.dateFormat.format(new Date()) + " " + "EVENT "
+ + pattern.getEvent().getType().toUpperCase() + " at " + location.getId().toUpperCase());
+ try {
+ if (!client.isDryRun()) {
+ new EventExecutor().executeEvent(location, taskScript, taskArgs, event.isDaemon(),
+ client.getCluster(), pattern, client.getErrorHandler(), client);
+ }
+ occurrenceCount++;
+ if (occurrenceCount >= maxOccurs) {
+ timer.cancel();
+ client.notifyCompletion(new EventTaskReport(this));
+ }
+ } catch (IOException ioe) {
+ timer.cancel();
+ client.notifyCompletion(new EventTaskReport(this, false, ioe));
+ }
+ }
- logger.debug(EventUtil.dateFormat.format(new Date()) + " "
- + "EVENT " + pattern.getEvent().getType().toUpperCase()
- + " at " + location.getId().toUpperCase());
- try {
- if (!client.isDryRun()) {
- new EventExecutor().executeEvent(location, taskScript,
- taskArgs, event.isDaemon(), client.getCluster(),
- pattern, client.getErrorHandler(), client);
- }
- occurrenceCount++;
- if (occurrenceCount >= maxOccurs) {
- timer.cancel();
- client.notifyCompletion(new EventTaskReport(this));
- }
- } catch (IOException ioe) {
- timer.cancel();
- client.notifyCompletion(new EventTaskReport(this, false, ioe));
- }
- }
+ }
- }
+ public Node getLocation() {
+ return location;
+ }
- public Node getLocation() {
- return location;
- }
+ public long getInterval() {
+ return interval;
+ }
- public long getInterval() {
- return interval;
- }
+ public long getInitialDelay() {
+ return initialDelay;
+ }
- public long getInitialDelay() {
- return initialDelay;
- }
+ public Pattern getPattern() {
+ return pattern;
+ }
- public Pattern getPattern() {
- return pattern;
- }
-
- public State getState() {
- return state;
- }
+ public State getState() {
+ return state;
+ }
}
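
Editor's note: EventTask.start() above chooses a one-shot or recurring java.util.Timer schedule depending on whether the pattern carries a period. A standalone sketch of that idiom:

import java.util.Timer;
import java.util.TimerTask;

public class TimerSketch {
    public static void main(String[] args) throws InterruptedException {
        Timer timer = new Timer();
        long initialDelay = 100; // ms
        long interval = 0;       // 0 => fire once, as in EventTask.start()

        TimerTask task = new TimerTask() {
            @Override
            public void run() {
                System.out.println("event fired");
            }
        };

        if (interval > 0) {
            timer.schedule(task, initialDelay, interval); // recurring, fixed-delay
        } else {
            timer.schedule(task, initialDelay);           // one-shot
        }

        Thread.sleep(500);
        timer.cancel(); // stop the scheduling thread so the JVM can exit
    }
}
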
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
index 887e272..49b7abf 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
@@ -16,7 +16,6 @@
import java.io.File;
import java.io.IOException;
-import java.math.BigInteger;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -36,252 +35,232 @@
public class EventUtil {
- public static final String EVENTS_DIR = "events";
- public static final String CLUSTER_CONF = "config/cluster.xml";
- public static final String PATTERN_CONF = "config/pattern.xml";
- public static final DateFormat dateFormat = new SimpleDateFormat(
- "yyyy/MM/dd HH:mm:ss");
- public static final String NC_JAVA_OPTS = "nc.java.opts";
- public static final String CC_JAVA_OPTS = "cc.java.opts";
+ public static final String EVENTS_DIR = "events";
+ public static final String CLUSTER_CONF = "config/cluster.xml";
+ public static final String PATTERN_CONF = "config/pattern.xml";
+ public static final DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
+ public static final String NC_JAVA_OPTS = "nc.java.opts";
+ public static final String CC_JAVA_OPTS = "cc.java.opts";
- private static final String IP_LOCATION = "IP_LOCATION";
- private static final String CLUSTER_ENV = "ENV";
- private static final String SCRIPT = "SCRIPT";
- private static final String ARGS = "ARGS";
- private static final String EXECUTE_SCRIPT = "events/execute.sh";
- private static final String LOCALHOST = "localhost";
- private static final String LOCALHOST_IP = "127.0.0.1";
+ private static final String IP_LOCATION = "IP_LOCATION";
+ private static final String CLUSTER_ENV = "ENV";
+ private static final String SCRIPT = "SCRIPT";
+ private static final String ARGS = "ARGS";
+ private static final String EXECUTE_SCRIPT = "events/execute.sh";
+ private static final String LOCALHOST = "localhost";
+ private static final String LOCALHOST_IP = "127.0.0.1";
- public static Cluster getCluster(String clusterConfigurationPath)
- throws JAXBException {
- File file = new File(clusterConfigurationPath);
- JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
- Unmarshaller unmarshaller = ctx.createUnmarshaller();
- Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
- if (cluster.getMasterNode().getClusterIp().equals(LOCALHOST)) {
- cluster.getMasterNode().setClusterIp(LOCALHOST_IP);
- }
- for (Node node : cluster.getNode()) {
- if (node.getClusterIp().equals(LOCALHOST)) {
- node.setClusterIp(LOCALHOST_IP);
- }
- }
- return cluster;
- }
+ public static Cluster getCluster(String clusterConfigurationPath) throws JAXBException {
+ File file = new File(clusterConfigurationPath);
+ JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+ Unmarshaller unmarshaller = ctx.createUnmarshaller();
+ Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
+ if (cluster.getMasterNode().getClusterIp().equals(LOCALHOST)) {
+ cluster.getMasterNode().setClusterIp(LOCALHOST_IP);
+ }
+ for (Node node : cluster.getNode()) {
+ if (node.getClusterIp().equals(LOCALHOST)) {
+ node.setClusterIp(LOCALHOST_IP);
+ }
+ }
+ return cluster;
+ }
- public static long parseTimeInterval(ValueType v, String unit)
- throws IllegalArgumentException {
- int val = 0;
- switch (v.getType()) {
- case ABS:
- val = Integer.parseInt(v.getAbsoluteValue());
- break;
- case RANDOM_MIN_MAX:
- val = Randomizer.getInstance().getRandomInt(v.getMin(), v.getMax());
- break;
- case RANDOM_RANGE:
- String[] values = v.getRangeSet();
- val = Integer.parseInt(values[Randomizer.getInstance()
- .getRandomInt(0, values.length - 1)]);
- break;
- }
- return computeInterval(val, unit);
- }
+ public static long parseTimeInterval(ValueType v, String unit) throws IllegalArgumentException {
+ int val = 0;
+ switch (v.getType()) {
+ case ABS:
+ val = Integer.parseInt(v.getAbsoluteValue());
+ break;
+ case RANDOM_MIN_MAX:
+ val = Randomizer.getInstance().getRandomInt(v.getMin(), v.getMax());
+ break;
+ case RANDOM_RANGE:
+ String[] values = v.getRangeSet();
+ val = Integer.parseInt(values[Randomizer.getInstance().getRandomInt(0, values.length - 1)]);
+ break;
+ }
+ return computeInterval(val, unit);
+ }
- public static long parseTimeInterval(String v, String unit)
- throws IllegalArgumentException {
- int value = Integer.parseInt(v);
- return computeInterval(value, unit);
- }
+ public static long parseTimeInterval(String v, String unit) throws IllegalArgumentException {
+ int value = Integer.parseInt(v);
+ return computeInterval(value, unit);
+ }
- private static long computeInterval(int val, String unit) {
- int vmult = 1;
- if ("hr".equalsIgnoreCase(unit)) {
- vmult = 3600 * 1000;
- } else if ("min".equalsIgnoreCase(unit)) {
- vmult = 60 * 1000;
- } else if ("sec".equalsIgnoreCase(unit)) {
- vmult = 1000;
- } else
- throw new IllegalArgumentException(
- " invalid unit value specified for frequency (hr,min,sec)");
- return val * vmult;
+ private static long computeInterval(int val, String unit) {
+ int vmult = 1;
+ if ("hr".equalsIgnoreCase(unit)) {
+ vmult = 3600 * 1000;
+ } else if ("min".equalsIgnoreCase(unit)) {
+ vmult = 60 * 1000;
+ } else if ("sec".equalsIgnoreCase(unit)) {
+ vmult = 1000;
+ } else {
+ throw new IllegalArgumentException("Invalid unit value specified for frequency; expected hr, min, or sec: " + unit);
+ }
+ return val * vmult;
- }
+ }
- public static Event getEvent(Pattern pattern, Events events) {
- for (Event event : events.getEvent()) {
- if (event.getType().equals(pattern.getEvent().getType())) {
- return event;
- }
- }
- throw new IllegalArgumentException(" Unknown event type"
- + pattern.getEvent().getType());
- }
+ public static Event getEvent(Pattern pattern, Events events) {
+ for (Event event : events.getEvent()) {
+ if (event.getType().equals(pattern.getEvent().getType())) {
+ return event;
+ }
+ }
+ throw new IllegalArgumentException(" Unknown event type" + pattern.getEvent().getType());
+ }
- public static Node getEventLocation(Pattern pattern,
- List<Node> candidateLocations, Cluster cluster) {
- ValueType value = new ValueType(pattern.getEvent().getNodeid()
- .getValue());
- Node location = null;
- Type vtype = value.getType();
+ public static Node getEventLocation(Pattern pattern, List<Node> candidateLocations, Cluster cluster) {
+ ValueType value = new ValueType(pattern.getEvent().getNodeid().getValue());
+ Node location = null;
+ Type vtype = value.getType();
- switch (vtype) {
- case ABS:
- location = getNodeFromId(value.getAbsoluteValue(), cluster);
- break;
- case RANDOM_RANGE:
- int nodeIndex = Randomizer.getInstance().getRandomInt(0,
- candidateLocations.size() - 1);
- location = candidateLocations.get(nodeIndex);
- break;
- case RANDOM_MIN_MAX:
- throw new IllegalStateException(
- " Canont configure a min max value range for location");
- }
- return location;
+ switch (vtype) {
+ case ABS:
+ location = getNodeFromId(value.getAbsoluteValue(), cluster);
+ break;
+ case RANDOM_RANGE:
+ int nodeIndex = Randomizer.getInstance().getRandomInt(0, candidateLocations.size() - 1);
+ location = candidateLocations.get(nodeIndex);
+ break;
+ case RANDOM_MIN_MAX:
+ throw new IllegalStateException(" Canont configure a min max value range for location");
+ }
+ return location;
- }
+ }
- public static List<Node> getCandidateLocations(Pattern pattern,
- Cluster cluster) {
- ValueType value = new ValueType(pattern.getEvent().getNodeid()
- .getValue());
- List<Node> candidateList = new ArrayList<Node>();
- switch (value.getType()) {
- case ABS:
- candidateList.add(getNodeFromId(value.getAbsoluteValue(), cluster));
- break;
- case RANDOM_RANGE:
- boolean anyOption = false;
- String[] values = value.getRangeSet();
- for (String v : values) {
- if (v.equalsIgnoreCase("ANY")) {
- anyOption = true;
- }
- }
- if (anyOption) {
- for (Node node : cluster.getNode()) {
- candidateList.add(node);
- }
- } else {
- boolean found = false;
- for (String v : values) {
- for (Node node : cluster.getNode()) {
- if (node.getId().equals(v)) {
- candidateList.add(node);
- found = true;
- break;
- }
- }
- if (!found) {
- throw new IllegalStateException("Unknonw nodeId : " + v);
- }
- found = false;
- }
+ public static List<Node> getCandidateLocations(Pattern pattern, Cluster cluster) {
+ ValueType value = new ValueType(pattern.getEvent().getNodeid().getValue());
+ List<Node> candidateList = new ArrayList<Node>();
+ switch (value.getType()) {
+ case ABS:
+ candidateList.add(getNodeFromId(value.getAbsoluteValue(), cluster));
+ break;
+ case RANDOM_RANGE:
+ boolean anyOption = false;
+ String[] values = value.getRangeSet();
+ for (String v : values) {
+ if (v.equalsIgnoreCase("ANY")) {
+ anyOption = true;
+ }
+ }
+ if (anyOption) {
+ for (Node node : cluster.getNode()) {
+ candidateList.add(node);
+ }
+ } else {
+ boolean found = false;
+ for (String v : values) {
+ for (Node node : cluster.getNode()) {
+ if (node.getId().equals(v)) {
+ candidateList.add(node);
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ throw new IllegalStateException("Unknonw nodeId : " + v);
+ }
+ found = false;
+ }
- }
- String[] excluded = value.getRangeExcluded();
- if (excluded != null && excluded.length > 0) {
- List<Node> markedForRemoval = new ArrayList<Node>();
- for (String exclusion : excluded) {
- for (Node node : candidateList) {
- if (node.getId().equals(exclusion)) {
- markedForRemoval.add(node);
- }
- }
- }
- candidateList.removeAll(markedForRemoval);
- }
- break;
- case RANDOM_MIN_MAX:
- throw new IllegalStateException(
- " Invalid value configured for location");
- }
- return candidateList;
- }
+ }
+ String[] excluded = value.getRangeExcluded();
+ if (excluded != null && excluded.length > 0) {
+ List<Node> markedForRemoval = new ArrayList<Node>();
+ for (String exclusion : excluded) {
+ for (Node node : candidateList) {
+ if (node.getId().equals(exclusion)) {
+ markedForRemoval.add(node);
+ }
+ }
+ }
+ candidateList.removeAll(markedForRemoval);
+ }
+ break;
+ case RANDOM_MIN_MAX:
+ throw new IllegalStateException(" Invalid value configured for location");
+ }
+ return candidateList;
+ }
- private static Node getNodeFromId(String nodeid, Cluster cluster) {
- if (nodeid.equals(EventDriver.CLIENT_NODE.getId())) {
- return EventDriver.CLIENT_NODE;
- }
+ private static Node getNodeFromId(String nodeid, Cluster cluster) {
+ if (nodeid.equals(EventDriver.CLIENT_NODE.getId())) {
+ return EventDriver.CLIENT_NODE;
+ }
- if (nodeid.equals(cluster.getMasterNode().getId())) {
- String logDir = cluster.getMasterNode().getLogDir() == null ? cluster
- .getLogDir()
- : cluster.getMasterNode().getLogDir();
- String javaHome = cluster.getMasterNode().getJavaHome() == null ? cluster
- .getJavaHome()
- : cluster.getMasterNode().getJavaHome();
- return new Node(cluster.getMasterNode().getId(), cluster
- .getMasterNode().getClusterIp(), javaHome, logDir, null,
- null, null);
- }
+ if (nodeid.equals(cluster.getMasterNode().getId())) {
+ String logDir = cluster.getMasterNode().getLogDir() == null ? cluster.getLogDir() : cluster.getMasterNode()
+ .getLogDir();
+ String javaHome = cluster.getMasterNode().getJavaHome() == null ? cluster.getJavaHome() : cluster
+ .getMasterNode().getJavaHome();
+ return new Node(cluster.getMasterNode().getId(), cluster.getMasterNode().getClusterIp(), javaHome, logDir,
+ null, null, null, cluster.getMasterNode().getDebugPort());
+ }
- List<Node> nodeList = cluster.getNode();
- for (Node node : nodeList) {
- if (node.getId().equals(nodeid)) {
- return node;
- }
- }
- StringBuffer buffer = new StringBuffer();
- buffer.append(EventDriver.CLIENT_NODE.getId() + ",");
- buffer.append(cluster.getMasterNode().getId() + ",");
- for (Node v : cluster.getNode()) {
- buffer.append(v.getId() + ",");
- }
- buffer.deleteCharAt(buffer.length() - 1);
- throw new IllegalArgumentException("Unknown node id :" + nodeid
- + " valid ids:" + buffer);
- }
+ List<Node> nodeList = cluster.getNode();
+ for (Node node : nodeList) {
+ if (node.getId().equals(nodeid)) {
+ return node;
+ }
+ }
+ StringBuffer buffer = new StringBuffer();
+ buffer.append(EventDriver.CLIENT_NODE.getId() + ",");
+ buffer.append(cluster.getMasterNode().getId() + ",");
+ for (Node v : cluster.getNode()) {
+ buffer.append(v.getId() + ",");
+ }
+ buffer.deleteCharAt(buffer.length() - 1);
+ throw new IllegalArgumentException("Unknown node id :" + nodeid + " valid ids:" + buffer);
+ }
- public static void executeEventScript(Node node, String script,
- List<String> args, Cluster cluster) throws IOException,
- InterruptedException {
- List<String> pargs = new ArrayList<String>();
- pargs.add("/bin/bash");
- pargs.add(EventDriver.getEventsDir() + "/" + EXECUTE_SCRIPT);
- StringBuffer argBuffer = new StringBuffer();
- String env = EventDriver.getStringifiedEnv(cluster) + " " + IP_LOCATION
- + "=" + node.getClusterIp();
- if (args != null) {
- for (String arg : args) {
- argBuffer.append(arg + " ");
- }
- }
- ProcessBuilder pb = new ProcessBuilder(pargs);
- pb.environment().putAll(EventDriver.getEnvironment());
- pb.environment().put(IP_LOCATION, node.getClusterIp());
- pb.environment().put(CLUSTER_ENV, env);
- pb.environment().put(SCRIPT, script);
- pb.environment().put(ARGS, argBuffer.toString());
- pb.start();
- }
+ public static void executeEventScript(Node node, String script, List<String> args, Cluster cluster)
+ throws IOException, InterruptedException {
+ List<String> pargs = new ArrayList<String>();
+ pargs.add("/bin/bash");
+ pargs.add(EventDriver.getEventsDir() + "/" + EXECUTE_SCRIPT);
+ StringBuffer argBuffer = new StringBuffer();
+ String env = EventDriver.getStringifiedEnv(cluster) + " " + IP_LOCATION + "=" + node.getClusterIp();
+ if (args != null) {
+ for (String arg : args) {
+ argBuffer.append(arg + " ");
+ }
+ }
+ ProcessBuilder pb = new ProcessBuilder(pargs);
+ pb.environment().putAll(EventDriver.getEnvironment());
+ pb.environment().put(IP_LOCATION, node.getClusterIp());
+ pb.environment().put(CLUSTER_ENV, env);
+ pb.environment().put(SCRIPT, script);
+ pb.environment().put(ARGS, argBuffer.toString());
+ pb.start();
+ }
- public static void executeLocalScript(Node node, String script,
- List<String> args) throws IOException, InterruptedException {
- List<String> pargs = new ArrayList<String>();
- pargs.add("/bin/bash");
- pargs.add(script);
- if (args != null) {
- pargs.addAll(args);
- }
- ProcessBuilder pb = new ProcessBuilder(pargs);
- pb.environment().putAll(EventDriver.getEnvironment());
- pb.environment().put(IP_LOCATION, node.getClusterIp());
- pb.start();
- }
+ public static void executeLocalScript(Node node, String script, List<String> args) throws IOException,
+ InterruptedException {
+ List<String> pargs = new ArrayList<String>();
+ pargs.add("/bin/bash");
+ pargs.add(script);
+ if (args != null) {
+ pargs.addAll(args);
+ }
+ ProcessBuilder pb = new ProcessBuilder(pargs);
+ pb.environment().putAll(EventDriver.getEnvironment());
+ pb.environment().put(IP_LOCATION, node.getClusterIp());
+ pb.start();
+ }
- public static List<String> getEventArgs(Pattern pattern) {
- List<String> pargs = new ArrayList<String>();
- if (pattern.getEvent().getPargs() == null) {
- return pargs;
- }
- String[] args = pattern.getEvent().getPargs().split(" ");
- for (String arg : args) {
- pargs.add(arg.trim());
- }
- return pargs;
- }
+ public static List<String> getEventArgs(Pattern pattern) {
+ List<String> pargs = new ArrayList<String>();
+ if (pattern.getEvent().getPargs() == null) {
+ return pargs;
+ }
+ String[] args = pattern.getEvent().getPargs().split(" ");
+ for (String arg : args) {
+ pargs.add(arg.trim());
+ }
+ return pargs;
+ }
}
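
Editor's note: computeInterval above converts a count plus an hr/min/sec unit into milliseconds, so parseTimeInterval("5", "min") yields 5 * 60 * 1000 = 300000. A standalone sketch follows; the cast to long (avoiding int overflow for large hour values) is an editorial assumption beyond the original.

public class IntervalSketch {

    // Mirrors EventUtil.computeInterval: hr/min/sec multipliers to milliseconds.
    static long computeInterval(int val, String unit) {
        int vmult;
        if ("hr".equalsIgnoreCase(unit)) {
            vmult = 3600 * 1000;
        } else if ("min".equalsIgnoreCase(unit)) {
            vmult = 60 * 1000;
        } else if ("sec".equalsIgnoreCase(unit)) {
            vmult = 1000;
        } else {
            throw new IllegalArgumentException("Invalid unit: " + unit);
        }
        return (long) val * vmult; // long math; editorial hardening against overflow
    }

    public static void main(String[] args) {
        System.out.println(computeInterval(5, "min")); // 300000
        System.out.println(computeInterval(2, "hr"));  // 7200000
    }
}
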
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java
deleted file mode 100644
index 03f4061..0000000
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventrixClient.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.event.management;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Logger;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-
-import edu.uci.ics.asterix.event.driver.EventDriver;
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.event.schema.event.Events;
-import edu.uci.ics.asterix.event.schema.pattern.Event;
-import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
-import edu.uci.ics.asterix.event.schema.pattern.Pattern;
-import edu.uci.ics.asterix.event.schema.pattern.Patterns;
-import edu.uci.ics.asterix.event.schema.pattern.Value;
-
-public class EventrixClient {
-
- private static final Logger LOGGER = Logger.getLogger(EventrixClient.class.getName());
-
- private EventTask[] tasks;
- private boolean dryRun = false;
- private LinkedBlockingQueue<EventTaskReport> msgInbox = new LinkedBlockingQueue<EventTaskReport>();
- private AtomicInteger pendingTasks = new AtomicInteger(0);
- private final Cluster cluster;
- private IPatternListener listener;
- private IOutputHandler outputHandler;
- private Events events;
- private String eventsDir;
-
- public EventrixClient(String eventsDir, Cluster cluster, boolean dryRun, IOutputHandler outputHandler)
- throws Exception {
- this.eventsDir = eventsDir;
- this.events = initializeEvents();
- this.cluster = cluster;
- this.dryRun = dryRun;
- this.outputHandler = outputHandler;
- if (!dryRun) {
- initializeCluster(eventsDir);
- }
- }
-
- public void submit(Patterns patterns) throws Exception {
- initTasks(patterns);
- try {
- waitForCompletion();
- } catch (InterruptedException ie) {
- LOGGER.info("Interrupted exception :" + ie);
- } catch (Exception e) {
- throw e;
- }
-
- }
-
- public void submit(Patterns patterns, IPatternListener listener) throws Exception {
- this.listener = listener;
- initTasks(patterns);
- }
-
- private void initTasks(Patterns patterns) {
- tasks = new EventTask[patterns.getPattern().size()];
- pendingTasks.set(tasks.length);
- int index = 0;
- for (Pattern pattern : patterns.getPattern()) {
- tasks[index] = new EventTask(pattern, this);
- tasks[index].start();
- index++;
- }
- }
-
- public Cluster getCluster() {
- return cluster;
- }
-
- public boolean isDryRun() {
- return dryRun;
- }
-
- public Events getEvents() {
- return events;
- }
-
- public String getEventsDir() {
- return eventsDir;
- }
-
- public synchronized void notifyCompletion(EventTaskReport report) {
-
- if (report.isSuccess()) {
- if (listener != null) {
- pendingTasks.decrementAndGet();
- listener.eventCompleted(report);
- if (pendingTasks.get() == 0) {
- listener.jobCompleted();
- }
- } else {
- try {
- msgInbox.put(report);
- } catch (InterruptedException e) {
- }
- }
- } else {
- for (EventTask t : tasks) {
- if (t.getState() == EventTask.State.INITIALIZED || t.getState() == EventTask.State.IN_PROGRESS) {
- t.cancel();
- }
- }
- if (listener != null) {
- listener.jobFailed(report);
- } else {
- try {
- msgInbox.put(report);
- } catch (InterruptedException e) {
- }
- }
- }
- }
-
- private void waitForCompletion() throws Exception {
- while (true) {
- EventTaskReport report = msgInbox.take();
- if (report.isSuccess()) {
- if (pendingTasks.decrementAndGet() == 0) {
- break;
- }
- } else {
- throw new RuntimeException(report.getException().getMessage());
- }
- }
- }
-
- private void initializeCluster(String eventsDir) throws Exception {
- Patterns patterns = initPattern(eventsDir);
- submit(patterns);
- }
-
- private Patterns initPattern(String eventsDir) {
- Nodeid nodeid = new Nodeid(new Value(null,
- EventDriver.CLIENT_NODE.getId()));
- List<Pattern> patternList = new ArrayList<Pattern>();
- String workingDir = cluster.getWorkingDir().getDir();
- String username = cluster.getUsername() == null ? System
- .getProperty("user.name") : cluster.getUsername();
- patternList.add(getDirectoryTransferPattern(username, eventsDir,
- nodeid, cluster.getMasterNode().getClusterIp(), workingDir));
-
- if (!cluster.getWorkingDir().isNFS()) {
- for (Node node : cluster.getNode()) {
- patternList.add(getDirectoryTransferPattern(username,
- eventsDir, nodeid, node.getClusterIp(), workingDir));
- }
- }
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Pattern getDirectoryTransferPattern(String username, String src, Nodeid srcNode, String destNodeIp,
- String destDir) {
- String pargs = username + " " + src + " " + destNodeIp + " " + destDir;
- Event event = new Event("directory_transfer", srcNode, pargs);
- return new Pattern(null, 1, null, event);
- }
-
- public IOutputHandler getErrorHandler() {
- return outputHandler;
- }
-
- private Events initializeEvents() throws JAXBException, FileNotFoundException {
- File file = new File(eventsDir + File.separator + "events" + File.separator + "events.xml");
- JAXBContext eventCtx = JAXBContext.newInstance(Events.class);
- Unmarshaller unmarshaller = eventCtx.createUnmarshaller();
- events = (Events) unmarshaller.unmarshal(file);
- return events;
- }
-
-}
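
Editor's note: the deleted EventrixClient (superseded by AsterixEventServiceClient) tracked outstanding tasks with a blocking inbox and a pending-task countdown. A minimal sketch of that wait-for-completion pattern, assuming each worker posts exactly one report:

import java.util.concurrent.LinkedBlockingQueue;

public class CompletionSketch {
    public static void main(String[] args) throws InterruptedException {
        final LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();
        final int pending = 3;

        for (int i = 0; i < pending; i++) {
            final int id = i;
            new Thread(new Runnable() {
                public void run() {
                    inbox.add("task-" + id + " done"); // each task posts one report
                }
            }).start();
        }

        // Drain one report per task, mirroring waitForCompletion()'s countdown.
        for (int remaining = pending; remaining > 0; remaining--) {
            System.out.println(inbox.take());
        }
        System.out.println("all tasks completed");
    }
}
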
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/Randomizer.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/Randomizer.java
index 6d5c492..875ee63 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/Randomizer.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/Randomizer.java
@@ -19,48 +19,47 @@
public class Randomizer {
- private static final Logger LOGGER = Logger.getLogger(Randomizer.class
- .getName());
- private static Randomizer INSTANCE;
- private final Random random;
- private final int seed;
+ private static final Logger LOGGER = Logger.getLogger(Randomizer.class.getName());
+ private static Randomizer INSTANCE;
+ private final Random random;
+ private final int seed;
- public static Randomizer getInstance(int seed) {
- if (INSTANCE == null) {
- INSTANCE = new Randomizer(seed);
- }
- return INSTANCE;
- }
+ public static Randomizer getInstance(int seed) {
+ if (INSTANCE == null) {
+ INSTANCE = new Randomizer(seed);
+ }
+ return INSTANCE;
+ }
- public static Randomizer getInstance() {
- if (INSTANCE == null) {
- INSTANCE = new Randomizer();
- }
- return INSTANCE;
- }
+ public static Randomizer getInstance() {
+ if (INSTANCE == null) {
+ INSTANCE = new Randomizer();
+ }
+ return INSTANCE;
+ }
- private Randomizer() {
- Random rm = new Random();
- seed = rm.nextInt(10000);
- random = new Random(seed);
- LOGGER.info("SEED:" + seed);
- }
+ private Randomizer() {
+ Random rm = new Random();
+ seed = rm.nextInt(10000);
+ random = new Random(seed);
+ LOGGER.info("SEED:" + seed);
+ }
- private Randomizer(int seed) {
- this.seed = seed;
- random = new Random(seed);
- LOGGER.info("SEED:" + seed);
- }
+ private Randomizer(int seed) {
+ this.seed = seed;
+ random = new Random(seed);
+ LOGGER.info("SEED:" + seed);
+ }
- public Random getRandom() {
- return random;
- }
+ public Random getRandom() {
+ return random;
+ }
- public int getSeed() {
- return seed;
- }
+ public int getSeed() {
+ return seed;
+ }
- public int getRandomInt(int min, int max) {
- return min + random.nextInt(max - min + 1);
- }
+ public int getRandomInt(int min, int max) {
+ return min + random.nextInt(max - min + 1);
+ }
}
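
Editor's note: getRandomInt above is inclusive of both bounds because the +1 widens Random.nextInt's exclusive upper limit, and the singleton logs its seed so a run can be reproduced. A sketch showing both properties:

import java.util.Random;

public class RandomSketch {

    // Mirrors Randomizer.getRandomInt: inclusive of both min and max.
    static int getRandomInt(Random random, int min, int max) {
        return min + random.nextInt(max - min + 1);
    }

    public static void main(String[] args) {
        Random a = new Random(42);
        Random b = new Random(42);
        // Same seed => same sequence, which is why Randomizer logs its seed.
        for (int i = 0; i < 3; i++) {
            System.out.println(getRandomInt(a, 0, 4) + " == " + getRandomInt(b, 0, 4));
        }
    }
}
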
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/ValueType.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/ValueType.java
index b619002..1098fbe 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/ValueType.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/ValueType.java
@@ -18,76 +18,78 @@
public class ValueType {
- public static enum Type {
- ABS, RANDOM_RANGE, RANDOM_MIN_MAX
- }
+ public static enum Type {
+ ABS,
+ RANDOM_RANGE,
+ RANDOM_MIN_MAX
+ }
- private Value value;
- private Type type;
+ private Value value;
+ private Type type;
- public ValueType(Value value) {
- this.value = value;
- if (value.getAbsvalue() != null) {
- type = Type.ABS;
- } else if (value.getRandom() != null) {
- if (value.getRandom().getMinmax() != null) {
- type = Type.RANDOM_MIN_MAX;
- } else if (value.getRandom().getRange() != null) {
- type = Type.RANDOM_RANGE;
- } else {
- throw new IllegalStateException("Incorrect value type");
- }
- }
- }
+ public ValueType(Value value) {
+ this.value = value;
+ if (value.getAbsvalue() != null) {
+ type = Type.ABS;
+ } else if (value.getRandom() != null) {
+ if (value.getRandom().getMinmax() != null) {
+ type = Type.RANDOM_MIN_MAX;
+ } else if (value.getRandom().getRange() != null) {
+ type = Type.RANDOM_RANGE;
+ } else {
+ throw new IllegalStateException("Incorrect value type");
+ }
+ }
+ }
- public int getMin() {
- switch (type) {
- case RANDOM_MIN_MAX:
- return Integer.parseInt(value.getRandom().getMinmax().getMin());
- default:
- throw new IllegalStateException("");
- }
- }
+ public int getMin() {
+ switch (type) {
+ case RANDOM_MIN_MAX:
+ return Integer.parseInt(value.getRandom().getMinmax().getMin());
+ default:
+ throw new IllegalStateException("");
+ }
+ }
- public int getMax() {
- switch (type) {
- case RANDOM_MIN_MAX:
- return Integer.parseInt(value.getRandom().getMinmax().getMax());
- default:
- throw new IllegalStateException("");
- }
- }
+ public int getMax() {
+ switch (type) {
+ case RANDOM_MIN_MAX:
+ return Integer.parseInt(value.getRandom().getMinmax().getMax());
+ default:
+ throw new IllegalStateException("");
+ }
+ }
- public String[] getRangeSet() {
- switch (type) {
- case RANDOM_RANGE:
- return value.getRandom().getRange().getSet().split(" ");
- default:
- throw new IllegalStateException("");
- }
- }
+ public String[] getRangeSet() {
+ switch (type) {
+ case RANDOM_RANGE:
+ return value.getRandom().getRange().getSet().split(" ");
+ default:
+ throw new IllegalStateException("");
+ }
+ }
- public String[] getRangeExcluded() {
- switch (type) {
- case RANDOM_RANGE:
- String exl = value.getRandom().getRange().getExclude();
- return exl != null ? exl.split(" ") : null;
- default:
- throw new IllegalStateException("");
- }
- }
+ public String[] getRangeExcluded() {
+ switch (type) {
+ case RANDOM_RANGE:
+ String exl = value.getRandom().getRange().getExclude();
+ return exl != null ? exl.split(" ") : null;
+ default:
+ throw new IllegalStateException("");
+ }
+ }
- public String getAbsoluteValue() {
- switch (type) {
- case ABS:
- return value.getAbsvalue();
- default:
- throw new IllegalStateException("");
- }
- }
+ public String getAbsoluteValue() {
+ switch (type) {
+ case ABS:
+ return value.getAbsvalue();
+ default:
+ throw new IllegalStateException("");
+ }
+ }
- public Type getType() {
- return type;
- }
+ public Type getType() {
+ return type;
+ }
}
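
Editor's note: getRangeSet and getRangeExcluded above split space-delimited node sets out of the pattern XML. A tiny sketch of that parsing; the range and exclusion strings are hypothetical:

import java.util.Arrays;

public class RangeSketch {
    public static void main(String[] args) {
        // Hypothetical values as they would appear in a pattern's <range> element.
        String set = "nc1 nc2 nc3 ANY";
        String exclude = "nc2";

        String[] candidates = set.split(" ");
        String[] excluded = exclude != null ? exclude.split(" ") : null;

        System.out.println(Arrays.toString(candidates)); // [nc1, nc2, nc3, ANY]
        System.out.println(Arrays.toString(excluded));   // [nc2]
    }
}
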
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixInstance.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixInstance.java
new file mode 100644
index 0000000..87f14b0
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixInstance.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.model;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.common.configuration.Property;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+
+public class AsterixInstance implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final int WEB_INTERFACE_PORT_DEFAULT = 19001;
+
+ public enum State {
+ ACTIVE,
+ INACTIVE,
+ UNUSABLE
+ }
+
+ private final Cluster cluster;
+ private final String name;
+ private final Date createdTimestamp;
+ private Date stateChangeTimestamp;
+ private Date modifiedTimestamp;
+ private AsterixConfiguration asterixConfiguration;
+ private State state;
+ private final String metadataNodeId;
+ private final String asterixVersion;
+ private final List<BackupInfo> backupInfo;
+ private AsterixRuntimeState runtimeState;
+ private State previousState;
+
+ public AsterixInstance(String name, Cluster cluster, AsterixConfiguration asterixConfiguration,
+ String metadataNodeId, String asterixVersion) {
+ this.name = name;
+ this.cluster = cluster;
+ this.asterixConfiguration = asterixConfiguration;
+ this.metadataNodeId = metadataNodeId;
+ this.state = State.ACTIVE;
+ this.previousState = State.UNUSABLE;
+ this.asterixVersion = asterixVersion;
+ this.createdTimestamp = new Date();
+ this.backupInfo = new ArrayList<BackupInfo>();
+ }
+
+ public Date getModifiedTimestamp() {
+ return modifiedTimestamp;
+ }
+
+ public State getState() {
+ return state;
+ }
+
+ public void setState(State state) {
+ this.previousState = this.state;
+ this.state = state;
+ }
+
+ public Cluster getCluster() {
+ return cluster;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Date getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ public Date getStateChangeTimestamp() {
+ return stateChangeTimestamp;
+ }
+
+ public void setStateChangeTimestamp(Date stateChangeTimestamp) {
+ this.stateChangeTimestamp = stateChangeTimestamp;
+ }
+
+ public void setModifiedTimestamp(Date modifiedTimestamp) {
+ this.modifiedTimestamp = modifiedTimestamp;
+ }
+
+ public String getMetadataNodeId() {
+ return metadataNodeId;
+ }
+
+ public String getAsterixVersion() {
+ return asterixVersion;
+ }
+
+ public String getDescription(boolean detailed) {
+ StringBuffer buffer = new StringBuffer();
+ buffer.append("Name:" + name + "\n");
+ buffer.append("Created:" + createdTimestamp + "\n");
+
+ buffer.append("Web-Url:" + getWebInterfaceUrl() + "\n");
+ buffer.append("State:" + state);
+ if (!state.equals(State.UNUSABLE) && stateChangeTimestamp != null) {
+ buffer.append(" (" + stateChangeTimestamp + ")" + "\n");
+ } else {
+ buffer.append("\n");
+ }
+ if (modifiedTimestamp != null) {
+ buffer.append("Last modified timestamp:" + modifiedTimestamp + "\n");
+ }
+
+ if (runtimeState.getSummary() != null && runtimeState.getSummary().length() > 0) {
+ buffer.append("\nWARNING!:" + runtimeState.getSummary() + "\n");
+ }
+ if (detailed) {
+ addDetailedInformation(buffer);
+ }
+ return buffer.toString();
+ }
+
+ public List<BackupInfo> getBackupInfo() {
+ return backupInfo;
+ }
+
+ public String getWebInterfaceUrl() {
+ int webPort = WEB_INTERFACE_PORT_DEFAULT;
+ for (Property p : asterixConfiguration.getProperty()) {
+ if (p.getName().equalsIgnoreCase("web.port")) {
+ webPort = Integer.parseInt(p.getValue());
+ }
+ }
+ return "http://" + cluster.getMasterNode().getClientIp() + ":" + webPort;
+ }
+
+ public AsterixRuntimeState getAsterixRuntimeState() {
+ return runtimeState;
+ }
+
+ public void setAsterixRuntimeStates(AsterixRuntimeState runtimeState) {
+ this.runtimeState = runtimeState;
+ }
+
+ private void addDetailedInformation(StringBuffer buffer) {
+ buffer.append("Master node:" + cluster.getMasterNode().getId() + ":" + cluster.getMasterNode().getClusterIp()
+ + "\n");
+ for (Node node : cluster.getNode()) {
+ buffer.append(node.getId() + ":" + node.getClusterIp() + "\n");
+ }
+
+ if (backupInfo != null && backupInfo.size() > 0) {
+ for (BackupInfo info : backupInfo) {
+ buffer.append(info + "\n");
+ }
+ }
+ buffer.append("\n");
+ buffer.append("Asterix version:" + asterixVersion + "\n");
+ buffer.append("Metadata Node:" + metadataNodeId + "\n");
+ buffer.append("Processes" + "\n");
+ for (ProcessInfo pInfo : runtimeState.getProcesses()) {
+ buffer.append(pInfo + "\n");
+ }
+
+ buffer.append("\n");
+ buffer.append("Asterix Configuration\n");
+ int lenMax = 0;
+ for (Property property : asterixConfiguration.getProperty()) {
+ int nextLen = property.getName().length();
+ if (nextLen > lenMax) {
+ lenMax = nextLen;
+ }
+ }
+ for (Property property : asterixConfiguration.getProperty()) {
+ buffer.append(property.getName() + getIndentation(property.getName(), lenMax) + ":" + property.getValue()
+ + "\n");
+ }
+
+ }
+
+ private String getIndentation(String name, int lenMax) {
+ int len = name.length();
+ StringBuffer buf = new StringBuffer();
+ for (int i = 0; i < lenMax - len; i++) {
+ buf.append(" ");
+ }
+ return buf.toString();
+ }
+
+ public State getPreviousState() {
+ return previousState;
+ }
+
+ public AsterixConfiguration getAsterixConfiguration() {
+ return asterixConfiguration;
+ }
+
+ public void setAsterixConfiguration(AsterixConfiguration asterixConfiguration) {
+ this.asterixConfiguration = asterixConfiguration;
+ }
+
+}
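
Editor's note: getWebInterfaceUrl above scans the configured properties for web.port and falls back to the 19001 default. A sketch of that default-with-override lookup; the property pairs are hypothetical:

import java.util.Arrays;
import java.util.List;

public class WebUrlSketch {
    public static void main(String[] args) {
        // Hypothetical name/value pairs standing in for AsterixConfiguration properties.
        List<String[]> properties = Arrays.asList(
                new String[] { "nc.java.opts", "-Xmx1g" },
                new String[] { "web.port", "19002" });

        int webPort = 19001; // WEB_INTERFACE_PORT_DEFAULT
        for (String[] p : properties) {
            if (p[0].equalsIgnoreCase("web.port")) {
                webPort = Integer.parseInt(p[1]); // configured value overrides the default
            }
        }
        System.out.println("http://127.0.0.1:" + webPort); // prints port 19002
    }
}
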
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixRuntimeState.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixRuntimeState.java
new file mode 100644
index 0000000..b8fbabb
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/AsterixRuntimeState.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.model;
+
+import java.io.Serializable;
+import java.util.List;
+
+public class AsterixRuntimeState implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+ private final List<ProcessInfo> processes;
+ private final List<String> failedNCs;
+ private final boolean ccRunning;
+ private String summary;
+
+ public AsterixRuntimeState(List<ProcessInfo> processes, List<String> failedNCs, boolean ccRunning) {
+ this.processes = processes;
+ this.failedNCs = failedNCs;
+ this.ccRunning = ccRunning;
+ }
+
+ public List<ProcessInfo> getProcesses() {
+ return processes;
+ }
+
+ public List<String> getFailedNCs() {
+ return failedNCs;
+ }
+
+ public boolean isCcRunning() {
+ return ccRunning;
+ }
+
+ public void setSummary(String summary) {
+ this.summary = summary;
+ }
+
+ public String getSummary() {
+ return summary;
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/BackupInfo.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/BackupInfo.java
new file mode 100644
index 0000000..e1bcd8a
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/BackupInfo.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.model;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import edu.uci.ics.asterix.installer.schema.conf.Backup;
+import edu.uci.ics.asterix.installer.schema.conf.Hdfs;
+
+public class BackupInfo implements Serializable {
+
+ public static enum BackupType {
+ LOCAL,
+ HDFS
+ }
+
+ private static final long serialVersionUID = 1L;
+
+ private final int id;
+ private final Date date;
+ private final Backup backupConf;
+
+ public BackupInfo(int id, Date date, Backup backupConf) {
+ this.id = id;
+ this.date = date;
+ this.backupConf = backupConf;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public Date getDate() {
+ return date;
+ }
+
+ public Backup getBackupConf() {
+ return backupConf;
+ }
+
+ @Override
+ public String toString() {
+ return id + " " + date + " " + "(" + getBackupType() + ")" + " " + "[ " + this.getBackupConf().getBackupDir()
+ + " ]";
+
+ }
+
+ public BackupType getBackupType() {
+ return getBackupType(this.getBackupConf());
+ }
+
+ public static BackupType getBackupType(Backup backupConf) {
+ Hdfs hdfs = backupConf.getHdfs();
+ return (hdfs != null && hdfs.getUrl() != null && hdfs.getUrl().length() > 0) ? BackupType.HDFS
+ : BackupType.LOCAL;
+ }
+}
\ No newline at end of file
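
Editor's note: BackupInfo classifies a backup as HDFS only when a non-empty HDFS URL is configured, otherwise LOCAL. A standalone sketch of that rule:

public class BackupTypeSketch {
    enum BackupType { LOCAL, HDFS }

    // Mirrors BackupInfo.getBackupType: HDFS only when a non-empty URL is configured.
    static BackupType backupType(String hdfsUrl) {
        return (hdfsUrl != null && hdfsUrl.length() > 0) ? BackupType.HDFS : BackupType.LOCAL;
    }

    public static void main(String[] args) {
        System.out.println(backupType("hdfs://nn:8020")); // HDFS
        System.out.println(backupType(null));             // LOCAL
        System.out.println(backupType(""));               // LOCAL
    }
}
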
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/EventList.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/EventList.java
new file mode 100644
index 0000000..fda814c
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/EventList.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.model;
+
+public class EventList {
+
+ public enum EventType {
+ NODE_JOIN,
+ NODE_FAILURE,
+ CC_START,
+ CC_FAILURE,
+ BACKUP,
+ RESTORE,
+ FILE_DELETE,
+ HDFS_DELETE,
+ FILE_TRANSFER,
+ FILE_CREATE,
+ DIRECTORY_TRANSFER,
+ DIRECTORY_COPY,
+ NODE_INFO
+ }
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/ProcessInfo.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/ProcessInfo.java
new file mode 100644
index 0000000..4fde136
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/model/ProcessInfo.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.model;
+
+import java.io.Serializable;
+
+public class ProcessInfo implements Serializable {
+
+ private static final long serialVersionUID = 304186774065853730L;
+ private final String processName;
+ private final String host;
+ private final String nodeId;
+ private final int processId;
+
+ public ProcessInfo(String processName, String host, String nodeId, int processId) {
+ this.processName = processName;
+ this.host = host;
+ this.nodeId = nodeId;
+ this.processId = processId;
+ }
+
+ public String getProcessName() {
+ return processName;
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public int getProcessId() {
+ return processId;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public String toString() {
+ return processName + " at " + nodeId + " [ " + processId + " ] ";
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventService.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventService.java
new file mode 100644
index 0000000..6744746
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventService.java
@@ -0,0 +1,78 @@
+package edu.uci.ics.asterix.event.service;
+
+import java.io.File;
+import java.io.FileFilter;
+
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import edu.uci.ics.asterix.event.error.OutputHandler;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.installer.schema.conf.Configuration;
+
+public class AsterixEventService {
+
+ private static final Logger LOGGER = Logger.getLogger(AsterixEventService.class.getName());
+ private static Configuration configuration;
+ private static String asterixDir;
+ private static String asterixZip;
+ private static String eventHome;
+
+ public static void initialize(Configuration configuration, String asterixDir, String eventHome) throws Exception {
+ AsterixEventService.configuration = configuration;
+ AsterixEventService.asterixDir = asterixDir;
+ AsterixEventService.asterixZip = initBinary("asterix-server");
+ AsterixEventService.eventHome = eventHome;
+ }
+
+ public static AsterixEventServiceClient getAsterixEventServiceClient(Cluster cluster, boolean transferArtifacts,
+ boolean dryRun) throws Exception {
+ AsterixEventServiceClient client = new AsterixEventServiceClient(configuration, eventHome, cluster,
+ transferArtifacts, dryRun, OutputHandler.INSTANCE);
+ return client;
+ }
+
+ public static AsterixEventServiceClient getAsterixEventServiceClient(Cluster cluster) throws Exception {
+ AsterixEventServiceClient client = new AsterixEventServiceClient(configuration, eventHome, cluster, false,
+ false, OutputHandler.INSTANCE);
+ return client;
+ }
+
+ private static String initBinary(final String fileNamePattern) {
+ File file = new File(asterixDir);
+ File[] zipFiles = file.listFiles(new FileFilter() {
+ public boolean accept(File arg0) {
+ return arg0.getAbsolutePath().contains(fileNamePattern) && arg0.isFile();
+ }
+ });
+ if (zipFiles == null || zipFiles.length == 0) {
+ String msg = " Binary not found at " + asterixDir;
+ LOGGER.log(Level.FATAL, msg);
+ throw new IllegalStateException(msg);
+ }
+ if (zipFiles.length > 1) {
+ String msg = " Multiple binaries found at " + asterixDir;
+ LOGGER.log(Level.FATAL, msg);
+ throw new IllegalStateException(msg);
+ }
+
+ return zipFiles[0].getAbsolutePath();
+ }
+
+ public static Configuration getConfiguration() {
+ return configuration;
+ }
+
+ public static String getAsterixZip() {
+ return asterixZip;
+ }
+
+ public static String getAsterixDir() {
+ return asterixDir;
+ }
+
+ public static String getEventHome() {
+ return eventHome;
+ }
+}
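
Editor's note: initBinary above requires exactly one asterix-server zip under the managed directory and fails fast otherwise. A standalone sketch of that lookup; the null check on listFiles (which returns null for a non-directory) mirrors the fix applied above, and the path in main is hypothetical:

import java.io.File;
import java.io.FileFilter;

public class BinaryLookupSketch {

    // Returns the absolute path of the single file matching the pattern, or fails fast.
    static String findSingle(File dir, final String fileNamePattern) {
        File[] matches = dir.listFiles(new FileFilter() {
            public boolean accept(File f) {
                return f.isFile() && f.getName().contains(fileNamePattern);
            }
        });
        if (matches == null || matches.length == 0) {
            throw new IllegalStateException("Binary not found at " + dir);
        }
        if (matches.length > 1) {
            throw new IllegalStateException("Multiple binaries found at " + dir);
        }
        return matches[0].getAbsolutePath();
    }

    public static void main(String[] args) {
        // Hypothetical directory; adjust to a real path when trying this out.
        System.out.println(findSingle(new File("/tmp/asterix"), "asterix-server"));
    }
}
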
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventServiceUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventServiceUtil.java
new file mode 100644
index 0000000..f6765e9
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/AsterixEventServiceUtil.java
@@ -0,0 +1,551 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.service;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.jar.JarOutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import java.util.zip.ZipOutputStream;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+
+import org.apache.commons.io.IOUtils;
+
+import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.common.configuration.Coredump;
+import edu.uci.ics.asterix.common.configuration.Store;
+import edu.uci.ics.asterix.common.configuration.TransactionLogDir;
+import edu.uci.ics.asterix.event.driver.EventDriver;
+import edu.uci.ics.asterix.event.error.EventException;
+import edu.uci.ics.asterix.event.management.EventUtil;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Env;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.cluster.Property;
+
+public class AsterixEventServiceUtil {
+
+ public static final String TXN_LOG_DIR = "txnLogs";
+ public static final String TXN_LOG_DIR_KEY_SUFFIX = "txnLogDir";
+ public static final String ASTERIX_CONFIGURATION_FILE = "asterix-configuration.xml";
+ public static final String TXN_LOG_CONFIGURATION_FILE = "log.properties";
+ public static final String CLUSTER_CONFIGURATION_FILE = "cluster.xml";
+ public static final String ASTERIX_DIR = "asterix";
+ public static final String EVENT_DIR = "events";
+ public static final String DEFAULT_ASTERIX_CONFIGURATION_PATH = "conf" + File.separator
+ + "asterix-configuration.xml";
+ public static final int CLUSTER_NET_PORT_DEFAULT = 1098;
+ public static final int CLIENT_NET_PORT_DEFAULT = 1099;
+ public static final int HTTP_PORT_DEFAULT = 8888;
+ public static final int WEB_INTERFACE_PORT_DEFAULT = 19001;
+
+ public static final String MANAGIX_INTERNAL_DIR = ".installer";
+ public static final String MANAGIX_CONF_XML = "conf" + File.separator + "managix-conf.xml";
+
+ public static AsterixInstance createAsterixInstance(String asterixInstanceName, Cluster cluster,
+ AsterixConfiguration asterixConfiguration) throws FileNotFoundException, IOException {
+ Node metadataNode = getMetadataNode(asterixInstanceName, cluster);
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ String asterixVersion = asterixZipName.substring("asterix-server-".length(),
+ asterixZipName.indexOf("-binary-assembly"));
+ AsterixInstance instance = new AsterixInstance(asterixInstanceName, cluster, asterixConfiguration,
+ metadataNode.getId(), asterixVersion);
+ return instance;
+ }
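+    // Editor's note (illustrative, not part of the original change): the version is
+    // parsed positionally from the server zip name, so a distribution named
+    // "asterix-server-0.8.4-SNAPSHOT-binary-assembly.zip" yields the asterixVersion
+    // "0.8.4-SNAPSHOT"; any other naming scheme would break the substring arithmetic.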
+
+ public static void createAsterixZip(AsterixInstance asterixInstance) throws IOException, InterruptedException,
+ JAXBException, EventException {
+
+ String modifiedZipPath = injectAsterixPropertyFile(AsterixEventService.getAsterixZip(), asterixInstance);
+ injectAsterixClusterConfigurationFile(modifiedZipPath, asterixInstance);
+ }
+
+ public static void createClusterProperties(Cluster cluster, AsterixConfiguration asterixConfiguration) {
+
+ String ccJavaOpts = null;
+ String ncJavaOpts = null;
+ for (edu.uci.ics.asterix.common.configuration.Property property : asterixConfiguration.getProperty()) {
+ if (property.getName().equalsIgnoreCase(EventUtil.CC_JAVA_OPTS)) {
+ ccJavaOpts = property.getValue();
+ } else if (property.getName().equalsIgnoreCase(EventUtil.NC_JAVA_OPTS)) {
+ ncJavaOpts = property.getValue();
+ }
+ }
+
+        populateClusterEnvironmentProperties(cluster, ccJavaOpts, ncJavaOpts);
+ }
+
+    public static void populateClusterEnvironmentProperties(Cluster cluster, String ccJavaOpts, String ncJavaOpts) {
+ List<Property> clusterProperties = null;
+ if (cluster.getEnv() != null && cluster.getEnv().getProperty() != null) {
+ clusterProperties = cluster.getEnv().getProperty();
+ clusterProperties.clear();
+ } else {
+ clusterProperties = new ArrayList<Property>();
+ }
+
+ clusterProperties.add(new Property(EventUtil.CC_JAVA_OPTS, ccJavaOpts));
+ clusterProperties.add(new Property(EventUtil.NC_JAVA_OPTS, ncJavaOpts));
+ clusterProperties.add(new Property("ASTERIX_HOME", cluster.getWorkingDir().getDir() + File.separator
+ + "asterix"));
+ clusterProperties.add(new Property("LOG_DIR", cluster.getLogDir()));
+ clusterProperties.add(new Property("JAVA_HOME", cluster.getJavaHome()));
+ clusterProperties.add(new Property("WORKING_DIR", cluster.getWorkingDir().getDir()));
+ clusterProperties.add(new Property("CLIENT_NET_IP", cluster.getMasterNode().getClientIp()));
+ clusterProperties.add(new Property("CLUSTER_NET_IP", cluster.getMasterNode().getClusterIp()));
+
+ int clusterNetPort = cluster.getMasterNode().getClusterPort() != null ? cluster.getMasterNode()
+ .getClusterPort().intValue() : CLUSTER_NET_PORT_DEFAULT;
+ int clientNetPort = cluster.getMasterNode().getClientPort() != null ? cluster.getMasterNode().getClientPort()
+ .intValue() : CLIENT_NET_PORT_DEFAULT;
+ int httpPort = cluster.getMasterNode().getHttpPort() != null ? cluster.getMasterNode().getHttpPort().intValue()
+ : HTTP_PORT_DEFAULT;
+
+ clusterProperties.add(new Property("CLIENT_NET_PORT", "" + clientNetPort));
+ clusterProperties.add(new Property("CLUSTER_NET_PORT", "" + clusterNetPort));
+ clusterProperties.add(new Property("HTTP_PORT", "" + httpPort));
+
+ cluster.setEnv(new Env(clusterProperties));
+ }
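+    // Illustrative outcome (editor's sketch): for a master node that declares no
+    // explicit ports, the resulting env carries CLUSTER_NET_PORT=1098,
+    // CLIENT_NET_PORT=1099 and HTTP_PORT=8888 alongside ASTERIX_HOME, LOG_DIR,
+    // JAVA_HOME, WORKING_DIR and the master-node IP properties set above.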
+
+ private static String injectAsterixPropertyFile(String origZipFile, AsterixInstance asterixInstance)
+ throws IOException, JAXBException {
+ writeAsterixConfigurationFile(asterixInstance);
+ String asterixInstanceDir = AsterixEventService.getAsterixDir() + File.separator + asterixInstance.getName();
+ unzip(origZipFile, asterixInstanceDir);
+ File sourceJar = new File(asterixInstanceDir + File.separator + "repo" + File.separator + "asterix-app-"
+ + asterixInstance.getAsterixVersion() + ".jar");
+ File replacementFile = new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE);
+ replaceInJar(sourceJar, ASTERIX_CONFIGURATION_FILE, replacementFile);
+ new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE).delete();
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
+ return asterixInstanceDir + File.separator + asterixZipName;
+ }
+
+ private static String injectAsterixLogPropertyFile(String origZipFile, AsterixInstance asterixInstance)
+ throws IOException, EventException {
+ String asterixInstanceDir = AsterixEventService.getAsterixDir() + File.separator + asterixInstance.getName();
+ unzip(origZipFile, asterixInstanceDir);
+ File sourceJar1 = new File(asterixInstanceDir + File.separator + "repo" + File.separator + "asterix-app-"
+ + asterixInstance.getAsterixVersion() + ".jar");
+ Properties txnLogProperties = new Properties();
+ URLClassLoader urlClassLoader = new URLClassLoader(new URL[] { sourceJar1.toURI().toURL() });
+ InputStream in = urlClassLoader.getResourceAsStream(TXN_LOG_CONFIGURATION_FILE);
+ if (in != null) {
+ txnLogProperties.load(in);
+ }
+
+ writeAsterixLogConfigurationFile(asterixInstance, txnLogProperties);
+
+ File sourceJar2 = new File(asterixInstanceDir + File.separator + "repo" + File.separator + "asterix-app-"
+ + asterixInstance.getAsterixVersion() + ".jar");
+ File replacementFile = new File(asterixInstanceDir + File.separator + "log.properties");
+ replaceInJar(sourceJar2, TXN_LOG_CONFIGURATION_FILE, replacementFile);
+
+ new File(asterixInstanceDir + File.separator + "log.properties").delete();
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
+ return asterixInstanceDir + File.separator + asterixZipName;
+ }
+
+ private static String injectAsterixClusterConfigurationFile(String origZipFile, AsterixInstance asterixInstance)
+ throws IOException, EventException, JAXBException {
+ String asterixInstanceDir = AsterixEventService.getAsterixDir() + File.separator + asterixInstance.getName();
+ unzip(origZipFile, asterixInstanceDir);
+ File sourceJar = new File(asterixInstanceDir + File.separator + "repo" + File.separator + "asterix-app-"
+ + asterixInstance.getAsterixVersion() + ".jar");
+ writeAsterixClusterConfigurationFile(asterixInstance);
+
+ File replacementFile = new File(asterixInstanceDir + File.separator + "cluster.xml");
+ replaceInJar(sourceJar, CLUSTER_CONFIGURATION_FILE, replacementFile);
+
+ new File(asterixInstanceDir + File.separator + CLUSTER_CONFIGURATION_FILE).delete();
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
+ return asterixInstanceDir + File.separator + asterixZipName;
+ }
+
+ private static void writeAsterixClusterConfigurationFile(AsterixInstance asterixInstance) throws IOException,
+ EventException, JAXBException {
+ String asterixInstanceName = asterixInstance.getName();
+ Cluster cluster = asterixInstance.getCluster();
+
+ JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+ Marshaller marshaller = ctx.createMarshaller();
+ marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+        FileOutputStream os = new FileOutputStream(AsterixEventService.getAsterixDir() + File.separator
+                + asterixInstanceName + File.separator + "cluster.xml");
+        marshaller.marshal(cluster, os);
+        os.close();
+ }
+
+ public static void addLibraryToAsterixZip(AsterixInstance asterixInstance, String dataverseName,
+ String libraryName, String libraryPath) throws IOException {
+ File instanceDir = new File(AsterixEventService.getAsterixDir() + File.separator + asterixInstance.getName());
+ if (!instanceDir.exists()) {
+ instanceDir.mkdirs();
+ }
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+
+ String sourceZip = instanceDir.getAbsolutePath() + File.separator + asterixZipName;
+ unzip(sourceZip, instanceDir.getAbsolutePath());
+ File libraryPathInZip = new File(instanceDir.getAbsolutePath() + File.separator + "external" + File.separator
+ + "library" + dataverseName + File.separator + "to-add" + File.separator + libraryName);
+ libraryPathInZip.mkdirs();
+ Runtime.getRuntime().exec("cp" + " " + libraryPath + " " + libraryPathInZip.getAbsolutePath());
+ Runtime.getRuntime().exec("rm " + sourceZip);
+ String destZip = AsterixEventService.getAsterixDir() + File.separator + asterixInstance.getName()
+ + File.separator + asterixZipName;
+ zipDir(instanceDir, new File(destZip));
+ Runtime.getRuntime().exec("mv" + " " + destZip + " " + sourceZip);
+ }
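+    // Editor's caveat: the cp/rm/mv steps above shell out via Runtime.exec without
+    // calling Process.waitFor, so they implicitly assume each command completes
+    // before the next filesystem operation touches the same paths.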
+
+ private static Node getMetadataNode(String asterixInstanceName, Cluster cluster) {
+ Node metadataNode = null;
+ if (cluster.getMetadataNode() != null) {
+ for (Node node : cluster.getNode()) {
+ if (node.getId().equals(cluster.getMetadataNode())) {
+ metadataNode = node;
+ break;
+ }
+ }
+ } else {
+ Random random = new Random();
+ int nNodes = cluster.getNode().size();
+ metadataNode = cluster.getNode().get(random.nextInt(nNodes));
+ }
+ return metadataNode;
+ }
+
+ public static String getNodeDirectories(String asterixInstanceName, Node node, Cluster cluster) {
+ String storeDataSubDir = asterixInstanceName + File.separator + "data" + File.separator;
+ String[] storeDirs = null;
+ StringBuffer nodeDataStore = new StringBuffer();
+ String storeDirValue = node.getStore();
+ if (storeDirValue == null) {
+ storeDirValue = cluster.getStore();
+ if (storeDirValue == null) {
+ throw new IllegalStateException(" Store not defined for node " + node.getId());
+ }
+ storeDataSubDir = node.getId() + File.separator + storeDataSubDir;
+ }
+
+ storeDirs = storeDirValue.split(",");
+ for (String ns : storeDirs) {
+ nodeDataStore.append(ns + File.separator + storeDataSubDir.trim());
+ nodeDataStore.append(",");
+ }
+ nodeDataStore.deleteCharAt(nodeDataStore.length() - 1);
+ return nodeDataStore.toString();
+ }
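+    // Illustrative output (editor's sketch, hypothetical names): for node "nc1" of
+    // instance "my_asterix" inheriting the cluster-level store "/mnt/io1,/mnt/io2",
+    // this returns "/mnt/io1/nc1/my_asterix/data/,/mnt/io2/nc1/my_asterix/data/",
+    // one data path per comma-separated store directory.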
+
+ private static void writeAsterixConfigurationFile(AsterixInstance asterixInstance) throws IOException,
+ JAXBException {
+ String asterixInstanceName = asterixInstance.getName();
+ Cluster cluster = asterixInstance.getCluster();
+ String metadataNodeId = asterixInstance.getMetadataNodeId();
+
+ AsterixConfiguration configuration = asterixInstance.getAsterixConfiguration();
+ configuration.setInstanceName(asterixInstanceName);
+ configuration.setMetadataNode(asterixInstanceName + "_" + metadataNodeId);
+ String storeDir = null;
+ List<Store> stores = new ArrayList<Store>();
+ for (Node node : cluster.getNode()) {
+ storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
+ stores.add(new Store(asterixInstanceName + "_" + node.getId(), storeDir));
+ }
+ configuration.setStore(stores);
+
+ List<Coredump> coredump = new ArrayList<Coredump>();
+ String coredumpDir = null;
+ List<TransactionLogDir> txnLogDirs = new ArrayList<TransactionLogDir>();
+ String txnLogDir = null;
+ for (Node node : cluster.getNode()) {
+ coredumpDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+ coredump.add(new Coredump(asterixInstanceName + "_" + node.getId(), coredumpDir + File.separator
+ + asterixInstanceName + "_" + node.getId()));
+
+ txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+ txnLogDirs.add(new TransactionLogDir(asterixInstanceName + "_" + node.getId(), txnLogDir));
+ }
+ configuration.setCoredump(coredump);
+ configuration.setTransactionLogDir(txnLogDirs);
+
+ File asterixConfDir = new File(AsterixEventService.getAsterixDir() + File.separator + asterixInstanceName);
+ asterixConfDir.mkdirs();
+
+ JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
+ Marshaller marshaller = ctx.createMarshaller();
+ marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+ FileOutputStream os = new FileOutputStream(asterixConfDir + File.separator + ASTERIX_CONFIGURATION_FILE);
+ marshaller.marshal(configuration, os);
+ os.close();
+ }
+
+ private static void writeAsterixLogConfigurationFile(AsterixInstance asterixInstance, Properties logProperties)
+ throws IOException, EventException {
+ String asterixInstanceName = asterixInstance.getName();
+ Cluster cluster = asterixInstance.getCluster();
+ StringBuffer conf = new StringBuffer();
+ for (Map.Entry<Object, Object> p : logProperties.entrySet()) {
+ conf.append(p.getKey() + "=" + p.getValue() + "\n");
+ }
+
+ for (Node node : cluster.getNode()) {
+ String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+ if (txnLogDir == null) {
+ throw new EventException("Transaction log directory (txn_log_dir) not configured for node: "
+ + node.getId());
+ }
+ conf.append(asterixInstanceName + "_" + node.getId() + "." + TXN_LOG_DIR_KEY_SUFFIX + "=" + txnLogDir
+ + "\n");
+ }
+ List<edu.uci.ics.asterix.common.configuration.Property> properties = asterixInstance.getAsterixConfiguration()
+ .getProperty();
+ for (edu.uci.ics.asterix.common.configuration.Property p : properties) {
+ if (p.getName().trim().toLowerCase().contains("log")) {
+                conf.append(p.getName() + "=" + p.getValue() + "\n");
+ }
+ }
+ dumpToFile(AsterixEventService.getAsterixDir() + File.separator + asterixInstanceName + File.separator
+ + "log.properties", conf.toString());
+
+ }
+
+ public static void unzip(String sourceFile, String destDir) throws IOException {
+ BufferedOutputStream dest = null;
+ FileInputStream fis = new FileInputStream(sourceFile);
+ ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis));
+ ZipEntry entry = null;
+
+ int BUFFER_SIZE = 4096;
+ while ((entry = zis.getNextEntry()) != null) {
+ String dst = destDir + File.separator + entry.getName();
+ if (entry.isDirectory()) {
+ createDir(destDir, entry);
+ continue;
+ }
+ int count;
+            byte[] data = new byte[BUFFER_SIZE];
+
+ // write the file to the disk
+ FileOutputStream fos = new FileOutputStream(dst);
+ dest = new BufferedOutputStream(fos, BUFFER_SIZE);
+ while ((count = zis.read(data, 0, BUFFER_SIZE)) != -1) {
+ dest.write(data, 0, count);
+ }
+ // close the output streams
+ dest.flush();
+ dest.close();
+ }
+
+ zis.close();
+ }
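+    // Usage sketch (editor's illustration): unzip("/tmp/pkg.zip", "/tmp/out") expands
+    // every entry under /tmp/out, recreating directory entries via createDir and
+    // streaming file entries through a 4 KB buffer.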
+
+ public static void zipDir(File sourceDir, File destFile) throws IOException {
+ FileOutputStream fos = new FileOutputStream(destFile);
+ ZipOutputStream zos = new ZipOutputStream(fos);
+ zipDir(sourceDir, destFile, zos);
+ zos.close();
+ }
+
+ private static void zipDir(File sourceDir, final File destFile, ZipOutputStream zos) throws IOException {
+ File[] dirList = sourceDir.listFiles(new FileFilter() {
+ public boolean accept(File f) {
+ return !f.getName().endsWith(destFile.getName());
+ }
+ });
+ for (int i = 0; i < dirList.length; i++) {
+ File f = dirList[i];
+ if (f.isDirectory()) {
+ zipDir(f, destFile, zos);
+ } else {
+ int bytesIn = 0;
+ byte[] readBuffer = new byte[2156];
+ FileInputStream fis = new FileInputStream(f);
+ ZipEntry entry = new ZipEntry(sourceDir.getName() + File.separator + f.getName());
+ zos.putNextEntry(entry);
+ while ((bytesIn = fis.read(readBuffer)) != -1) {
+ zos.write(readBuffer, 0, bytesIn);
+ }
+ fis.close();
+ }
+ }
+ }
+
+ private static void replaceInJar(File sourceJar, String origFile, File replacementFile) throws IOException {
+ String srcJarAbsPath = sourceJar.getAbsolutePath();
+ String srcJarSuffix = srcJarAbsPath.substring(srcJarAbsPath.lastIndexOf(File.separator) + 1);
+ String srcJarName = srcJarSuffix.split(".jar")[0];
+
+ String destJarName = srcJarName + "-managix";
+ String destJarSuffix = destJarName + ".jar";
+ File destJar = new File(sourceJar.getParentFile().getAbsolutePath() + File.separator + destJarSuffix);
+ // File destJar = new File(sourceJar.getAbsolutePath() + ".modified");
+ InputStream jarIs = null;
+ FileInputStream fis = new FileInputStream(replacementFile);
+ JarFile sourceJarFile = new JarFile(sourceJar);
+ Enumeration<JarEntry> entries = sourceJarFile.entries();
+ JarOutputStream jos = new JarOutputStream(new FileOutputStream(destJar));
+ byte[] buffer = new byte[2048];
+ int read;
+ while (entries.hasMoreElements()) {
+ JarEntry entry = (JarEntry) entries.nextElement();
+ String name = entry.getName();
+ if (name.equals(origFile)) {
+ continue;
+ }
+ jarIs = sourceJarFile.getInputStream(entry);
+ jos.putNextEntry(entry);
+ while ((read = jarIs.read(buffer)) != -1) {
+ jos.write(buffer, 0, read);
+ }
+ }
+ JarEntry entry = new JarEntry(origFile);
+ jos.putNextEntry(entry);
+ while ((read = fis.read(buffer)) != -1) {
+ jos.write(buffer, 0, read);
+ }
+ fis.close();
+ jos.close();
+        if (jarIs != null) {
+            jarIs.close();
+        }
+ sourceJar.delete();
+ destJar.renameTo(sourceJar);
+ destJar.setExecutable(true);
+ }
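+    // Editor's note: replaceInJar rewrites the whole jar rather than patching it in
+    // place. Every entry except origFile is streamed into a sibling
+    // "<name>-managix.jar", the replacement content is appended under the original
+    // entry name, and the new jar is then renamed over the source file.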
+
+ public static void dumpToFile(String dest, String content) throws IOException {
+ FileWriter writer = new FileWriter(dest);
+ writer.write(content);
+ writer.close();
+ }
+
+ private static void createDir(String destDirectory, ZipEntry entry) {
+ String name = entry.getName();
+ int index = name.lastIndexOf(File.separator);
+ String dirSequence = name.substring(0, index);
+ File newDirs = new File(destDirectory + File.separator + dirSequence);
+ newDirs.mkdirs();
+ }
+
+ public static AsterixInstance validateAsterixInstanceExists(String name, State... permissibleStates)
+ throws Exception {
+ AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(name);
+ if (instance == null) {
+ throw new EventException("Asterix instance by name " + name + " does not exist.");
+ }
+ boolean valid = false;
+ for (State state : permissibleStates) {
+ if (state.equals(instance.getState())) {
+ valid = true;
+ break;
+ }
+ }
+ if (!valid) {
+ throw new EventException("Asterix instance by the name " + name + " is in " + instance.getState()
+ + " state ");
+ }
+ return instance;
+ }
+
+ public static void validateAsterixInstanceNotExists(String name) throws Exception {
+ AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(name);
+ if (instance != null) {
+ throw new EventException("Asterix instance by name " + name + " already exists.");
+ }
+ }
+
+ public static void evaluateConflictWithOtherInstances(AsterixInstance instance) throws Exception {
+ List<AsterixInstance> existingInstances = ServiceProvider.INSTANCE.getLookupService().getAsterixInstances();
+ List<String> usedIps = new ArrayList<String>();
+ String masterIp = instance.getCluster().getMasterNode().getClusterIp();
+ for (Node node : instance.getCluster().getNode()) {
+ usedIps.add(node.getClusterIp());
+ }
+ usedIps.add(instance.getCluster().getMasterNode().getClusterIp());
+ boolean conflictFound = false;
+ AsterixInstance conflictingInstance = null;
+ for (AsterixInstance existing : existingInstances) {
+ conflictFound = !existing.getState().equals(State.INACTIVE)
+ && existing.getCluster().getMasterNode().getClusterIp().equals(masterIp);
+ if (conflictFound) {
+ conflictingInstance = existing;
+ break;
+ }
+ for (Node n : existing.getCluster().getNode()) {
+ if (usedIps.contains(n.getClusterIp())) {
+ conflictFound = true;
+ conflictingInstance = existing;
+ break;
+ }
+ }
+ }
+ if (conflictFound) {
+ throw new Exception("Cluster definition conflicts with an existing instance of Asterix: "
+ + conflictingInstance.getName());
+ }
+ }
+
+ public static void deleteDirectory(String path) throws IOException {
+ Runtime.getRuntime().exec("rm -rf " + path);
+ }
+
+ public static String executeLocalScript(String path, List<String> args) throws Exception {
+ List<String> pargs = new ArrayList<String>();
+ pargs.add("/bin/bash");
+ pargs.add(path);
+ if (args != null) {
+ pargs.addAll(args);
+ }
+ ProcessBuilder pb = new ProcessBuilder(pargs);
+ pb.environment().putAll(EventDriver.getEnvironment());
+ pb.environment().put("IP_LOCATION", EventDriver.CLIENT_NODE.getClusterIp());
+ Process p = pb.start();
+ BufferedInputStream bis = new BufferedInputStream(p.getInputStream());
+ StringWriter writer = new StringWriter();
+ IOUtils.copy(bis, writer, "UTF-8");
+ return writer.toString();
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ILookupService.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ILookupService.java
new file mode 100644
index 0000000..cac504f
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ILookupService.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.service;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.installer.schema.conf.Configuration;
+
+public interface ILookupService {
+
+ public void writeAsterixInstance(AsterixInstance asterixInstance) throws Exception;
+
+ public AsterixInstance getAsterixInstance(String name) throws Exception;
+
+ public boolean isRunning(Configuration conf) throws Exception;
+
+ public void startService(Configuration conf) throws Exception;
+
+ public void stopService(Configuration conf) throws Exception;
+
+ public boolean exists(String name) throws Exception;
+
+ public void removeAsterixInstance(String name) throws Exception;
+
+ public List<AsterixInstance> getAsterixInstances() throws Exception;
+
+ public void updateAsterixInstance(AsterixInstance updatedInstance) throws Exception;
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ServiceProvider.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ServiceProvider.java
new file mode 100644
index 0000000..c5ea908
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ServiceProvider.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.service;
+
+public class ServiceProvider {
+
+    public static final ServiceProvider INSTANCE = new ServiceProvider();
+    private static final ILookupService lookupService = new ZooKeeperService();
+
+ private ServiceProvider() {
+
+ }
+
+ public ILookupService getLookupService() {
+ return lookupService;
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ZooKeeperService.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ZooKeeperService.java
new file mode 100644
index 0000000..5c059bb
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/service/ZooKeeperService.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.service;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.log4j.Logger;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooKeeper;
+import org.apache.zookeeper.data.Stat;
+
+import edu.uci.ics.asterix.event.error.EventException;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.installer.schema.conf.Configuration;
+
+public class ZooKeeperService implements ILookupService {
+
+ private static final Logger LOGGER = Logger.getLogger(ZooKeeperService.class.getName());
+
+ private static final int ZOOKEEPER_LEADER_CONN_PORT = 2222;
+ private static final int ZOOKEEPER_LEADER_ELEC_PORT = 2223;
+ private static final int ZOOKEEPER_SESSION_TIME_OUT = 40 * 1000; //milliseconds
+ private static final String ZOOKEEPER_HOME = AsterixEventService.getEventHome() + File.separator + "zookeeper";
+ private static final String ZOO_KEEPER_CONFIG = ZOOKEEPER_HOME + File.separator + "zk.cfg";
+
+ private boolean isRunning = false;
+ private ZooKeeper zk;
+ private String zkConnectionString;
+ private static final String ASTERIX_INSTANCE_BASE_PATH = "/Asterix";
+ private static final int DEFAULT_NODE_VERSION = -1;
+ private LinkedBlockingQueue<String> msgQ = new LinkedBlockingQueue<String>();
+ private ZooKeeperWatcher watcher = new ZooKeeperWatcher(msgQ);
+
+ public boolean isRunning(Configuration conf) throws Exception {
+ List<String> servers = conf.getZookeeper().getServers().getServer();
+ int clientPort = conf.getZookeeper().getClientPort().intValue();
+ StringBuffer connectionString = new StringBuffer();
+ for (String serverAddress : servers) {
+ connectionString.append(serverAddress);
+ connectionString.append(":");
+ connectionString.append(clientPort);
+ connectionString.append(",");
+ }
+ if (connectionString.length() > 0) {
+ connectionString.deleteCharAt(connectionString.length() - 1);
+ }
+ zkConnectionString = connectionString.toString();
+
+ zk = new ZooKeeper(zkConnectionString, ZOOKEEPER_SESSION_TIME_OUT, watcher);
+ try {
+ zk.exists("/dummy", watcher);
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("ZooKeeper running at " + connectionString);
+ }
+ createRootIfNotExist();
+ isRunning = true;
+ } catch (KeeperException ke) {
+ isRunning = false;
+ }
+ return isRunning;
+ }
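+    // Illustrative connection string (editor's sketch): servers ["host1", "host2"]
+    // with clientPort 2181 yield "host1:2181,host2:2181", the comma-separated form
+    // the ZooKeeper client constructor expects.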
+
+ public void startService(Configuration conf) throws Exception {
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Starting ZooKeeper at " + zkConnectionString);
+ }
+ ZookeeperUtil.writeConfiguration(ZOO_KEEPER_CONFIG, conf, ZOOKEEPER_LEADER_CONN_PORT,
+ ZOOKEEPER_LEADER_ELEC_PORT);
+ String initScript = ZOOKEEPER_HOME + File.separator + "bin" + File.separator + "zk.init";
+ StringBuffer cmdBuffer = new StringBuffer();
+ cmdBuffer.append(initScript + " ");
+ cmdBuffer.append(conf.getZookeeper().getHomeDir() + " ");
+ cmdBuffer.append(conf.getZookeeper().getServers().getJavaHome() + " ");
+ List<String> zkServers = conf.getZookeeper().getServers().getServer();
+ for (String zkServer : zkServers) {
+ cmdBuffer.append(zkServer + " ");
+ }
+ Runtime.getRuntime().exec(cmdBuffer.toString());
+ zk = new ZooKeeper(zkConnectionString, ZOOKEEPER_SESSION_TIME_OUT, watcher);
+ String head = msgQ.poll(10, TimeUnit.SECONDS);
+ if (head == null) {
+ StringBuilder msg = new StringBuilder(
+ "Unable to start Zookeeper Service. This could be because of the following reasons.\n");
+ msg.append("1) Managix is incorrectly configured. Please run " + "managix validate"
+ + " to run a validation test and correct the errors reported.");
+ msg.append("\n2) If validation in (1) is successful, ensure that java_home parameter is set correctly in Managix configuration ("
+ + AsterixEventServiceUtil.MANAGIX_CONF_XML + ")");
+ throw new Exception(msg.toString());
+ }
+ msgQ.take();
+ createRootIfNotExist();
+ }
+
+ public void stopService(Configuration conf) throws Exception {
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Stopping ZooKeeper running at " + zkConnectionString);
+ }
+ String stopScript = ZOOKEEPER_HOME + File.separator + "bin" + File.separator + "stop_zk";
+ StringBuffer cmdBuffer = new StringBuffer();
+ cmdBuffer.append(stopScript + " ");
+ cmdBuffer.append(conf.getZookeeper().getHomeDir() + " ");
+ List<String> zkServers = conf.getZookeeper().getServers().getServer();
+ for (String zkServer : zkServers) {
+ cmdBuffer.append(zkServer + " ");
+ }
+ Runtime.getRuntime().exec(cmdBuffer.toString());
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("Stopped ZooKeeper service at " + zkConnectionString);
+ }
+ }
+
+ public void writeAsterixInstance(AsterixInstance asterixInstance) throws Exception {
+ String instanceBasePath = ASTERIX_INSTANCE_BASE_PATH + File.separator + asterixInstance.getName();
+ ByteArrayOutputStream b = new ByteArrayOutputStream();
+ ObjectOutputStream o = new ObjectOutputStream(b);
+ o.writeObject(asterixInstance);
+ zk.create(instanceBasePath, b.toByteArray(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+ }
+
+ private void createRootIfNotExist() throws Exception {
+ try {
+ Stat stat = zk.exists(ASTERIX_INSTANCE_BASE_PATH, false);
+ if (stat == null) {
+ zk.create(ASTERIX_INSTANCE_BASE_PATH, "root".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+ }
+ } catch (Exception e) {
+ createRootIfNotExist();
+ }
+ }
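+    // Editor's caveat: the catch block retries by recursing with no backoff or bound,
+    // so this only terminates once the ZooKeeper ensemble becomes reachable.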
+
+ public AsterixInstance getAsterixInstance(String name) throws Exception {
+ String path = ASTERIX_INSTANCE_BASE_PATH + File.separator + name;
+        Stat stat = zk.exists(path, false);
+ if (stat == null) {
+ return null;
+ }
+ byte[] asterixInstanceBytes = zk.getData(path, false, new Stat());
+ return readAsterixInstanceObject(asterixInstanceBytes);
+ }
+
+ public boolean exists(String asterixInstanceName) throws Exception {
+ return zk.exists(ASTERIX_INSTANCE_BASE_PATH + File.separator + asterixInstanceName, false) != null;
+ }
+
+ public void removeAsterixInstance(String name) throws Exception {
+ if (!exists(name)) {
+ throw new EventException("Asterix instance by name " + name + " does not exists.");
+ }
+ zk.delete(ASTERIX_INSTANCE_BASE_PATH + File.separator + name, DEFAULT_NODE_VERSION);
+ }
+
+ public List<AsterixInstance> getAsterixInstances() throws Exception {
+ List<String> instanceNames = zk.getChildren(ASTERIX_INSTANCE_BASE_PATH, false);
+ List<AsterixInstance> asterixInstances = new ArrayList<AsterixInstance>();
+ String path;
+ for (String instanceName : instanceNames) {
+ path = ASTERIX_INSTANCE_BASE_PATH + File.separator + instanceName;
+ byte[] asterixInstanceBytes = zk.getData(path, false, new Stat());
+ asterixInstances.add(readAsterixInstanceObject(asterixInstanceBytes));
+ }
+ return asterixInstances;
+ }
+
+ private AsterixInstance readAsterixInstanceObject(byte[] asterixInstanceBytes) throws IOException,
+ ClassNotFoundException {
+ ByteArrayInputStream b = new ByteArrayInputStream(asterixInstanceBytes);
+ ObjectInputStream ois = new ObjectInputStream(b);
+ return (AsterixInstance) ois.readObject();
+ }
+
+ public void updateAsterixInstance(AsterixInstance updatedInstance) throws Exception {
+ removeAsterixInstance(updatedInstance.getName());
+ writeAsterixInstance(updatedInstance);
+ }
+
+}
+
+class ZooKeeperWatcher implements Watcher {
+
+ private boolean isRunning = true;
+ private LinkedBlockingQueue<String> msgQ;
+
+ public ZooKeeperWatcher(LinkedBlockingQueue<String> msgQ) {
+ this.msgQ = msgQ;
+ }
+
+ public void process(WatchedEvent wEvent) {
+ switch (wEvent.getState()) {
+ case SyncConnected:
+ msgQ.add("connected");
+ break;
+ }
+ }
+
+ public boolean isRunning() {
+ return isRunning;
+ }
+
+}
+
+class ZookeeperUtil {
+
+ public static void writeConfiguration(String zooKeeperConfigPath, Configuration conf, int leaderConnPort,
+ int leaderElecPort) throws IOException {
+
+ StringBuffer buffer = new StringBuffer();
+ buffer.append("tickTime=1000" + "\n");
+ buffer.append("dataDir=" + conf.getZookeeper().getHomeDir() + File.separator + "data" + "\n");
+ buffer.append("clientPort=" + conf.getZookeeper().getClientPort().intValue() + "\n");
+ buffer.append("initLimit=" + 2 + "\n");
+ buffer.append("syncLimit=" + 2 + "\n");
+
+ List<String> servers = conf.getZookeeper().getServers().getServer();
+ int serverId = 1;
+ for (String server : servers) {
+ buffer.append("server" + "." + serverId + "=" + server + ":" + leaderConnPort + ":" + leaderElecPort + "\n");
+ serverId++;
+ }
+ AsterixEventServiceUtil.dumpToFile(zooKeeperConfigPath, buffer.toString());
+ }
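+    // Illustrative output (editor's sketch): with homeDir "/opt/zk", clientPort 2181
+    // and servers ["host1", "host2"], the generated zk.cfg reads:
+    //   tickTime=1000
+    //   dataDir=/opt/zk/data
+    //   clientPort=2181
+    //   initLimit=2
+    //   syncLimit=2
+    //   server.1=host1:2222:2223
+    //   server.2=host2:2222:2223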
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java
new file mode 100644
index 0000000..c29a96e
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.util;
+
+public class AsterixConstants {
+
+    public static final String ASTERIX_ROOT_METADATA_DIR = "asterix_root_metadata";
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java
new file mode 100644
index 0000000..9af9307
--- /dev/null
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java
@@ -0,0 +1,640 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.event.util;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uci.ics.asterix.event.driver.EventDriver;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.BackupInfo;
+import edu.uci.ics.asterix.event.model.BackupInfo.BackupType;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.pattern.Delay;
+import edu.uci.ics.asterix.event.schema.pattern.Event;
+import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
+import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.schema.pattern.Value;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.installer.schema.conf.Backup;
+
+public class PatternCreator {
+
+    public static final PatternCreator INSTANCE = new PatternCreator();
+
+ private PatternCreator() {
+
+ }
+
+ private ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
+
+ private void addInitialDelay(Pattern p, int delay, String unit) {
+ Delay d = new Delay(new Value(null, "" + delay), unit);
+ p.setDelay(d);
+ }
+
+ public Patterns getAsterixBinaryTransferPattern(String asterixInstanceName, Cluster cluster) throws Exception {
+ String ccLocationIp = cluster.getMasterNode().getClusterIp();
+ String destDir = cluster.getWorkingDir().getDir() + File.separator + "asterix";
+ List<Pattern> ps = new ArrayList<Pattern>();
+
+ Pattern copyHyracks = createCopyHyracksPattern(asterixInstanceName, cluster, ccLocationIp, destDir);
+ ps.add(copyHyracks);
+
+ boolean copyHyracksToNC = !cluster.getWorkingDir().isNFS();
+
+ for (Node node : cluster.getNode()) {
+ if (copyHyracksToNC) {
+ Pattern copyHyracksForNC = createCopyHyracksPattern(asterixInstanceName, cluster, node.getClusterIp(),
+ destDir);
+ ps.add(copyHyracksForNC);
+ }
+ }
+ ps.addAll(createHadoopLibraryTransferPattern(cluster).getPattern());
+ Patterns patterns = new Patterns(ps);
+ return patterns;
+ }
+
+ public Patterns getStartAsterixPattern(String asterixInstanceName, Cluster cluster) throws Exception {
+ String ccLocationId = cluster.getMasterNode().getId();
+ List<Pattern> ps = new ArrayList<Pattern>();
+
+ Pattern createCC = createCCStartPattern(ccLocationId);
+ addInitialDelay(createCC, 3, "sec");
+ ps.add(createCC);
+
+ for (Node node : cluster.getNode()) {
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ Pattern createNC = createNCStartPattern(cluster.getMasterNode().getClusterIp(), node.getId(),
+ asterixInstanceName + "_" + node.getId(), iodevices);
+ addInitialDelay(createNC, 5, "sec");
+ ps.add(createNC);
+ }
+
+ Patterns patterns = new Patterns(ps);
+ return patterns;
+ }
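+    // Editor's note: the returned pattern list is ordered deliberately: one
+    // "cc_start" event delayed by 3 seconds, then one "node_join" event per cluster
+    // node delayed by 5 seconds, so the CC is listening before NCs attempt to join.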
+
+ public Patterns getStopCommandPattern(String asterixInstanceName) throws Exception {
+ List<Pattern> ps = new ArrayList<Pattern>();
+ AsterixInstance asterixInstance = lookupService.getAsterixInstance(asterixInstanceName);
+ Cluster cluster = asterixInstance.getCluster();
+
+ String ccLocation = cluster.getMasterNode().getId();
+ Pattern createCC = createCCStopPattern(ccLocation);
+ addInitialDelay(createCC, 5, "sec");
+ ps.add(createCC);
+
+ int nodeControllerIndex = 1;
+ for (Node node : cluster.getNode()) {
+ Pattern createNC = createNCStopPattern(node.getId(), asterixInstanceName + "_" + nodeControllerIndex);
+ ps.add(createNC);
+ nodeControllerIndex++;
+ }
+
+ Patterns patterns = new Patterns(ps);
+ return patterns;
+ }
+
+ public Patterns getBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
+ BackupType backupType = BackupInfo.getBackupType(backupConf);
+ Patterns patterns = null;
+ switch (backupType) {
+ case HDFS:
+ patterns = getHDFSBackUpAsterixPattern(instance, backupConf);
+ break;
+ case LOCAL:
+ patterns = getLocalBackUpAsterixPattern(instance, backupConf);
+ break;
+ }
+ return patterns;
+ }
+
+ public Patterns getRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
+ BackupType backupType = backupInfo.getBackupType();
+ Patterns patterns = null;
+ switch (backupType) {
+ case HDFS:
+ patterns = getHDFSRestoreAsterixPattern(instance, backupInfo);
+ break;
+ case LOCAL:
+ patterns = getLocalRestoreAsterixPattern(instance, backupInfo);
+ break;
+ }
+ return patterns;
+ }
+
+ private Patterns getHDFSBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
+ Cluster cluster = instance.getCluster();
+ String hdfsUrl = backupConf.getHdfs().getUrl();
+ String hadoopVersion = backupConf.getHdfs().getVersion();
+ String hdfsBackupDir = backupConf.getBackupDir();
+ VerificationUtil.verifyBackupRestoreConfiguration(hdfsUrl, hadoopVersion, hdfsBackupDir);
+ String workingDir = cluster.getWorkingDir().getDir();
+ String backupId = "" + instance.getBackupInfo().size();
+ String store;
+ String pargs;
+ String iodevices;
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ for (Node node : cluster.getNode()) {
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
+ store = node.getStore() == null ? cluster.getStore() : node.getStore();
+ pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
+ + AsterixConstants.ASTERIX_ROOT_METADATA_DIR + " " + AsterixEventServiceUtil.TXN_LOG_DIR + " "
+ + backupId + " " + hdfsBackupDir + " " + "hdfs" + " " + node.getId() + " " + hdfsUrl + " "
+ + hadoopVersion;
+ Event event = new Event("backup", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ return new Patterns(patternList);
+ }
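+    // Editor's note: pargs is a positional, space-separated argument string for the
+    // "backup" event script: working dir, instance name, iodevices, store, root
+    // metadata dir, txn log dir, backup id, backup dir, mode ("hdfs"), node id,
+    // HDFS url and hadoop version.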
+
+ private Patterns getLocalBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
+ Cluster cluster = instance.getCluster();
+ String backupDir = backupConf.getBackupDir();
+ String workingDir = cluster.getWorkingDir().getDir();
+ String backupId = "" + instance.getBackupInfo().size();
+ String iodevices;
+ String txnLogDir;
+ String store;
+ String pargs;
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ for (Node node : cluster.getNode()) {
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
+ txnLogDir = node.getTxnLogDir() == null ? instance.getCluster().getTxnLogDir() : node.getTxnLogDir();
+ store = node.getStore() == null ? cluster.getStore() : node.getStore();
+ pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
+ + AsterixConstants.ASTERIX_ROOT_METADATA_DIR + " " + txnLogDir + " " + backupId + " " + backupDir
+ + " " + "local" + " " + node.getId();
+ Event event = new Event("backup", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ return new Patterns(patternList);
+ }
+
+ public Patterns getHDFSRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
+ Cluster cluster = instance.getCluster();
+ String clusterStore = instance.getCluster().getStore();
+ String hdfsUrl = backupInfo.getBackupConf().getHdfs().getUrl();
+ String hadoopVersion = backupInfo.getBackupConf().getHdfs().getVersion();
+ String hdfsBackupDir = backupInfo.getBackupConf().getBackupDir();
+ VerificationUtil.verifyBackupRestoreConfiguration(hdfsUrl, hadoopVersion, hdfsBackupDir);
+ String workingDir = cluster.getWorkingDir().getDir();
+ int backupId = backupInfo.getId();
+ String nodeStore;
+ String pargs;
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ for (Node node : cluster.getNode()) {
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ nodeStore = node.getStore() == null ? clusterStore : node.getStore();
+ pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + nodeStore + " "
+ + AsterixConstants.ASTERIX_ROOT_METADATA_DIR + " " + AsterixEventServiceUtil.TXN_LOG_DIR + " "
+ + backupId + " " + " " + hdfsBackupDir + " " + "hdfs" + " " + node.getId() + " " + hdfsUrl + " "
+ + hadoopVersion;
+ Event event = new Event("restore", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ return new Patterns(patternList);
+ }
+
+ public Patterns getLocalRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
+ Cluster cluster = instance.getCluster();
+ String clusterStore = instance.getCluster().getStore();
+ String backupDir = backupInfo.getBackupConf().getBackupDir();
+ String workingDir = cluster.getWorkingDir().getDir();
+ int backupId = backupInfo.getId();
+ String nodeStore;
+ String pargs;
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ for (Node node : cluster.getNode()) {
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ nodeStore = node.getStore() == null ? clusterStore : node.getStore();
+ pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + nodeStore + " "
+ + AsterixConstants.ASTERIX_ROOT_METADATA_DIR + " " + AsterixEventServiceUtil.TXN_LOG_DIR + " "
+ + backupId + " " + backupDir + " " + "local" + " " + node.getId();
+ Event event = new Event("restore", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ return new Patterns(patternList);
+ }
+
+ public Patterns createHadoopLibraryTransferPattern(Cluster cluster) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ String workingDir = cluster.getWorkingDir().getDir();
+ String hadoopVersion = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
+ File hadoopDir = new File(AsterixEventService.getEventHome() + File.separator + "hadoop-" + hadoopVersion);
+ if (!hadoopDir.exists()) {
+ throw new IllegalStateException("Hadoop version :" + hadoopVersion + " not supported");
+ }
+
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
+ String pargs = username + " " + hadoopDir.getAbsolutePath() + " " + cluster.getMasterNode().getClusterIp()
+ + " " + workingDir;
+ Event event = new Event("directory_transfer", nodeid, pargs);
+ Pattern p = new Pattern(null, 1, null, event);
+ addInitialDelay(p, 2, "sec");
+ patternList.add(p);
+
+ boolean copyToNC = !cluster.getWorkingDir().isNFS();
+ if (copyToNC) {
+ for (Node node : cluster.getNode()) {
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ pargs = cluster.getUsername() + " " + hadoopDir.getAbsolutePath() + " " + node.getClusterIp() + " "
+ + workingDir;
+ event = new Event("directory_transfer", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ addInitialDelay(p, 2, "sec");
+ patternList.add(p);
+ }
+ }
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ public Patterns createDeleteInstancePattern(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ patternList.addAll(createRemoveAsterixStoragePattern(instance).getPattern());
+ if (instance.getBackupInfo() != null && instance.getBackupInfo().size() > 0) {
+ List<BackupInfo> backups = instance.getBackupInfo();
+ Set<String> removedBackupDirsHDFS = new HashSet<String>();
+ Set<String> removedBackupDirsLocal = new HashSet<String>();
+
+ String backupDir;
+ for (BackupInfo binfo : backups) {
+ backupDir = binfo.getBackupConf().getBackupDir();
+ switch (binfo.getBackupType()) {
+ case HDFS:
+                        if (removedBackupDirsHDFS.contains(backupDir)) {
+ continue;
+ }
+ patternList.addAll(createRemoveHDFSBackupPattern(instance, backupDir).getPattern());
+ removedBackupDirsHDFS.add(backupDir);
+ break;
+
+ case LOCAL:
+                        if (removedBackupDirsLocal.contains(backupDir)) {
+ continue;
+ }
+ patternList.addAll(createRemoveLocalBackupPattern(instance, backupDir).getPattern());
+ removedBackupDirsLocal.add(backupDir);
+ break;
+ }
+
+ }
+ }
+ patternList.addAll(createRemoveAsterixLogDirPattern(instance).getPattern());
+ patternList.addAll(createRemoveAsterixRootMetadata(instance).getPattern());
+ patternList.addAll(createRemoveAsterixTxnLogs(instance).getPattern());
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveAsterixTxnLogs(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ Nodeid nodeid = null;
+ Event event = null;
+ for (Node node : cluster.getNode()) {
+ String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ event = new Event("file_delete", nodeid, txnLogDir);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveHDFSBackupPattern(AsterixInstance instance, String hdfsBackupDir) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ String hdfsUrl = AsterixEventService.getConfiguration().getBackup().getHdfs().getUrl();
+ String hadoopVersion = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
+ String workingDir = cluster.getWorkingDir().getDir();
+ Node launchingNode = cluster.getNode().get(0);
+ Nodeid nodeid = new Nodeid(new Value(null, launchingNode.getId()));
+ String pathToDelete = hdfsBackupDir + File.separator + instance.getName();
+ String pargs = workingDir + " " + hadoopVersion + " " + hdfsUrl + " " + pathToDelete;
+ Event event = new Event("hdfs_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveLocalBackupPattern(AsterixInstance instance, String localBackupDir) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+
+ String pathToDelete = localBackupDir + File.separator + instance.getName();
+ String pargs = pathToDelete;
+ List<String> removedBackupDirs = new ArrayList<String>();
+ for (Node node : cluster.getNode()) {
+ if (removedBackupDirs.contains(node.getClusterIp())) {
+ continue;
+ }
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ Event event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ removedBackupDirs.add(node.getClusterIp());
+ }
+
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ public Patterns createRemoveAsterixWorkingDirPattern(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ String workingDir = cluster.getWorkingDir().getDir();
+ String pargs = workingDir;
+ Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
+ Event event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+
+ if (!cluster.getWorkingDir().isNFS()) {
+ for (Node node : cluster.getNode()) {
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ }
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ public Patterns getLibraryInstallPattern(AsterixInstance instance, String dataverse, String libraryName,
+ String libraryPath) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
+ String workingDir = cluster.getWorkingDir().getDir();
+ String destDir = workingDir + File.separator + "library" + File.separator + dataverse + File.separator
+ + libraryName;
+ String fileToTransfer = new File(libraryPath).getAbsolutePath();
+
+ Iterator<Node> installTargets = cluster.getNode().iterator();
+ Node installNode = installTargets.next();
+ String destinationIp = installNode.getClusterIp();
+ String pargs = username + " " + fileToTransfer + " " + destinationIp + " " + destDir + " " + "unpack";
+ Event event = new Event("file_transfer", nodeid, pargs);
+ Pattern p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+
+ if (!cluster.getWorkingDir().isNFS()) {
+ while (installTargets.hasNext()) {
+ Node node = installTargets.next();
+ pargs = username + " " + fileToTransfer + " " + node.getClusterIp() + " " + destDir + " " + "unpack";
+ event = new Event("file_transfer", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ }
+
+ pargs = username + " " + fileToTransfer + " " + cluster.getMasterNode().getClusterIp() + " " + destDir
+ + " " + "unpack";
+ event = new Event("file_transfer", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ }
+ return new Patterns(patternList);
+ }
+
+ public Patterns getLibraryUninstallPattern(AsterixInstance instance, String dataverse, String libraryName)
+ throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ String workingDir = cluster.getWorkingDir().getDir();
+ String destFile = dataverse + "." + libraryName;
+ String pargs = workingDir + File.separator + "uninstall" + " " + destFile;
+
+ String metadataNodeId = instance.getMetadataNodeId();
+ Nodeid nodeid = new Nodeid(new Value(null, metadataNodeId));
+ Event event = new Event("file_create", nodeid, pargs);
+ Pattern p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+
+ Iterator<Node> uninstallTargets = cluster.getNode().iterator();
+ String libDir = workingDir + File.separator + "library" + File.separator + dataverse + File.separator
+ + libraryName;
+ Node uninstallNode = uninstallTargets.next();
+ nodeid = new Nodeid(new Value(null, uninstallNode.getId()));
+ event = new Event("file_delete", nodeid, libDir);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ pargs = libDir;
+
+ if (!cluster.getWorkingDir().isNFS()) {
+ while (uninstallTargets.hasNext()) {
+ uninstallNode = uninstallTargets.next();
+ nodeid = new Nodeid(new Value(null, uninstallNode.getId()));
+ event = new Event("file_delete", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ }
+
+ nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
+ event = new Event("file_delete", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+
+ }
+ return new Patterns(patternList);
+ }
+
+ private Patterns createRemoveAsterixRootMetadata(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ Nodeid nodeid = null;
+ String pargs = null;
+ Event event = null;
+ for (Node node : cluster.getNode()) {
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ String primaryIODevice = iodevices.split(",")[0].trim();
+ pargs = primaryIODevice + File.separator + AsterixConstants.ASTERIX_ROOT_METADATA_DIR;
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveAsterixLogDirPattern(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ String pargs = instance.getCluster().getLogDir();
+ Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
+ Event event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+
+ for (Node node : cluster.getNode()) {
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ if (node.getLogDir() != null) {
+ pargs = node.getLogDir();
+ }
+ event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveAsterixStoragePattern(AsterixInstance instance) throws Exception {
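+        // Deletes the storage directory (<iodevice>/<store>) on every iodevice of every node.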
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ String pargs = null;
+
+ for (Node node : cluster.getNode()) {
+ Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
+ String[] nodeIODevices;
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ nodeIODevices = iodevices.trim().split(",");
+ for (String nodeIODevice : nodeIODevices) {
+ String nodeStore = node.getStore() == null ? cluster.getStore() : node.getStore();
+ pargs = nodeIODevice.trim() + File.separator + nodeStore;
+ Event event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+ }
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Pattern createCopyHyracksPattern(String instanceName, Cluster cluster, String destinationIp, String destDir) {
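+        // Ships the instance's Asterix zip from the installer client to destinationIp and unpacks it into destDir.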
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ String fileToTransfer = new File(AsterixEventService.getAsterixDir() + File.separator + instanceName
+ + File.separator + asterixZipName).getAbsolutePath();
+ String pargs = username + " " + fileToTransfer + " " + destinationIp + " " + destDir + " " + "unpack";
+ Event event = new Event("file_transfer", nodeid, pargs);
+ return new Pattern(null, 1, null, event);
+ }
+
+ private Pattern createCCStartPattern(String hostId) {
+ Nodeid nodeid = new Nodeid(new Value(null, hostId));
+ Event event = new Event("cc_start", nodeid, "");
+ return new Pattern(null, 1, null, event);
+ }
+
+ public Pattern createCCStopPattern(String hostId) {
+ Nodeid nodeid = new Nodeid(new Value(null, hostId));
+ Event event = new Event("cc_failure", nodeid, null);
+ return new Pattern(null, 1, null, event);
+ }
+
+ public Pattern createNCStartPattern(String ccHost, String hostId, String nodeControllerId, String iodevices) {
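+        // node_join arguments: CC host, node controller id, and the comma-separated iodevice list.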
+ Nodeid nodeid = new Nodeid(new Value(null, hostId));
+ String pargs = ccHost + " " + nodeControllerId + " " + iodevices;
+ Event event = new Event("node_join", nodeid, pargs);
+ return new Pattern(null, 1, null, event);
+ }
+
+ public Pattern createNCStopPattern(String hostId, String nodeControllerId) {
+ Nodeid nodeid = new Nodeid(new Value(null, hostId));
+ Event event = new Event("node_failure", nodeid, nodeControllerId);
+ return new Pattern(null, 1, null, event);
+ }
+
+ public Patterns createPrepareNodePattern(String instanceName, Cluster cluster, Node nodeToBeAdded) {
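+        // When the working directory is not NFS-mounted, stage the Asterix package and the
+        // matching Hadoop distribution on both the master and the node being added.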
+ List<Pattern> ps = new ArrayList<Pattern>();
+ boolean workingDirOnNFS = cluster.getWorkingDir().isNFS();
+ if (!workingDirOnNFS) {
+ String ccLocationIp = cluster.getMasterNode().getClusterIp();
+ String destDir = cluster.getWorkingDir().getDir() + File.separator + "asterix";
+ Pattern copyHyracks = createCopyHyracksPattern(instanceName, cluster, ccLocationIp, destDir);
+ ps.add(copyHyracks);
+
+ String workingDir = cluster.getWorkingDir().getDir();
+ String hadoopVersion = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
+ File hadoopDir = new File(AsterixEventService.getEventHome() + File.separator + "hadoop-" + hadoopVersion);
+ if (!hadoopDir.exists()) {
+ throw new IllegalStateException("Hadoop version :" + hadoopVersion + " not supported");
+ }
+
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
+ String pargs = username + " " + hadoopDir.getAbsolutePath() + " " + cluster.getMasterNode().getClusterIp()
+ + " " + workingDir;
+ Event event = new Event("directory_transfer", nodeid, pargs);
+ Pattern p = new Pattern(null, 1, null, event);
+ addInitialDelay(p, 2, "sec");
+ ps.add(p);
+
+ nodeid = new Nodeid(new Value(null, nodeToBeAdded.getId()));
+            pargs = username + " " + hadoopDir.getAbsolutePath() + " " + nodeToBeAdded.getClusterIp()
+ + " " + workingDir;
+ event = new Event("directory_transfer", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ addInitialDelay(p, 2, "sec");
+ ps.add(p);
+ }
+
+ Patterns patterns = new Patterns(ps);
+ return patterns;
+ }
+
+ public Patterns getGenerateLogPattern(String asterixInstanceName, Cluster cluster, String outputDir) {
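+        // Collects the CC log directory and each NC's log directory into outputDir, skipping a
+        // node whose log directory on that host was already collected (e.g. an NC co-located with the CC).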
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Map<String, String> nodeLogs = new HashMap<String, String>();
+
+ String username = cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername();
+ String srcHost = cluster.getMasterNode().getClientIp();
+ Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
+ String srcDir = cluster.getMasterNode().getLogDir() == null ? cluster.getLogDir() : cluster.getMasterNode()
+ .getLogDir();
+ String destDir = outputDir + File.separator + "cc";
+ String pargs = username + " " + srcHost + " " + srcDir + " " + destDir;
+ Event event = new Event("directory_copy", nodeid, pargs);
+ Pattern p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ nodeLogs.put(cluster.getMasterNode().getClusterIp(), srcDir);
+ for (Node node : cluster.getNode()) {
+ srcHost = node.getClusterIp();
+ srcDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+ if (nodeLogs.get(node.getClusterIp()) != null && nodeLogs.get(node.getClusterIp()).equals(srcDir)) {
+ continue;
+ }
+ destDir = outputDir + File.separator + node.getId();
+ pargs = username + " " + srcHost + " " + srcDir + " " + destDir;
+ event = new Event("directory_copy", nodeid, pargs);
+ p = new Pattern(null, 1, null, event);
+ patternList.add(p);
+ }
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/xml/PatternParser.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/xml/PatternParser.java
deleted file mode 100644
index af46e63..0000000
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/xml/PatternParser.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.event.xml;
-
-public class PatternParser {
-
- public static void parsePattern(String path){
-
- }
-}
-
diff --git a/asterix-events/src/main/resources/events/backup/backup.sh b/asterix-events/src/main/resources/events/backup/backup.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/cc_failure/cc_failure.sh b/asterix-events/src/main/resources/events/cc_failure/cc_failure.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/cc_start/cc_start.sh b/asterix-events/src/main/resources/events/cc_start/cc_start.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/events.xml b/asterix-events/src/main/resources/events/events.xml
index 01495cb..67a29a2 100644
--- a/asterix-events/src/main/resources/events/events.xml
+++ b/asterix-events/src/main/resources/events/events.xml
@@ -94,7 +94,14 @@
<type>file_delete</type>
<script>file/delete.sh</script>
<description>Deletes a file on the file system of a remote node</description>
- <args>local_source_path destination_node destination_path</args>
+ <args>destination_node destination_path</args>
+ <daemon>false</daemon>
+ </event>
+ <event>
+ <type>file_create</type>
+ <script>file/create_file.sh</script>
+ <description>Creates a file on the file system of a remote node</description>
+ <args>destination_node destination_path</args>
<daemon>false</daemon>
</event>
<event>
diff --git a/asterix-events/src/main/resources/events/execute.sh b/asterix-events/src/main/resources/events/execute.sh
new file mode 100644
index 0000000..30f6c2f
--- /dev/null
+++ b/asterix-events/src/main/resources/events/execute.sh
@@ -0,0 +1,27 @@
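+# Runs "$ENV $SCRIPT $ARGS" on the remote host $IP_LOCATION over ssh, as $USERNAME when
+# one is passed; $ENV, $SCRIPT, $ARGS, $IP_LOCATION and $DAEMON are expected to be
+# exported by the caller. Invocations are logged to $MANAGIX_HOME/logs/execute.log, and
+# when DAEMON is true the remote command is started in the background and not waited on.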
+USERNAME=$1
+if [ ! -d "$MANAGIX_HOME/logs" ];
+then
+ mkdir -p $MANAGIX_HOME/logs
+fi
+LOGDIR=$MANAGIX_HOME/logs
+if [ $DAEMON == "false" ]; then
+ if [ -z $USERNAME ]
+ then
+ cmd_output=$(ssh $IP_LOCATION "$ENV $SCRIPT $ARGS" 2>&1 >/dev/null)
+ echo "ssh $IP_LOCATION $ENV $SCRIPT $ARGS" >> $LOGDIR/execute.log
+ echo "$cmd_output"
+ else
+ echo "ssh -l $USERNAME $IP_LOCATION $ENV $SCRIPT $ARGS" >> $LOGDIR/execute.log
+ cmd_output=$(ssh -l $USERNAME $IP_LOCATION "$ENV $SCRIPT $ARGS" 2>&1 >/dev/null)
+ echo "$cmd_output"
+ fi
+else
+  if [ -z "$USERNAME" ];
+ then
+ echo "ssh $IP_LOCATION $ENV $SCRIPT $ARGS &" >> $LOGDIR/execute.log
+ ssh $IP_LOCATION "$ENV $SCRIPT $ARGS" &
+ else
+ echo "ssh -l $USERNAME $IP_LOCATION $ENV $SCRIPT $ARGS &" >> $LOGDIR/execute.log
+ ssh -l $USERNAME $IP_LOCATION "$ENV $SCRIPT $ARGS" &
+ fi
+fi
diff --git a/asterix-events/src/main/resources/events/file/create_file.sh b/asterix-events/src/main/resources/events/file/create_file.sh
new file mode 100644
index 0000000..762a2d3
--- /dev/null
+++ b/asterix-events/src/main/resources/events/file/create_file.sh
@@ -0,0 +1,3 @@
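+# Creates directory $1 on this node if needed and touches the marker file $1/$2,
+# logging each invocation to ~/file_create.log.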
+mkdir -p $1
+echo "touch $1/$2" >> ~/file_create.log
+touch $1/$2
diff --git a/asterix-events/src/main/resources/events/file/delete.sh b/asterix-events/src/main/resources/events/file/delete.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/file/dir_transfer.sh b/asterix-events/src/main/resources/events/file/dir_transfer.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/file/transfer.sh b/asterix-events/src/main/resources/events/file/transfer.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/hdfs/delete.sh b/asterix-events/src/main/resources/events/hdfs/delete.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/node_failure/nc_failure.sh b/asterix-events/src/main/resources/events/node_failure/nc_failure.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/node_info/node_info.sh b/asterix-events/src/main/resources/events/node_info/node_info.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/events/node_join/nc_join.sh b/asterix-events/src/main/resources/events/node_join/nc_join.sh
old mode 100755
new mode 100644
index e0254c9..a18fe09
--- a/asterix-events/src/main/resources/events/node_join/nc_join.sh
+++ b/asterix-events/src/main/resources/events/node_join/nc_join.sh
@@ -19,5 +19,7 @@
then
mkdir -p $LOG_DIR
fi
+
cd $WORKING_DIR
+
$ASTERIX_HOME/bin/asterixnc -node-id $NC_ID -cc-host $CC_HOST -cc-port $CLUSTER_NET_PORT -cluster-net-ip-address $IP_LOCATION -data-ip-address $IP_LOCATION -iodevices $IO_DEVICES -result-ip-address $IP_LOCATION &> $LOG_DIR/${NC_ID}.log
diff --git a/asterix-events/src/main/resources/events/node_restart/nc_restart.sh b/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
old mode 100755
new mode 100644
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql b/asterix-events/src/main/resources/events/prepare.sh
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql
copy to asterix-events/src/main/resources/events/prepare.sh
diff --git a/asterix-events/src/main/resources/events/restore/restore.sh b/asterix-events/src/main/resources/events/restore/restore.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/schema/cluster.xsd b/asterix-events/src/main/resources/schema/cluster.xsd
deleted file mode 100644
index f0d5bd9..0000000
--- a/asterix-events/src/main/resources/schema/cluster.xsd
+++ /dev/null
@@ -1,99 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1" ?>
-<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
- xmlns:cl="cluster" targetNamespace="cluster" elementFormDefault="qualified">
-
- <!-- definition of simple types -->
- <xs:element name="name" type="xs:string" />
- <xs:element name="log_dir" type="xs:string" />
- <xs:element name="txn_log_dir" type="xs:string" />
- <xs:element name="id" type="xs:string" />
- <xs:element name="client_ip" type="xs:string" />
- <xs:element name="cluster_ip" type="xs:string" />
- <xs:element name="key" type="xs:string" />
- <xs:element name="value" type="xs:string" />
- <xs:element name="dir" type="xs:string" />
- <xs:element name="NFS" type="xs:boolean" />
- <xs:element name="store" type="xs:string" />
- <xs:element name="iodevices" type="xs:string" />
- <xs:element name="java_home" type="xs:string" />
- <xs:element name="username" type="xs:string" />
- <xs:element name="web_port" type="xs:string" />
- <xs:element name="client_port" type="xs:integer" />
- <xs:element name="cluster_port" type="xs:integer" />
- <xs:element name="http_port" type="xs:integer" />
-
- <!-- definition of complex elements -->
- <xs:element name="working_dir">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:dir" />
- <xs:element ref="cl:NFS" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="master_node">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:id" />
- <xs:element ref="cl:client_ip" />
- <xs:element ref="cl:cluster_ip" />
- <xs:element ref="cl:java_home" minOccurs="0" />
- <xs:element ref="cl:log_dir" minOccurs="0" />
- <xs:element ref="cl:client_port" />
- <xs:element ref="cl:cluster_port" />
- <xs:element ref="cl:http_port" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="property">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:key" />
- <xs:element ref="cl:value" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="env">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:property" minOccurs="0" maxOccurs="unbounded" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="node">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:id" />
- <xs:element ref="cl:cluster_ip" />
- <xs:element ref="cl:java_home" minOccurs="0" />
- <xs:element ref="cl:log_dir" minOccurs="0" />
- <xs:element ref="cl:txn_log_dir" minOccurs="0" />
- <xs:element ref="cl:store" minOccurs="0" />
- <xs:element ref="cl:iodevices" minOccurs="0" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
- <xs:element name="cluster">
- <xs:complexType>
- <xs:sequence>
- <xs:element ref="cl:name" />
- <xs:element ref="cl:username" />
- <xs:element ref="cl:env" minOccurs="0" />
- <xs:element ref="cl:java_home" minOccurs="0" />
- <xs:element ref="cl:log_dir" minOccurs="0" />
- <xs:element ref="cl:txn_log_dir" minOccurs="0" />
- <xs:element ref="cl:store" minOccurs="0" />
- <xs:element ref="cl:iodevices" minOccurs="0" />
- <xs:element ref="cl:working_dir" />
- <xs:element ref="cl:master_node" />
- <xs:element ref="cl:node" maxOccurs="unbounded" />
- </xs:sequence>
- </xs:complexType>
- </xs:element>
-
-</xs:schema>
diff --git a/asterix-installer/src/main/resources/schema/installer-conf.xsd b/asterix-events/src/main/resources/schema/installer-conf.xsd
similarity index 100%
rename from asterix-installer/src/main/resources/schema/installer-conf.xsd
rename to asterix-events/src/main/resources/schema/installer-conf.xsd
diff --git a/asterix-events/src/main/resources/scripts/execute.sh b/asterix-events/src/main/resources/scripts/execute.sh
old mode 100755
new mode 100644
diff --git a/asterix-events/src/main/resources/scripts/prepare.sh b/asterix-events/src/main/resources/scripts/prepare.sh
old mode 100755
new mode 100644
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index f8d5ea2..4f57a80 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -1,23 +1,18 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!--
+ ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License");
+ ! you may not use this file except in compliance with the License.
+ ! you may obtain a copy of the License from
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing, software
+ ! distributed under the License is distributed on an "AS IS" BASIS,
+ ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ! See the License for the specific language governing permissions and
+ ! limitations under the License.
+ !-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-external-data</artifactId>
@@ -34,53 +29,62 @@
</configuration>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>appassembler-maven-plugin</artifactId>
- <version>1.0</version>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
<executions>
<execution>
- <configuration>
- <programs>
- <program>
- <mainClass>edu.uci.ics.asterix.drivers.AsterixWebServer</mainClass>
- <name>asterix-web</name>
- </program>
- <program>
- <mainClass>edu.uci.ics.asterix.drivers.AsterixClientDriver</mainClass>
- <name>asterix-cmd</name>
- </program>
- </programs>
- <repositoryLayout>flat</repositoryLayout>
- <repositoryName>lib</repositoryName>
- </configuration>
- <phase>package</phase>
+ <id>configuration</id>
<goals>
- <goal>assemble</goal>
+ <goal>generate</goal>
</goals>
+ <configuration>
+ <schemaDirectory>src/main/resources/schema</schemaDirectory>
+ <schemaIncludes>
+ <include>library.xsd</include>
+ </schemaIncludes>
+ <generatePackage>edu.uci.ics.asterix.external.library</generatePackage>
+ <generateDirectory>${project.build.directory}/generated-sources/configuration</generateDirectory>
+ </configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.7.2</version>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.2</version>
<configuration>
- <!-- doesn't work from m2eclipse, currently <additionalClasspathElements>
- <additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
- </additionalClasspathElements> -->
- <forkMode>pertest</forkMode>
- <argLine>-enableassertions -Xmx${test.heap.size}m
- -Dfile.encoding=UTF-8
- -Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
<includes>
- <include>**/*TestSuite.java</include>
- <include>**/*Test.java</include>
+ <include>**/*.class</include>
+ <include>**/*.txt</include>
</includes>
</configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ <phase>package</phase>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-5</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptor>src/main/assembly/binary-assembly-libzip.xml</descriptor>
+ <finalName>testlib-zip</finalName>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
</plugin>
</plugins>
</build>
-
<dependencies>
<dependency>
<groupId>javax.servlet</groupId>
@@ -96,18 +100,29 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-metadata</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>algebricks-compiler</artifactId>
+ </dependency>
+ <dependency>
<groupId>com.kenai.nbpwr</groupId>
<artifactId>org-apache-commons-io</artifactId>
<version>1.3.1-201002241208</version>
@@ -153,6 +168,15 @@
<artifactId>jdom</artifactId>
<version>1.0</version>
</dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-common</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>com.microsoft.windowsazure</groupId>
+ <artifactId>microsoft-windowsazure-api</artifactId>
+ <version>0.4.4</version>
+ </dependency>
</dependencies>
-
-</project>
+</project>
diff --git a/asterix-external-data/src/main/assembly/binary-assembly-libjar.xml b/asterix-external-data/src/main/assembly/binary-assembly-libjar.xml
new file mode 100644
index 0000000..d76174a
--- /dev/null
+++ b/asterix-external-data/src/main/assembly/binary-assembly-libjar.xml
@@ -0,0 +1,19 @@
+<assembly>
+ <id>binary-assembly</id>
+ <formats>
+ <format>jar</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>target/test-classes</directory>
+ <outputDirectory></outputDirectory>
+ <includes>
+ <include>**</include>
+ </includes>
+ <excludes>
+ <exclude>**.xml</exclude>
+ </excludes>
+ </fileSet>
+ </fileSets>
+</assembly>
diff --git a/asterix-external-data/src/main/assembly/binary-assembly-libzip.xml b/asterix-external-data/src/main/assembly/binary-assembly-libzip.xml
new file mode 100644
index 0000000..bec6e32
--- /dev/null
+++ b/asterix-external-data/src/main/assembly/binary-assembly-libzip.xml
@@ -0,0 +1,23 @@
+<assembly>
+ <id>binary-assembly</id>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>target</directory>
+ <outputDirectory></outputDirectory>
+ <includes>
+ <include>*test*.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>src/test/resources</directory>
+ <outputDirectory></outputDirectory>
+ <includes>
+ <include>*.xml</include>
+ </includes>
+ </fileSet>
+ </fileSets>
+</assembly>
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
index f1a3d6c..fa66715 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
@@ -14,21 +14,74 @@
*/
package edu.uci.ics.asterix.external.adapter.factory;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
- * A factory class for creating the @see {CNNFeedAdapter}.
+ * A factory class for creating the CNN feed adapter, implemented as an {@link RSSFeedAdapter}
+ * pre-configured with CNN topic feed URLs.
*/
-public class CNNFeedAdapterFactory implements ITypedDatasetAdapterFactory {
+public class CNNFeedAdapterFactory implements ITypedAdapterFactory {
private static final long serialVersionUID = 1L;
+ private Map<String, String> configuration;
+
+ private List<String> feedURLs = new ArrayList<String>();
+ private static Map<String, String> topicFeeds = new HashMap<String, String>();
+ private ARecordType recordType;
+
+ public static final String KEY_RSS_URL = "topic";
+ public static final String KEY_INTERVAL = "interval";
+ public static final String TOP_STORIES = "topstories";
+ public static final String WORLD = "world";
+ public static final String US = "us";
+ public static final String SPORTS = "sports";
+ public static final String BUSINESS = "business";
+ public static final String POLITICS = "politics";
+ public static final String CRIME = "crime";
+ public static final String TECHNOLOGY = "technology";
+ public static final String HEALTH = "health";
+    public static final String ENTERTAINMENT = "entertainment";
+ public static final String TRAVEL = "travel";
+ public static final String LIVING = "living";
+ public static final String VIDEO = "video";
+ public static final String STUDENT = "student";
+ public static final String POPULAR = "popular";
+ public static final String RECENT = "recent";
+
+ private void initTopics() {
+ topicFeeds.put(TOP_STORIES, "http://rss.cnn.com/rss/cnn_topstories.rss");
+ topicFeeds.put(WORLD, "http://rss.cnn.com/rss/cnn_world.rss");
+ topicFeeds.put(US, "http://rss.cnn.com/rss/cnn_us.rss");
+ topicFeeds.put(SPORTS, "http://rss.cnn.com/rss/si_topstories.rss");
+ topicFeeds.put(BUSINESS, "http://rss.cnn.com/rss/money_latest.rss");
+ topicFeeds.put(POLITICS, "http://rss.cnn.com/rss/cnn_allpolitics.rss");
+ topicFeeds.put(CRIME, "http://rss.cnn.com/rss/cnn_crime.rss");
+ topicFeeds.put(TECHNOLOGY, "http://rss.cnn.com/rss/cnn_tech.rss");
+ topicFeeds.put(HEALTH, "http://rss.cnn.com/rss/cnn_health.rss");
+        topicFeeds.put(ENTERTAINMENT, "http://rss.cnn.com/rss/cnn_showbiz.rss");
+ topicFeeds.put(LIVING, "http://rss.cnn.com/rss/cnn_living.rss");
+ topicFeeds.put(VIDEO, "http://rss.cnn.com/rss/cnn_freevideo.rss");
+ topicFeeds.put(TRAVEL, "http://rss.cnn.com/rss/cnn_travel.rss");
+ topicFeeds.put(STUDENT, "http://rss.cnn.com/rss/cnn_studentnews.rss");
+ topicFeeds.put(POPULAR, "http://rss.cnn.com/rss/cnn_mostpopular.rss");
+ topicFeeds.put(RECENT, "http://rss.cnn.com/rss/cnn_latest.rss");
+ }
+
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration) throws Exception {
- CNNFeedAdapter cnnFeedAdapter = new CNNFeedAdapter();
- cnnFeedAdapter.configure(configuration);
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
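+        // The CNN adapter is just the generic RSS adapter pre-configured with CNN topic feed URLs.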
+ RSSFeedAdapter cnnFeedAdapter = new RSSFeedAdapter(configuration, recordType, ctx);
return cnnFeedAdapter;
}
@@ -37,4 +90,61 @@
return "cnn_feed";
}
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ this.configuration = configuration;
+ String rssURLProperty = (String) configuration.get(KEY_RSS_URL);
+ if (rssURLProperty == null) {
+ throw new IllegalArgumentException("no rss url provided");
+ }
+ initializeFeedURLs(rssURLProperty);
+ recordType = new ARecordType("FeedRecordType", new String[] { "id", "title", "description", "link" },
+ new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING },
+ false);
+
+ }
+
+ private void initializeFeedURLs(String rssURLProperty) {
+ feedURLs.clear();
+ String[] rssTopics = rssURLProperty.split(",");
+ initTopics();
+ for (String topic : rssTopics) {
+ String feedURL = topicFeeds.get(topic);
+ if (feedURL == null) {
+ throw new IllegalArgumentException(" unknown topic :" + topic + " please choose from the following "
+ + getValidTopics());
+ }
+ feedURLs.add(feedURL);
+ }
+ }
+
+ private static String getValidTopics() {
+ StringBuilder builder = new StringBuilder();
+ for (String key : topicFeeds.keySet()) {
+ builder.append(key);
+ builder.append(" ");
+ }
+        return builder.toString();
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return new AlgebricksCountPartitionConstraint(feedURLs.size());
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
index 4ca3d72..b10ebcf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.asterix.external.adapter.factory;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
@@ -22,10 +23,17 @@
import org.apache.hadoop.mapred.JobConf;
import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.ICCContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.hdfs.dataflow.ConfFactory;
import edu.uci.ics.hyracks.hdfs.dataflow.InputSplitsFactory;
import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
@@ -34,7 +42,7 @@
* A factory class for creating an instance of HDFSAdapter
*/
@SuppressWarnings("deprecation")
-public class HDFSAdapterFactory implements IGenericDatasetAdapterFactory {
+public class HDFSAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
private static final long serialVersionUID = 1L;
public static final String HDFS_ADAPTER_NAME = "hdfs";
@@ -52,7 +60,22 @@
private boolean executed[];
private InputSplitsFactory inputSplitsFactory;
private ConfFactory confFactory;
- private boolean setup = false;
+ private IAType atype;
+ private boolean configured = false;
+ public static Scheduler hdfsScheduler;
+ private static boolean initialized = false;
+
+ private static Scheduler initializeHDFSScheduler() {
+ ICCContext ccContext = AsterixAppContextInfo.getInstance().getCCApplicationContext().getCCContext();
+ Scheduler scheduler = null;
+ try {
+ scheduler = new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(), ccContext
+ .getClusterControllerInfo().getClientNetPort());
+ } catch (HyracksException e) {
+ throw new IllegalStateException("Cannot obtain hdfs scheduler");
+ }
+ return scheduler;
+ }
private static final Map<String, String> formatClassNames = initInputFormatMap();
@@ -64,30 +87,12 @@
}
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration, IAType atype) throws Exception {
- if (!setup) {
- /** set up the factory --serializable stuff --- this if-block should be called only once for each factory instance */
- configureJobConf(configuration);
- JobConf conf = configureJobConf(configuration);
- confFactory = new ConfFactory(conf);
-
- clusterLocations = (AlgebricksPartitionConstraint) configuration.get(CLUSTER_LOCATIONS);
- int numPartitions = ((AlgebricksAbsolutePartitionConstraint) clusterLocations).getLocations().length;
-
- InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
- inputSplitsFactory = new InputSplitsFactory(inputSplits);
-
- Scheduler scheduler = (Scheduler) configuration.get(SCHEDULER);
- readSchedule = scheduler.getLocationConstraints(inputSplits);
- executed = new boolean[readSchedule.length];
- Arrays.fill(executed, false);
-
- setup = true;
- }
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
JobConf conf = confFactory.getConf();
InputSplit[] inputSplits = inputSplitsFactory.getSplits();
- HDFSAdapter hdfsAdapter = new HDFSAdapter(atype, readSchedule, executed, inputSplits, conf, clusterLocations);
- hdfsAdapter.configure(configuration);
+ String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+ HDFSAdapter hdfsAdapter = new HDFSAdapter(atype, readSchedule, executed, inputSplits, conf, nodeName,
+ parserFactory, ctx);
return hdfsAdapter;
}
@@ -96,7 +101,7 @@
return HDFS_ADAPTER_NAME;
}
- private JobConf configureJobConf(Map<String, Object> configuration) throws Exception {
+ private JobConf configureJobConf(Map<String, String> configuration) throws Exception {
JobConf conf = new JobConf();
conf.set("fs.default.name", ((String) configuration.get(KEY_HDFS_URL)).trim());
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
@@ -107,4 +112,64 @@
return conf;
}
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.GENERIC;
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (!configured) {
+ throw new IllegalStateException("Adapter factory has not been configured yet");
+ }
+ return (AlgebricksPartitionConstraint) clusterLocations;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
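+        // Computes HDFS input splits and a locality-aware read schedule once, wrapping the
+        // job conf and splits in serializable factories so they can be shipped to the NCs.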
+ if (!initialized) {
+ hdfsScheduler = initializeHDFSScheduler();
+ initialized = true;
+ }
+ this.configuration = configuration;
+ JobConf conf = configureJobConf(configuration);
+ confFactory = new ConfFactory(conf);
+
+ clusterLocations = getClusterLocations();
+ int numPartitions = ((AlgebricksAbsolutePartitionConstraint) clusterLocations).getLocations().length;
+
+ InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
+ inputSplitsFactory = new InputSplitsFactory(inputSplits);
+
+ readSchedule = hdfsScheduler.getLocationConstraints(inputSplits);
+ executed = new boolean[readSchedule.length];
+ Arrays.fill(executed, false);
+ configured = true;
+
+ atype = (IAType) outputType;
+ configureFormat(atype);
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ private static AlgebricksPartitionConstraint getClusterLocations() {
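+        // One location entry per (store partition x iodevice) on each node, so that input
+        // splits are distributed across all IO devices in the cluster.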
+ ArrayList<String> locs = new ArrayList<String>();
+ Map<String, String[]> stores = AsterixAppContextInfo.getInstance().getMetadataProperties().getStores();
+ for (String i : stores.keySet()) {
+ String[] nodeStores = stores.get(i);
+ int numIODevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(i);
+ for (int j = 0; j < nodeStores.length; j++) {
+ for (int k = 0; k < numIODevices; k++) {
+ locs.add(i);
+ }
+ }
+ }
+ String[] cluster = new String[locs.size()];
+ cluster = locs.toArray(cluster);
+ return new AlgebricksAbsolutePartitionConstraint(cluster);
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
index 409eb7a..669dc61 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
@@ -14,45 +14,23 @@
*/
package edu.uci.ics.asterix.external.adapter.factory;
-import java.util.Arrays;
-import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-
import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.HiveAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.hdfs.dataflow.ConfFactory;
-import edu.uci.ics.hyracks.hdfs.dataflow.InputSplitsFactory;
-import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
* A factory class for creating an instance of HiveAdapter
*/
-@SuppressWarnings("deprecation")
-public class HiveAdapterFactory implements IGenericDatasetAdapterFactory {
+public class HiveAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
private static final long serialVersionUID = 1L;
- public static final String HDFS_ADAPTER_NAME = "hdfs";
- public static final String CLUSTER_LOCATIONS = "cluster-locations";
- public static transient String SCHEDULER = "hdfs-scheduler";
-
- public static final String KEY_HDFS_URL = "hdfs";
- public static final String KEY_PATH = "path";
- public static final String KEY_INPUT_FORMAT = "input-format";
- public static final String INPUT_FORMAT_TEXT = "text-input-format";
- public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
-
- public static final String KEY_FORMAT = "format";
- public static final String KEY_PARSER_FACTORY = "parser";
- public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
- public static final String FORMAT_ADM = "adm";
-
public static final String HIVE_DATABASE = "database";
public static final String HIVE_TABLE = "table";
public static final String HIVE_HOME = "hive-home";
@@ -60,47 +38,19 @@
public static final String HIVE_WAREHOUSE_DIR = "warehouse-dir";
public static final String HIVE_METASTORE_RAWSTORE_IMPL = "rawstore-impl";
- private String[] readSchedule;
- private boolean executed[];
- private InputSplitsFactory inputSplitsFactory;
- private ConfFactory confFactory;
- private transient AlgebricksPartitionConstraint clusterLocations;
- private boolean setup = false;
+ private HDFSAdapterFactory hdfsAdapterFactory;
+ private HDFSAdapter hdfsAdapter;
+ private boolean configured = false;
+ private IAType atype;
- private static final Map<String, String> formatClassNames = initInputFormatMap();
-
- private static Map<String, String> initInputFormatMap() {
- Map<String, String> formatClassNames = new HashMap<String, String>();
- formatClassNames.put(INPUT_FORMAT_TEXT, "org.apache.hadoop.mapred.TextInputFormat");
- formatClassNames.put(INPUT_FORMAT_SEQUENCE, "org.apache.hadoop.mapred.SequenceFileInputFormat");
- return formatClassNames;
+ public HiveAdapterFactory() {
+ hdfsAdapterFactory = new HDFSAdapterFactory();
}
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration, IAType atype) throws Exception {
- if (!setup) {
- /** set up the factory --serializable stuff --- this if-block should be called only once for each factory instance */
- configureJobConf(configuration);
- JobConf conf = configureJobConf(configuration);
- confFactory = new ConfFactory(conf);
-
- clusterLocations = (AlgebricksPartitionConstraint) configuration.get(CLUSTER_LOCATIONS);
- int numPartitions = ((AlgebricksAbsolutePartitionConstraint) clusterLocations).getLocations().length;
-
- InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
- inputSplitsFactory = new InputSplitsFactory(inputSplits);
-
- Scheduler scheduler = (Scheduler) configuration.get(SCHEDULER);
- readSchedule = scheduler.getLocationConstraints(inputSplits);
- executed = new boolean[readSchedule.length];
- Arrays.fill(executed, false);
-
- setup = true;
- }
- JobConf conf = confFactory.getConf();
- InputSplit[] inputSplits = inputSplitsFactory.getSplits();
- HiveAdapter hiveAdapter = new HiveAdapter(atype, readSchedule, executed, inputSplits, conf, clusterLocations);
- hiveAdapter.configure(configuration);
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ hdfsAdapter = (HDFSAdapter) hdfsAdapterFactory.createAdapter(ctx, partition);
+ HiveAdapter hiveAdapter = new HiveAdapter(atype, hdfsAdapter, parserFactory, ctx);
return hiveAdapter;
}
@@ -109,9 +59,25 @@
return "hive";
}
- private JobConf configureJobConf(Map<String, Object> configuration) throws Exception {
- JobConf conf = new JobConf();
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.GENERIC;
+ }
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
+        if (!configured) {
+            populateConfiguration(configuration);
+            hdfsAdapterFactory.configure(configuration, outputType);
+            atype = outputType;
+            configured = true;
+        }
+ }
+
+ private void populateConfiguration(Map<String, String> configuration) throws Exception {
/** configure hive */
String database = (String) configuration.get(HIVE_DATABASE);
String tablePath = null;
@@ -121,7 +87,7 @@
tablePath = configuration.get(HIVE_WAREHOUSE_DIR) + "/" + tablePath + ".db" + "/"
+ configuration.get(HIVE_TABLE);
}
- configuration.put(HDFSAdapter.KEY_PATH, tablePath);
+ configuration.put(HDFSAdapterFactory.KEY_PATH, tablePath);
if (!configuration.get(KEY_FORMAT).equals(FORMAT_DELIMITED_TEXT)) {
throw new IllegalArgumentException("format" + configuration.get(KEY_FORMAT) + " is not supported");
}
@@ -131,14 +97,11 @@
throw new IllegalArgumentException("file input format"
+ configuration.get(HDFSAdapterFactory.KEY_INPUT_FORMAT) + " is not supported");
}
-
- /** configure hdfs */
- conf.set("fs.default.name", ((String) configuration.get(KEY_HDFS_URL)).trim());
- conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
- conf.setClassLoader(HDFSAdapter.class.getClassLoader());
- conf.set("mapred.input.dir", ((String) configuration.get(KEY_PATH)).trim());
- conf.set("mapred.input.format.class",
- (String) formatClassNames.get(((String) configuration.get(KEY_INPUT_FORMAT)).trim()));
- return conf;
}
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return hdfsAdapterFactory.getPartitionConstraint();
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
deleted file mode 100644
index 5cb6777..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.io.Serializable;
-
-/**
- * Base interface for IGenericDatasetAdapterFactory and ITypedDatasetAdapterFactory.
- * Acts as a marker interface indicating that the implementation provides functionality
- * for creating an adapter.
- */
-public interface IAdapterFactory extends Serializable {
-
- /**
- * Returns the display name corresponding to the Adapter type that is created by the factory.
- *
- * @return the display name
- */
- public String getName();
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
deleted file mode 100644
index 0a178a7..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-/**
- * A base interface for an adapter factory that creates instance of an adapter kind that
- * is 'generic' in nature. A 'typed' adapter returns records with a configurable datatype.
- */
-public interface IGenericDatasetAdapterFactory extends IAdapterFactory {
-
- public static final String KEY_TYPE_NAME = "output-type-name";
-
- /**
- * Creates an instance of IDatasourceAdapter.
- *
- * @param configuration
- * The configuration parameters for the adapter that is instantiated.
- * The passed-in configuration is used to configure the created instance of the adapter.
- * @param atype
- * The type for the ADM records that are returned by the adapter.
- * @return An instance of IDatasourceAdapter.
- * @throws Exception
- */
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration, IAType atype) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
deleted file mode 100644
index 674bf52..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-
-/**
- * A base interface for an adapter factory that creates instance of an adapter kind that
- * is 'typed' in nature. A 'typed' adapter returns records with a pre-defined datatype.
- */
-public interface ITypedDatasetAdapterFactory extends IAdapterFactory {
-
- /**
- * Creates an instance of IDatasourceAdapter.
- *
- * @param configuration
- * The configuration parameters for the adapter that is instantiated.
- * The passed-in configuration is used to configure the created instance of the adapter.
- * @return An instance of IDatasourceAdapter.
- * @throws Exception
- */
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
index e680232..1a8c0df 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
@@ -14,25 +14,43 @@
*/
package edu.uci.ics.asterix.external.adapter.factory;
+import java.io.File;
import java.util.Map;
+import java.util.logging.Level;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter;
+import edu.uci.ics.asterix.external.util.DNSResolverFactory;
+import edu.uci.ics.asterix.external.util.INodeResolver;
+import edu.uci.ics.asterix.external.util.INodeResolverFactory;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
/**
* Factory class for creating an instance of NCFileSystemAdapter. An
* NCFileSystemAdapter reads external data residing on the local file system of
* an NC.
*/
-public class NCFileSystemAdapterFactory implements IGenericDatasetAdapterFactory {
+public class NCFileSystemAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
private static final long serialVersionUID = 1L;
+
public static final String NC_FILE_SYSTEM_ADAPTER_NAME = "localfs";
+ private static final INodeResolver DEFAULT_NODE_RESOLVER = new DNSResolverFactory().createNodeResolver();
+
+ private IAType sourceDatatype;
+ private FileSplit[] fileSplits;
+
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration, IAType atype) throws Exception {
- NCFileSystemAdapter fsAdapter = new NCFileSystemAdapter(atype);
- fsAdapter.configure(configuration);
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ NCFileSystemAdapter fsAdapter = new NCFileSystemAdapter(fileSplits, parserFactory, sourceDatatype, ctx);
return fsAdapter;
}
@@ -40,4 +58,88 @@
public String getName() {
return NC_FILE_SYSTEM_ADAPTER_NAME;
}
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.GENERIC;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
+ this.configuration = configuration;
+        String[] splits = configuration.get(KEY_PATH).split(",");
+        this.sourceDatatype = outputType;
+ configureFileSplits(splits);
+ configureFormat(sourceDatatype);
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return configurePartitionConstraint();
+ }
+
+ private void configureFileSplits(String[] splits) throws AsterixException {
+ if (fileSplits == null) {
+ fileSplits = new FileSplit[splits.length];
+ String nodeName;
+ String nodeLocalPath;
+ int count = 0;
+ String trimmedValue;
+ for (String splitPath : splits) {
+ trimmedValue = splitPath.trim();
+ if (!trimmedValue.contains("://")) {
+ throw new AsterixException("Invalid path: " + splitPath
+ + "\nUsage- path=\"Host://Absolute File Path\"");
+ }
+ nodeName = trimmedValue.split(":")[0];
+ nodeLocalPath = trimmedValue.split("://")[1];
+ FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
+ fileSplits[count++] = fileSplit;
+ }
+ }
+ }
+
+ private AlgebricksPartitionConstraint configurePartitionConstraint() throws AsterixException {
+ String[] locs = new String[fileSplits.length];
+ String location;
+ for (int i = 0; i < fileSplits.length; i++) {
+ location = getNodeResolver().resolveNode(fileSplits[i].getNodeName());
+ locs[i] = location;
+ }
+ return new AlgebricksAbsolutePartitionConstraint(locs);
+ }
+
+ protected INodeResolver getNodeResolver() {
+ if (nodeResolver == null) {
+ nodeResolver = initializeNodeResolver();
+ }
+ return nodeResolver;
+ }
+
+ private static INodeResolver initializeNodeResolver() {
+ INodeResolver nodeResolver = null;
+ String configuredNodeResolverFactory = System.getProperty(NODE_RESOLVER_FACTORY_PROPERTY);
+ if (configuredNodeResolverFactory != null) {
+ try {
+ nodeResolver = ((INodeResolverFactory) (Class.forName(configuredNodeResolverFactory).newInstance()))
+ .createNodeResolver();
+
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "Unable to create node resolver from the configured classname "
+ + configuredNodeResolverFactory + "\n" + e.getMessage());
+ }
+ nodeResolver = DEFAULT_NODE_RESOLVER;
+ }
+ } else {
+ nodeResolver = DEFAULT_NODE_RESOLVER;
+ }
+ return nodeResolver;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedAzureTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedAzureTwitterAdapterFactory.java
new file mode 100644
index 0000000..b4dbe13
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedAzureTwitterAdapterFactory.java
@@ -0,0 +1,139 @@
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.dataset.adapter.PullBasedAzureTwitterAdapter;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.Datatype;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class PullBasedAzureTwitterAdapterFactory implements ITypedAdapterFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String INGESTOR_LOCATIONS_KEY = "ingestor-locations";
+ private static final String PARTITIONS_KEY = "partitions";
+ private static final String OUTPUT_TYPE_KEY = "output-type";
+ private static final String TABLE_NAME_KEY = "table-name";
+ private static final String ACCOUNT_NAME_KEY = "account-name";
+ private static final String ACCOUNT_KEY_KEY = "account-key";
+
+ private ARecordType recordType;
+ private Map<String, String> configuration;
+ private String tableName;
+ private String azureAccountName;
+ private String azureAccountKey;
+ private String[] locations;
+ private String[] partitions;
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public String getName() {
+ return "azure_twitter";
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
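+ // A null constraint (no ingestor-locations configured) leaves runtime
+ // placement unconstrained for the caller to decide.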
+ String locationsStr = configuration.get(INGESTOR_LOCATIONS_KEY);
+ if (locationsStr == null) {
+ return null;
+ }
+ String[] locations = locationsStr.split(",");
+ return new AlgebricksAbsolutePartitionConstraint(locations);
+ }
+
+ @Override
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ return new PullBasedAzureTwitterAdapter(azureAccountName, azureAccountKey, tableName, partitions,
+ configuration, ctx, recordType);
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ this.configuration = configuration;
+
+ tableName = configuration.get(TABLE_NAME_KEY);
+ if (tableName == null) {
+ throw new AsterixException("You must specify a valid table name");
+ }
+ azureAccountName = configuration.get(ACCOUNT_NAME_KEY);
+ azureAccountKey = configuration.get(ACCOUNT_KEY_KEY);
+ if (azureAccountName == null || azureAccountKey == null) {
+ throw new AsterixException("You must specify a valid Azure account name and key");
+ }
+
+ int nIngestLocations = 1;
+ String locationsStr = configuration.get(INGESTOR_LOCATIONS_KEY);
+ if (locationsStr != null) {
+ locations = locationsStr.split(",");
+ nIngestLocations = locations.length;
+ }
+
+ int nPartitions = 1;
+ String partitionsStr = configuration.get(PARTITIONS_KEY);
+ if (partitionsStr != null) {
+ partitions = partitionsStr.split(",");
+ nPartitions = partitions.length;
+ }
+
+ if (nIngestLocations != nPartitions) {
+ throw new AsterixException("Invalid adapter configuration: number of ingestion-locations ("
+ + nIngestLocations + ") must be the same as the number of partitions (" + nPartitions + ")");
+ }
+ configureType();
+ }
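+
+ // A hedged configuration sketch (all values are illustrative placeholders,
+ // not defaults):
+ //
+ // table-name = "TweetTable"
+ // account-name = "myazureaccount"
+ // account-key = "<base64-account-key>"
+ // output-type = "feeds.TweetType"
+ // ingestor-locations = "nc1,nc2" (optional)
+ // partitions = "0,1" (optional; when both lists are given,
+ // their lengths must match)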
+
+ private void configureType() throws Exception {
+ String fqOutputType = configuration.get(OUTPUT_TYPE_KEY);
+
+ if (fqOutputType == null) {
+ throw new IllegalArgumentException("No output type specified");
+ }
+ String[] dataverseAndType = fqOutputType.split("[.]");
+ String dataverseName = dataverseAndType[0];
+ String datatypeName = dataverseAndType[1];
+
+ MetadataTransactionContext ctx = null;
+ MetadataManager.INSTANCE.acquireReadLatch();
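+ // Standard metadata read pattern: hold the read latch for the duration of
+ // a metadata transaction, abort the transaction on failure, and always
+ // release the latch.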
+ try {
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, dataverseName, datatypeName);
+ IAType type = t.getDatatype();
+ if (type.getTypeTag() != ATypeTag.RECORD) {
+ throw new IllegalStateException("Output type " + fqOutputType + " must be a record type, found: "
+ + type.getTypeTag());
+ }
+ recordType = (ARecordType) type;
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ } catch (Exception e) {
+ if (ctx != null) {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ }
+ throw e;
+ } finally {
+ MetadataManager.INSTANCE.releaseReadLatch();
+ }
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
index 232d5e8..6058bd2 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
@@ -16,23 +16,44 @@
import java.util.Map;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
* Factory class for creating an instance of PullBasedTwitterAdapter.
* This adapter provides the functionality of fetching tweets from Twitter service
* via pull-based Twitter API.
*/
-public class PullBasedTwitterAdapterFactory implements ITypedDatasetAdapterFactory {
+public class PullBasedTwitterAdapterFactory implements ITypedAdapterFactory {
private static final long serialVersionUID = 1L;
public static final String PULL_BASED_TWITTER_ADAPTER_NAME = "pull_twitter";
+ private Map<String, String> configuration;
+ private static ARecordType recordType = initOutputType();
+
+ private static ARecordType initOutputType() {
+ ARecordType recordType = null;
+ String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+ BuiltinType.ASTRING };
+ try {
+ recordType = new ARecordType("TweetType", fieldNames, fieldTypes, false);
+ } catch (Exception e) {
+ throw new IllegalStateException("Unable to create adapter output type");
+ }
+ return recordType;
+ }
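+
+ // Sketch of the (open) records this adapter emits, with illustrative
+ // values; all five fields are strings:
+ //
+ // { "id": "1", "username": "jdoe", "location": "CA",
+ // "text": "...", "timestamp": "..." }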
+
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration) throws Exception {
- PullBasedTwitterAdapter twitterAdapter = new PullBasedTwitterAdapter();
- twitterAdapter.configure(configuration);
- return twitterAdapter;
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ return new PullBasedTwitterAdapter(configuration, recordType, ctx);
}
@Override
@@ -40,4 +61,29 @@
return PULL_BASED_TWITTER_ADAPTER_NAME;
}
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ this.configuration = configuration;
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
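+ // A single ingestion runtime is sufficient for the pull-based Twitter feed.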
+ return new AlgebricksCountPartitionConstraint(1);
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
index ab18455..41f1d56 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
@@ -14,23 +14,38 @@
*/
package edu.uci.ics.asterix.external.adapter.factory;
+import java.util.ArrayList;
+import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
* Factory class for creating an instance of @see {RSSFeedAdapter}.
* RSSFeedAdapter provides the functionality of fetching an RSS based feed.
*/
-public class RSSFeedAdapterFactory implements ITypedDatasetAdapterFactory {
+public class RSSFeedAdapterFactory implements ITypedAdapterFactory {
private static final long serialVersionUID = 1L;
public static final String RSS_FEED_ADAPTER_NAME = "rss_feed";
+ public static final String KEY_RSS_URL = "url";
+ public static final String KEY_INTERVAL = "interval";
+
+ private Map<String, String> configuration;
+ private ARecordType recordType;
+ private List<String> feedURLs = new ArrayList<String>();
+
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration) throws Exception {
- RSSFeedAdapter rssFeedAdapter = new RSSFeedAdapter();
- rssFeedAdapter.configure(configuration);
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ RSSFeedAdapter rssFeedAdapter = new RSSFeedAdapter(configuration, recordType, ctx);
return rssFeedAdapter;
}
@@ -39,4 +54,51 @@
return "rss_feed";
}
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ this.configuration = configuration;
+ String rssURLProperty = configuration.get(KEY_RSS_URL);
+ if (rssURLProperty == null) {
+ throw new IllegalArgumentException("No RSS URL provided; the '" + KEY_RSS_URL + "' parameter is required");
+ }
+ initializeFeedURLs(rssURLProperty);
+ configurePartitionConstraints();
+ recordType = new ARecordType("FeedRecordType", new String[] { "id", "title", "description", "link" },
+ new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING },
+ false);
+
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
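+ // One ingestion runtime per configured feed URL.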
+ return new AlgebricksCountPartitionConstraint(feedURLs.size());
+ }
+
+ private void initializeFeedURLs(String rssURLProperty) {
+ feedURLs.clear();
+ String[] feedURLProperty = rssURLProperty.split(",");
+ for (String feedURL : feedURLProperty) {
+ feedURLs.add(feedURL);
+ }
+ }
+
+ protected void configurePartitionConstraints() {
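+ // Intentionally a no-op; subclasses may override to install custom
+ // partition constraints.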
+
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
new file mode 100644
index 0000000..f69eb2d
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.util.INodeResolver;
+import edu.uci.ics.asterix.metadata.feeds.ConditionalPushTupleParserFactory;
+import edu.uci.ics.asterix.metadata.feeds.IAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
+import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.FloatParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.LongParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+public abstract class StreamBasedAdapterFactory implements IAdapterFactory {
+
+ private static final long serialVersionUID = 1L;
+ protected static final Logger LOGGER = Logger.getLogger(StreamBasedAdapterFactory.class.getName());
+
+ protected Map<String, String> configuration;
+ protected static INodeResolver nodeResolver;
+
+ public static final String KEY_FORMAT = "format";
+ public static final String KEY_PARSER_FACTORY = "parser";
+ public static final String KEY_DELIMITER = "delimiter";
+ public static final String KEY_PATH = "path";
+ public static final String KEY_SOURCE_DATATYPE = "output-type-name";
+ public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
+ public static final String FORMAT_ADM = "adm";
+ public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
+ public static final String BATCH_SIZE = "batch-size";
+ public static final String BATCH_INTERVAL = "batch-interval";
+
+ protected ITupleParserFactory parserFactory;
+ protected ITupleParser parser;
+
+ protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+ static {
+ typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
+ }
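+
+ // The map above pairs primitive ADM types with the Hyracks value parsers
+ // used for delimited text; field types outside this map are rejected in
+ // getDelimitedDataTupleParserFactory below.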
+
+ protected ITupleParserFactory getDelimitedDataTupleParserFactory(ARecordType recordType, boolean conditionalPush)
+ throws AsterixException {
+ int n = recordType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = null;
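+ // An optional field is modeled as UNION(NULL, T); such a field is parsed
+ // with the value parser for T.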
+ if (recordType.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
+ List<IAType> unionTypes = ((AUnionType) recordType.getFieldTypes()[i]).getUnionList();
+ if (unionTypes.size() != 2 && unionTypes.get(0).getTypeTag() != ATypeTag.NULL) {
+ throw new NotImplementedException("Non-optional UNION type is not supported.");
+ }
+ tag = unionTypes.get(1).getTypeTag();
+ } else {
+ tag = recordType.getFieldTypes()[i].getTypeTag();
+ }
+ if (tag == null) {
+ throw new NotImplementedException("Failed to get the type information for field " + i + ".");
+ }
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
+ }
+ fieldParserFactories[i] = vpf;
+ }
+ String delimiterValue = configuration.get(KEY_DELIMITER);
+ if (delimiterValue == null || delimiterValue.length() != 1) {
+ throw new AsterixException("Improper delimiter: a single-character value is required for '"
+ + KEY_DELIMITER + "'");
+ }
+
+ char delimiter = delimiterValue.charAt(0);
+
+ return conditionalPush ? new ConditionalPushTupleParserFactory(recordType, fieldParserFactories, delimiter,
+ configuration) : new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter);
+ }
+
+ protected ITupleParserFactory getADMDataTupleParserFactory(ARecordType recordType, boolean conditionalPush)
+ throws AsterixException {
+ try {
+ return conditionalPush ? new ConditionalPushTupleParserFactory(recordType, configuration)
+ : new AdmSchemafullRecordParserFactory(recordType);
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ protected void configureFormat(IAType sourceDatatype) throws Exception {
+ String propValue = configuration.get(BATCH_SIZE);
+ int batchSize = propValue != null ? Integer.parseInt(propValue) : -1;
+ propValue = configuration.get(BATCH_INTERVAL);
+ long batchInterval = propValue != null ? Long.parseLong(propValue) : -1;
+ boolean conditionalPush = batchSize > 0 || batchInterval > 0;
+
+ String parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
+ if (parserFactoryClassname == null) {
+ String specifiedFormat = configuration.get(KEY_FORMAT);
+ if (specifiedFormat == null) {
+ throw new IllegalArgumentException("Unspecified data format");
+ } else if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(specifiedFormat)) {
+ parserFactory = getDelimitedDataTupleParserFactory((ARecordType) sourceDatatype, conditionalPush);
+ } else if (FORMAT_ADM.equalsIgnoreCase(specifiedFormat)) {
+ parserFactory = getADMDataTupleParserFactory((ARecordType) sourceDatatype, conditionalPush);
+ } else {
+ throw new IllegalArgumentException("Format " + specifiedFormat + " not supported");
+ }
+ } else {
+ parserFactory = (ITupleParserFactory) Class.forName(parserFactoryClassname).newInstance();
+ }
+
+ }
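+
+ // A hedged configuration sketch (keys are the constants above; values are
+ // illustrative): a delimited-text source that takes the conditional-push
+ // parser path could be configured as
+ //
+ // format = "delimited-text"
+ // delimiter = "|"
+ // batch-size = "100"
+ //
+ // whereas setting "parser" to an ITupleParserFactory class name bypasses
+ // format resolution entirely.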
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
deleted file mode 100644
index 4290132..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.data.operator;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/*
- * A single activity operator that provides the functionality of scanning data using an
- * instance of the configured adapter.
- */
-public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final Map<String, Object> adapterConfiguration;
- private final IAType atype;
- private IGenericDatasetAdapterFactory datasourceAdapterFactory;
-
- public ExternalDataScanOperatorDescriptor(JobSpecification spec, Map<String, Object> arguments, IAType atype,
- RecordDescriptor rDesc, IGenericDatasetAdapterFactory dataSourceAdapterFactory) {
- super(spec, 0, 1);
- recordDescriptors[0] = rDesc;
- this.adapterConfiguration = arguments;
- this.atype = atype;
- this.datasourceAdapterFactory = dataSourceAdapterFactory;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
-
- return new AbstractUnaryOutputSourceOperatorNodePushable() {
- @Override
- public void initialize() throws HyracksDataException {
- writer.open();
- IDatasourceAdapter adapter = null;
- try {
- adapter = ((IGenericDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
- adapterConfiguration, atype);
- adapter.initialize(ctx);
- adapter.start(partition, writer);
- } catch (Exception e) {
- throw new HyracksDataException("exception during reading from external data source", e);
- } finally {
- writer.close();
- }
- }
- };
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
deleted file mode 100644
index a48cfb8..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.data.operator;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * Operator responsible for ingesting data from an external source. This
- * operator uses a (configurable) adapter associated with the feed dataset.
- */
-public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final String adapterFactoryClassName;
- private final Map<String, Object> adapterConfiguration;
- private final IAType atype;
- private final FeedId feedId;
- private final IAdapterFactory datasourceAdapterFactory;
-
- public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId, String adapter,
- Map<String, Object> arguments, ARecordType atype, RecordDescriptor rDesc,
- IAdapterFactory datasourceAdapterFactory) {
- super(spec, 1, 1);
- recordDescriptors[0] = rDesc;
- this.adapterFactoryClassName = adapter;
- this.adapterConfiguration = arguments;
- this.atype = atype;
- this.feedId = feedId;
- this.datasourceAdapterFactory = datasourceAdapterFactory;
- }
-
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
- ITypedDatasourceAdapter adapter;
- try {
- if (datasourceAdapterFactory instanceof IGenericDatasetAdapterFactory) {
- adapter = (ITypedDatasourceAdapter) ((IGenericDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration, atype);
- } else if (datasourceAdapterFactory instanceof ITypedDatasetAdapterFactory) {
- adapter = (ITypedDatasourceAdapter) ((ITypedDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration);
- } else {
- throw new IllegalStateException(" Unknown adapter factory type for " + adapterFactoryClassName);
- }
- adapter.initialize(ctx);
- } catch (Exception e) {
- throw new HyracksDataException("initialization of adapter failed", e);
- }
- return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
- }
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
deleted file mode 100644
index 31470f3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.data.operator;
-
-import java.nio.ByteBuffer;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-/**
- * The runtime for @see{FeedIntakeOperationDescriptor}
- */
-public class FeedIntakeOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
- private final IDatasourceAdapter adapter;
- private final int partition;
- private final IFeedManager feedManager;
- private final FeedId feedId;
- private final LinkedBlockingQueue<IFeedMessage> inbox;
- private FeedInboxMonitor feedInboxMonitor;
-
- public FeedIntakeOperatorNodePushable(FeedId feedId, IDatasourceAdapter adapter, int partition) {
- this.adapter = adapter;
- this.partition = partition;
- this.feedManager = (IFeedManager) FeedManager.INSTANCE;
- this.feedId = feedId;
- inbox = new LinkedBlockingQueue<IFeedMessage>();
- }
-
- @Override
- public void open() throws HyracksDataException {
- if (adapter instanceof IManagedFeedAdapter) {
- feedInboxMonitor = new FeedInboxMonitor((IManagedFeedAdapter) adapter, inbox, partition);
- AsterixThreadExecutor.INSTANCE.execute(feedInboxMonitor);
- feedManager.registerFeedMsgQueue(feedId, inbox);
- }
- writer.open();
- try {
- adapter.start(partition, writer);
- } catch (Exception e) {
- e.printStackTrace();
- throw new HyracksDataException(e);
- /*
- we do not throw an exception, but allow the operator to close
- gracefully throwing an exception here would result in a job abort and a
- transaction roll back that undoes all the work done so far.
- */
-
- } finally {
- writer.close();
- if (adapter instanceof IManagedFeedAdapter) {
- feedManager.unregisterFeedMsgQueue(feedId, inbox);
- }
- }
- }
-
- @Override
- public void fail() throws HyracksDataException {
- writer.close();
- }
-
- @Override
- public void close() throws HyracksDataException {
-
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- // do nothing
- }
-}
-
-class FeedInboxMonitor extends Thread {
-
- private LinkedBlockingQueue<IFeedMessage> inbox;
- private final IManagedFeedAdapter adapter;
-
- public FeedInboxMonitor(IManagedFeedAdapter adapter, LinkedBlockingQueue<IFeedMessage> inbox, int partition) {
- this.inbox = inbox;
- this.adapter = adapter;
- }
-
- @Override
- public void run() {
- while (true) {
- try {
- IFeedMessage feedMessage = inbox.take();
- switch (feedMessage.getMessageType()) {
- case STOP:
- adapter.stop();
- break;
- case ALTER:
- adapter.alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
- break;
- }
- } catch (InterruptedException ie) {
- break;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
deleted file mode 100644
index 300e3af..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.data.operator;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * Sends a control message to the registered message queue for feed specified by its feedId.
- */
-public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private final boolean sendToAll = true;
-
- public FeedMessageOperatorDescriptor(JobSpecification spec, String dataverse, String dataset,
- List<IFeedMessage> feedMessages) {
- super(spec, 0, 1);
- this.feedId = new FeedId(dataverse, dataset);
- this.feedMessages = feedMessages;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
- return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages, sendToAll, partition, nPartitions);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
deleted file mode 100644
index b6bacef..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.data.operator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/**
- * Runtime for the @see{FeedMessageOperatorDescriptor}
- */
-public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private IFeedManager feedManager;
-
- public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, List<IFeedMessage> feedMessages,
- boolean applyToAll, int partition, int nPartitions) {
- this.feedId = feedId;
- if (applyToAll) {
- this.feedMessages = feedMessages;
- } else {
- this.feedMessages = new ArrayList<IFeedMessage>();
- feedMessages.add(feedMessages.get(partition));
- }
- feedManager = (IFeedManager) FeedManager.INSTANCE;
- }
-
- @Override
- public void initialize() throws HyracksDataException {
- try {
- writer.open();
- for (IFeedMessage feedMessage : feedMessages) {
- feedManager.deliverMessage(feedId, feedMessage);
- }
- } catch (Exception e) {
- throw new HyracksDataException(e);
- } finally {
- writer.close();
- }
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
deleted file mode 100644
index f96a030..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.FloatParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.LongParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-
-/**
- * Represents the base class that is required to be extended by every
- * implementation of the IDatasourceAdapter interface.
- */
-public abstract class AbstractDatasourceAdapter implements IDatasourceAdapter {
-
- private static final long serialVersionUID = 1L;
-
- protected Map<String, Object> configuration;
- protected transient AlgebricksPartitionConstraint partitionConstraint;
- protected IAType atype;
- protected IHyracksTaskContext ctx;
- protected AdapterType adapterType;
-
- protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
- static {
- typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
- }
-
- protected static final Map<String, Object> formatToParserFactoryMap = initializeFormatParserFactoryMap();
-
- public static final String KEY_FORMAT = "format";
- public static final String KEY_PARSER_FACTORY = "parser";
- public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
- public static final String FORMAT_ADM = "adm";
-
- private static Map<String, Object> initializeFormatParserFactoryMap() {
- Map<String, Object> map = new HashMap<String, Object>();
- map.put(FORMAT_DELIMITED_TEXT, "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
- map.put(FORMAT_ADM, "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
- return map;
- }
-
- /**
- * Get the partition constraint chosen by the adapter.
- * An adapter may have preferences as to where it needs to be instantiated and used.
- */
- public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
-
- /**
- * Get the configured value from the adapter configuration parameters, corresponding to the an attribute.
- *
- * @param attribute
- * The attribute whose value needs to be obtained.
- */
- public Object getAdapterProperty(String attribute) {
- return configuration.get(attribute);
- }
-
- /**
- * Get the adapter configuration parameters.
- *
- * @return A Map<String,String> instance representing the adapter configuration.
- */
- public Map<String, Object> getConfiguration() {
- return configuration;
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
deleted file mode 100644
index f6164ea..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import java.io.Serializable;
-
-/**
- * A unique identifier for a datasource adapter.
- */
-public class AdapterIdentifier implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final String namespace;
- private final String adapterName;
-
- public AdapterIdentifier(String namespace, String adapterName) {
- this.namespace = namespace;
- this.adapterName = adapterName;
- }
-
- public String getNamespace() {
- return namespace;
- }
-
- public String getAdapterName() {
- return adapterName;
- }
-
- @Override
- public int hashCode() {
- return (namespace + "@" + adapterName).hashCode();
-
- }
-
- @Override
- public boolean equals(Object o) {
- if (!(o instanceof AdapterIdentifier)) {
- return false;
- }
- return namespace.equals(((AdapterIdentifier) o).getNamespace())
- && namespace.equals(((AdapterIdentifier) o).getNamespace());
- }
-}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetEntity.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetEntity.java
new file mode 100644
index 0000000..ed98abf
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetEntity.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import com.microsoft.windowsazure.services.table.client.TableServiceEntity;
+
+public class AzureTweetEntity extends TableServiceEntity {
+
+ private String postingType;
+ private String json;
+
+ public AzureTweetEntity() {
+ }
+
+ public AzureTweetEntity(String userID, String postingID) {
+ this.partitionKey = userID;
+ this.rowKey = postingID;
+ }
+
+ public String getPostingType() {
+ return postingType;
+ }
+
+ public void setPostingType(String postingType) {
+ this.postingType = postingType;
+ }
+
+ public void setJSON(String json) {
+ this.json = json;
+ }
+
+ public String getJSON() {
+ return json;
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetMetadataEntity.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetMetadataEntity.java
new file mode 100644
index 0000000..083a92e
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AzureTweetMetadataEntity.java
@@ -0,0 +1,77 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import com.microsoft.windowsazure.services.table.client.TableServiceEntity;
+
+public class AzureTweetMetadataEntity extends TableServiceEntity {
+ private String creationTimestamp;
+ private String postingType;
+ private String productId;
+ private String ethnicity;
+ private String gender;
+ private String sentiment;
+ private String location;
+
+ public AzureTweetMetadataEntity() {
+ }
+
+ public AzureTweetMetadataEntity(String partitionKey, String rowKey) {
+ this.partitionKey = partitionKey;
+ this.rowKey = rowKey;
+ }
+
+ public String getCreationTimestamp() {
+ return creationTimestamp;
+ }
+
+ public void setCreationTimestamp(String creationTimestamp) {
+ this.creationTimestamp = creationTimestamp;
+ }
+
+ public String getPostingType() {
+ return postingType;
+ }
+
+ public void setPostingType(String postingType) {
+ this.postingType = postingType;
+ }
+
+ public String getProductId() {
+ return productId;
+ }
+
+ public void setProductId(String productId) {
+ this.productId = productId;
+ }
+
+ public String getEthnicity() {
+ return ethnicity;
+ }
+
+ public void setEthnicity(String ethnicity) {
+ this.ethnicity = ethnicity;
+ }
+
+ public String getGender() {
+ return gender;
+ }
+
+ public void setGender(String gender) {
+ this.gender = gender;
+ }
+
+ public String getSentiment() {
+ return sentiment;
+ }
+
+ public void setSentiment(String sentiment) {
+ this.sentiment = sentiment;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public void setLocation(String location) {
+ this.location = location;
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
deleted file mode 100644
index 9112aae..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-
-/**
- * An Adapter that provides the functionality of fetching news feed from CNN service
- * The Adapter provides news feed as ADM records.
- */
-public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IManagedFeedAdapter {
-
- private static final long serialVersionUID = 2523303758114582251L;
- private List<String> feedURLs = new ArrayList<String>();
- private static Map<String, String> topicFeeds = new HashMap<String, String>();
-
- public static final String KEY_RSS_URL = "topic";
- public static final String KEY_INTERVAL = "interval";
- public static final String TOP_STORIES = "topstories";
- public static final String WORLD = "world";
- public static final String US = "us";
- public static final String SPORTS = "sports";
- public static final String BUSINESS = "business";
- public static final String POLITICS = "politics";
- public static final String CRIME = "crime";
- public static final String TECHNOLOGY = "technology";
- public static final String HEALTH = "health";
- public static final String ENTERNTAINMENT = "entertainemnt";
- public static final String TRAVEL = "travel";
- public static final String LIVING = "living";
- public static final String VIDEO = "video";
- public static final String STUDENT = "student";
- public static final String POPULAR = "popular";
- public static final String RECENT = "recent";
-
- private void initTopics() {
- topicFeeds.put(TOP_STORIES, "http://rss.cnn.com/rss/cnn_topstories.rss");
- topicFeeds.put(WORLD, "http://rss.cnn.com/rss/cnn_world.rss");
- topicFeeds.put(US, "http://rss.cnn.com/rss/cnn_us.rss");
- topicFeeds.put(SPORTS, "http://rss.cnn.com/rss/si_topstories.rss");
- topicFeeds.put(BUSINESS, "http://rss.cnn.com/rss/money_latest.rss");
- topicFeeds.put(POLITICS, "http://rss.cnn.com/rss/cnn_allpolitics.rss");
- topicFeeds.put(CRIME, "http://rss.cnn.com/rss/cnn_crime.rss");
- topicFeeds.put(TECHNOLOGY, "http://rss.cnn.com/rss/cnn_tech.rss");
- topicFeeds.put(HEALTH, "http://rss.cnn.com/rss/cnn_health.rss");
- topicFeeds.put(ENTERNTAINMENT, "http://rss.cnn.com/rss/cnn_showbiz.rss");
- topicFeeds.put(LIVING, "http://rss.cnn.com/rss/cnn_living.rss");
- topicFeeds.put(VIDEO, "http://rss.cnn.com/rss/cnn_freevideo.rss");
- topicFeeds.put(TRAVEL, "http://rss.cnn.com/rss/cnn_travel.rss");
- topicFeeds.put(STUDENT, "http://rss.cnn.com/rss/cnn_studentnews.rss");
- topicFeeds.put(POPULAR, "http://rss.cnn.com/rss/cnn_mostpopular.rss");
- topicFeeds.put(RECENT, "http://rss.cnn.com/rss/cnn_latest.rss");
- }
-
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- configuration = arguments;
- String rssURLProperty = (String) configuration.get(KEY_RSS_URL);
- if (rssURLProperty == null) {
- throw new IllegalArgumentException("no rss url provided");
- }
- initializeFeedURLs(rssURLProperty);
- configurePartitionConstraints();
-
- }
-
- private void initializeFeedURLs(String rssURLProperty) {
- feedURLs.clear();
- String[] rssTopics = rssURLProperty.split(",");
- initTopics();
- for (String topic : rssTopics) {
- String feedURL = topicFeeds.get(topic);
- if (feedURL == null) {
- throw new IllegalArgumentException(" unknown topic :" + topic + " please choose from the following "
- + getValidTopics());
- }
- feedURLs.add(feedURL);
- }
- }
-
- private static String getValidTopics() {
- StringBuilder builder = new StringBuilder();
- for (String key : topicFeeds.keySet()) {
- builder.append(key);
- builder.append(" ");
- }
- return new String(builder);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
index 753f7d1..b8e9ef9 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
@@ -16,158 +16,37 @@
import java.io.IOException;
import java.io.InputStream;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.external.util.DNSResolverFactory;
-import edu.uci.ics.asterix.external.util.INodeResolver;
-import edu.uci.ics.asterix.external.util.INodeResolverFactory;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
-import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
-public abstract class FileSystemBasedAdapter extends AbstractDatasourceAdapter {
+public abstract class FileSystemBasedAdapter implements IDatasourceAdapter {
private static final long serialVersionUID = 1L;
public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
- public static final String KEY_DELIMITER = "delimiter";
- public static final String KEY_PATH = "path";
-
- protected ITupleParserFactory parserFactory;
- protected ITupleParser parser;
- protected static INodeResolver nodeResolver;
-
- private static final INodeResolver DEFAULT_NODE_RESOLVER = new DNSResolverFactory().createNodeResolver();
- private static final Logger LOGGER = Logger.getLogger(FileSystemBasedAdapter.class.getName());
public abstract InputStream getInputStream(int partition) throws IOException;
- public FileSystemBasedAdapter(IAType atype) {
- this.atype = atype;
+ protected final ITupleParser tupleParser;
+ protected final IAType sourceDatatype;
+ protected IHyracksTaskContext ctx;
+
+ public FileSystemBasedAdapter(ITupleParserFactory parserFactory, IAType sourceDatatype, IHyracksTaskContext ctx)
+ throws HyracksDataException {
+ this.tupleParser = parserFactory.createTupleParser(ctx);
+ this.sourceDatatype = sourceDatatype;
+ this.ctx = ctx;
}
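+
+ // With parsing delegated to the injected ITupleParserFactory, start() simply
+ // streams the partition's input through the tuple parser.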
@Override
public void start(int partition, IFrameWriter writer) throws Exception {
InputStream in = getInputStream(partition);
- parser = getTupleParser();
- parser.parse(in, writer);
- }
-
- @Override
- public abstract void initialize(IHyracksTaskContext ctx) throws Exception;
-
- @Override
- public abstract void configure(Map<String, Object> arguments) throws Exception;
-
- @Override
- public abstract AdapterType getAdapterType();
-
- @Override
- public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
-
- protected ITupleParser getTupleParser() throws Exception {
- return parserFactory.createTupleParser(ctx);
- }
-
- protected void configureFormat() throws Exception {
- String parserFactoryClassname = (String) configuration.get(KEY_PARSER_FACTORY);
- if (parserFactoryClassname == null) {
- String specifiedFormat = (String) configuration.get(KEY_FORMAT);
- if (specifiedFormat == null) {
- throw new IllegalArgumentException(" Unspecified data format");
- } else if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(specifiedFormat)) {
- parserFactory = getDelimitedDataTupleParserFactory((ARecordType) atype);
- } else if (FORMAT_ADM.equalsIgnoreCase((String) configuration.get(KEY_FORMAT))) {
- parserFactory = getADMDataTupleParserFactory((ARecordType) atype);
- } else {
- throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
- }
- } else {
- parserFactory = (ITupleParserFactory) Class.forName(parserFactoryClassname).newInstance();
- }
-
- }
-
- protected ITupleParserFactory getDelimitedDataTupleParserFactory(ARecordType recordType) throws AsterixException {
- int n = recordType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = null;
- if (recordType.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
- if (!NonTaggedFormatUtil.isOptionalField(((AUnionType) recordType.getFieldTypes()[i]))) {
- throw new NotImplementedException("Non-optional UNION type is not supported.");
- }
- tag = ((AUnionType) recordType.getFieldTypes()[i]).getUnionList()
- .get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST).getTypeTag();
- } else {
- tag = recordType.getFieldTypes()[i].getTypeTag();
- }
- if (tag == null) {
- throw new NotImplementedException("Failed to get the type information for field " + i + ".");
- }
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
- }
- String delimiterValue = (String) configuration.get(KEY_DELIMITER);
- if (delimiterValue != null && delimiterValue.length() > 1) {
- throw new AsterixException("improper delimiter");
- }
-
- Character delimiter = delimiterValue.charAt(0);
- return new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter);
- }
-
- protected ITupleParserFactory getADMDataTupleParserFactory(ARecordType recordType) throws AsterixException {
- try {
- return new AdmSchemafullRecordParserFactory(recordType);
- } catch (Exception e) {
- throw new AsterixException(e);
- }
-
- }
-
- protected INodeResolver getNodeResolver() {
- if (nodeResolver == null) {
- nodeResolver = initNodeResolver();
- }
- return nodeResolver;
- }
-
- private static INodeResolver initNodeResolver() {
- INodeResolver nodeResolver = null;
- String configuredNodeResolverFactory = System.getProperty(NODE_RESOLVER_FACTORY_PROPERTY);
- if (configuredNodeResolverFactory != null) {
- try {
- nodeResolver = ((INodeResolverFactory) (Class.forName(configuredNodeResolverFactory).newInstance()))
- .createNodeResolver();
-
- } catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Unable to create node resolver from the configured classname "
- + configuredNodeResolverFactory + "\n" + e.getMessage());
- }
- nodeResolver = DEFAULT_NODE_RESOLVER;
- }
- } else {
- nodeResolver = DEFAULT_NODE_RESOLVER;
- }
- return nodeResolver;
+ tupleParser.parse(in, writer);
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
index f8b381b..1a046a5 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -16,7 +16,6 @@
import java.io.IOException;
import java.io.InputStream;
-import java.util.Map;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Counters.Counter;
@@ -28,9 +27,10 @@
import org.apache.hadoop.mapred.TextInputFormat;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
/**
* Provides functionality for fetching external data stored in an HDFS instance.
@@ -44,34 +44,16 @@
private transient boolean executed[];
private transient InputSplit[] inputSplits;
private transient JobConf conf;
- private transient AlgebricksPartitionConstraint clusterLocations;
-
private transient String nodeName;
public HDFSAdapter(IAType atype, String[] readSchedule, boolean[] executed, InputSplit[] inputSplits, JobConf conf,
- AlgebricksPartitionConstraint clusterLocations) {
- super(atype);
+ String nodeName, ITupleParserFactory parserFactory, IHyracksTaskContext ctx) throws HyracksDataException {
+ super(parserFactory, atype, ctx);
this.readSchedule = readSchedule;
this.executed = executed;
this.inputSplits = inputSplits;
this.conf = conf;
- this.clusterLocations = clusterLocations;
- }
-
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- this.configuration = arguments;
- configureFormat();
- }
-
- public AdapterType getAdapterType() {
- return AdapterType.READ_WRITE;
- }
-
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- this.nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+ this.nodeName = nodeName;
}
private Reporter getReporter() {
@@ -227,9 +209,4 @@
}
- @Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- return clusterLocations;
- }
-
}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
index 91cf0d2..6280635 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
@@ -14,63 +14,32 @@
*/
package edu.uci.ics.asterix.external.dataset.adapter;
-import java.util.Map;
-
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
+import java.io.IOException;
+import java.io.InputStream;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
/**
* Provides the functionality of fetching data in the form of ADM records from a Hive dataset.
*/
-@SuppressWarnings("deprecation")
-public class HiveAdapter extends AbstractDatasourceAdapter {
+public class HiveAdapter extends FileSystemBasedAdapter {
private static final long serialVersionUID = 1L;
- public static final String HIVE_DATABASE = "database";
- public static final String HIVE_TABLE = "table";
- public static final String HIVE_HOME = "hive-home";
- public static final String HIVE_METASTORE_URI = "metastore-uri";
- public static final String HIVE_WAREHOUSE_DIR = "warehouse-dir";
- public static final String HIVE_METASTORE_RAWSTORE_IMPL = "rawstore-impl";
-
private HDFSAdapter hdfsAdapter;
- public HiveAdapter(IAType atype, String[] readSchedule, boolean[] executed, InputSplit[] inputSplits, JobConf conf,
- AlgebricksPartitionConstraint clusterLocations) {
- this.hdfsAdapter = new HDFSAdapter(atype, readSchedule, executed, inputSplits, conf, clusterLocations);
- this.atype = atype;
+ public HiveAdapter(IAType atype, HDFSAdapter hdfsAdapter, ITupleParserFactory parserFactory, IHyracksTaskContext ctx)
+ throws HyracksDataException {
+ super(parserFactory, atype, ctx);
+ this.hdfsAdapter = hdfsAdapter;
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
-
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- this.configuration = arguments;
- this.hdfsAdapter.configure(arguments);
- }
-
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- hdfsAdapter.initialize(ctx);
- }
-
- @Override
- public void start(int partition, IFrameWriter writer) throws Exception {
- hdfsAdapter.start(partition, writer);
- }
-
- @Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- return hdfsAdapter.getPartitionConstraint();
+ public InputStream getInputStream(int partition) throws IOException {
+ return hdfsAdapter.getInputStream(partition);
}
}
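With this change, HiveAdapter no longer configures or initializes itself; it is a thin
wrapper that delegates getInputStream() to an already-constructed HDFSAdapter. A
hypothetical wiring sketch of the new construction path, matching the two constructors
above (all parameter values are assumed to be supplied by the surrounding operator;
this helper class is not part of the patch):

package edu.uci.ics.asterix.external.dataset.adapter;

import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;

import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

public class HiveAdapterWiring {

    // Builds the HDFS delegate first, then hands it to the Hive wrapper.
    public static HiveAdapter create(IAType atype, String[] readSchedule, boolean[] executed,
            InputSplit[] inputSplits, JobConf conf, String nodeName, ITupleParserFactory parserFactory,
            IHyracksTaskContext ctx) throws HyracksDataException {
        HDFSAdapter hdfsAdapter = new HDFSAdapter(atype, readSchedule, executed, inputSplits, conf, nodeName,
                parserFactory, ctx);
        return new HiveAdapter(atype, hdfsAdapter, parserFactory, ctx);
    }
}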
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
deleted file mode 100644
index 9403bfe..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import java.io.Serializable;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-/**
- * A super interface implemented by a data source adapter. An adapter can be a
- * pull based or push based. This interface provides all common APIs that need
- * to be implemented by each adapter irrespective of the the kind of
- * adapter(pull or push).
- */
-public interface IDatasourceAdapter extends Serializable {
-
- /**
- * An adapter can be used to read from an external data source and may also
- * allow writing to the external data source. This enum type indicates the
- * kind of operations supported by the adapter.
- *
- * @caller Compiler uses this method to assert the validity of an operation
- * on an external dataset. The type of adapter associated with an
- * external dataset determines the set of valid operations allowed
- * on the dataset.
- */
- public enum AdapterType {
- READ,
- WRITE,
- READ_WRITE
- }
-
- /**
- * Returns the type of adapter indicating if the adapter can be used for
- * reading from an external data source or writing to an external data
- * source or can be used for both purposes.
- *
- * @Caller: Compiler: The compiler uses this API to verify if an operation
- * is supported by the adapter. For example, an write query against
- * an external dataset will not compile successfully if the
- * external dataset was declared with a read_only adapter.
- * @see AdapterType
- * @return
- */
- public AdapterType getAdapterType();
-
- /**
- * Each adapter instance is configured with a set of parameters that are
- * key-value pairs. When creating an external or a feed dataset, an adapter
- * instance is used in conjunction with a set of configuration parameters
- * for the adapter instance. The configuration parameters are stored
- * internally with the adapter and can be retrieved using this API.
- *
- * @param propertyKey
- * @return String the value corresponding to the configuration parameter
- * represented by the key- attributeKey.
- */
- public Object getAdapterProperty(String propertyKey);
-
- /**
- * Configures the IDatasourceAdapter instance.
- *
- * @caller Scenario 1) Called during compilation of DDL statement that
- * creates a Feed dataset and associates the adapter with the
- * dataset. The (key,value) configuration parameters provided as
- * part of the DDL statement are collected by the compiler and
- * passed on to this method. The adapter may as part of
- * configuration connect with the external data source and determine
- * the IAType associated with data residing with the external
- * datasource.
- * Scenario 2) An adapter instance is created by an ASTERIX operator
- * that wraps around the adapter instance. The operator, as part of
- * its initialization invokes the configure method. The (key,value)
- * configuration parameters are passed on to the operator by the
- * compiler. Subsequent to the invocation, the wrapping operator
- * obtains the partition constraints (if any). In addition, in the
- * case of a read adapter, the wrapping operator obtains the output
- * ASTERIX type associated with the data that will be output from
- * the adapter.
- * @param arguments
- * A map with key-value pairs that contains the configuration
- * parameters for the adapter. The arguments are obtained from
- * the metadata. Recall that the DDL to create an external
- * dataset or a feed dataset requires using an adapter and
- * providing all arguments as a set of (key,value) pairs. These
- * arguments are put into the metadata.
- */
- public void configure(Map<String, Object> arguments) throws Exception;
-
- /**
- * Returns a list of partition constraints. A partition constraint can be a
- * requirement to execute at a particular location or could be cardinality
- * constraints indicating the number of instances that need to run in
- * parallel. example, a IDatasourceAdapter implementation written for data
- * residing on the local file system of a node cannot run on any other node
- * and thus has a location partition constraint. The location partition
- * constraint can be expressed as a node IP address or a node controller id.
- * In the former case, the IP address is translated to a node controller id
- * running on the node with the given IP address.
- *
- * @Caller The wrapper operator configures its partition constraints from
- * the constraints obtained from the adapter.
- */
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
-
- /**
- * Allows the adapter to establish connection with the external data source
- * expressing intent for data and providing any configuration parameters
- * required by the external data source for the transfer of data. This
- * method does not result in any data transfer, but is a prerequisite for
- * any subsequent data transfer to happen between the external data source
- * and the adapter.
- *
- * @caller This method is called by the wrapping ASTERIX operator that
- * @param ctx
- * @throws Exception
- */
- public void initialize(IHyracksTaskContext ctx) throws Exception;
-
- /**
- * Triggers the adapter to begin ingestion of data from the external source.
- *
- * @param partition
- * The adapter could be running with a degree of parallelism.
- * partition corresponds to the i'th parallel instance.
- * @param writer
- * The instance of frame writer that is used by the adapter to
- * write frame to. Adapter packs the fetched bytes (from external source),
- * packs them into frames and forwards the frames to an upstream receiving
- * operator using the instance of IFrameWriter.
- * @throws Exception
- */
- public void start(int partition, IFrameWriter writer) throws Exception;
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedClientFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedClientFactory.java
new file mode 100644
index 0000000..4fb14ff
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedClientFactory.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public interface IFeedClientFactory {
+
+ public IPullBasedFeedClient createFeedClient(IHyracksTaskContext ctx, Map<String, String> configuration)
+ throws Exception;
+
+ public ARecordType getRecordType() throws AsterixException;
+
+ public FeedClientType getFeedClientType();
+
+ public enum FeedClientType {
+ GENERIC,
+ TYPED
+ }
+}
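The factory type matters downstream: a TYPED factory fixes the record type of the records
its clients produce, while a GENERIC factory leaves the type to be supplied externally.
A minimal illustrative implementation (hypothetical; the stub client is for demonstration
only and immediately reports end of feed):

package edu.uci.ics.asterix.external.dataset.adapter;

import java.io.DataOutput;
import java.util.Map;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

public class EmptyFeedClientFactory implements IFeedClientFactory {

    @Override
    public IPullBasedFeedClient createFeedClient(IHyracksTaskContext ctx, Map<String, String> configuration)
            throws Exception {
        return new IPullBasedFeedClient() {
            @Override
            public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException {
                // A real client would write one ADM record to dataOutput and return
                // DATA_AVAILABLE; this stub ends the feed immediately.
                return InflowState.NO_MORE_DATA;
            }
        };
    }

    @Override
    public ARecordType getRecordType() throws AsterixException {
        // TYPED factories own the output schema of their clients.
        return new ARecordType("EmptyRecordType", new String[] { "id" },
                new IAType[] { BuiltinType.ASTRING }, false);
    }

    @Override
    public FeedClientType getFeedClientType() {
        return FeedClientType.TYPED;
    }
}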
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
index aeecb5f..be3a2fd 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
@@ -20,32 +20,23 @@
public interface IPullBasedFeedClient {
+ public enum InflowState {
+ NO_MORE_DATA,
+ DATA_AVAILABLE,
+ DATA_NOT_AVAILABLE
+ }
+
/**
- * Writes the next fetched tuple into the provided instance of DatatOutput.
+ * Writes the next fetched tuple into the provided instance of DataOutput. Invocation of this method blocks until
+ * a new tuple has been written or the specified time has expired.
*
* @param dataOutput
* The receiving channel for the feed client to write ADM records to.
- * @return true if a record was written to the DataOutput instance
- * false if no record was written to the DataOutput instance indicating non-availability of new data.
+ * @param timeout
+ * Threshold time (expressed in seconds) for the next tuple to be obtained from the external source.
+ * @return the state of data inflow: DATA_AVAILABLE, DATA_NOT_AVAILABLE, or NO_MORE_DATA
* @throws AsterixException
*/
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
-
- /**
- * Provides logic for any corrective action that feed client needs to execute on
- * encountering an exception.
- *
- * @param e
- * The exception encountered during fetching of data from external source
- * @throws AsterixException
- */
- public void resetOnFailure(Exception e) throws AsterixException;
-
- /**
- * Terminates a feed, that is data ingestion activity ceases.
- *
- * @throws Exception
- */
- public void stop() throws Exception;
+ public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException;
}
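Under the revised contract a client reports an InflowState instead of a boolean, which
lets the adapter distinguish a drained feed (NO_MORE_DATA) from a temporarily idle one
(DATA_NOT_AVAILABLE). A toy client over an in-memory list, sketched against the
PullBasedFeedClient base class as modified later in this patch (illustrative only;
the protected mutableRecord field and setNextRecord() hook are assumed from the code
below):

package edu.uci.ics.asterix.external.dataset.adapter;

import java.util.Iterator;
import java.util.List;

import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;

public class InMemoryFeedClient extends PullBasedFeedClient {

    private final Iterator<String> messages;
    private final AMutableString messageField = new AMutableString("");

    public InMemoryFeedClient(List<String> messages) throws Exception {
        this.messages = messages.iterator();
        ARecordType recordType = new ARecordType("MessageType", new String[] { "message" },
                new IAType[] { BuiltinType.ASTRING }, false);
        // The base class serializes whatever mutableRecord holds after each setNextRecord().
        mutableRecord = new AMutableRecord(recordType, new IAObject[] { messageField });
    }

    @Override
    public InflowState setNextRecord() throws Exception {
        if (!messages.hasNext()) {
            return InflowState.NO_MORE_DATA; // ends the ingestion loop in PullBasedAdapter
        }
        messageField.setValue(messages.next());
        mutableRecord.setValueAtPos(0, messageField);
        return InflowState.DATA_AVAILABLE;
    }
}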
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
deleted file mode 100644
index 00b8530..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-/**
- * Implemented by datasource adapter that has a fixed output type.
- * Example @see {PullBasedTwitterAdapter}
- */
-public interface ITypedDatasourceAdapter extends IDatasourceAdapter {
-
- public ARecordType getAdapterOutputType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
index 182ccf7..ea8ed05 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
@@ -19,15 +19,12 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
-import java.util.Map;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
/**
* Factory class for creating an instance of NCFileSystemAdapter. An
@@ -37,59 +34,13 @@
public class NCFileSystemAdapter extends FileSystemBasedAdapter {
private static final long serialVersionUID = 1L;
- private FileSplit[] fileSplits;
- public NCFileSystemAdapter(IAType atype) {
- super(atype);
- }
+ private final FileSplit[] fileSplits;
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- this.configuration = arguments;
- String[] splits = ((String) arguments.get(KEY_PATH)).split(",");
- configureFileSplits(splits);
- configureFormat();
- }
-
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- }
-
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
-
- private void configureFileSplits(String[] splits) throws AsterixException {
- if (fileSplits == null) {
- fileSplits = new FileSplit[splits.length];
- String nodeName;
- String nodeLocalPath;
- int count = 0;
- String trimmedValue;
- for (String splitPath : splits) {
- trimmedValue = splitPath.trim();
- if (!trimmedValue.contains("://")) {
- throw new AsterixException("Invalid path: " + splitPath
- + "\nUsage- path=\"Host://Absolute File Path\"");
- }
- nodeName = trimmedValue.split(":")[0];
- nodeLocalPath = trimmedValue.split("://")[1];
- FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
- fileSplits[count++] = fileSplit;
- }
- }
- }
-
- private void configurePartitionConstraint() throws AsterixException {
- String[] locs = new String[fileSplits.length];
- String location;
- for (int i = 0; i < fileSplits.length; i++) {
- location = getNodeResolver().resolveNode(fileSplits[i].getNodeName());
- locs[i] = location;
- }
- partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
+ public NCFileSystemAdapter(FileSplit[] fileSplits, ITupleParserFactory parserFactory, IAType atype,
+ IHyracksTaskContext ctx) throws HyracksDataException {
+ super(parserFactory, atype, ctx);
+ this.fileSplits = fileSplits;
}
@Override
@@ -105,11 +56,4 @@
}
}
- @Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- if (partitionConstraint == null) {
- configurePartitionConstraint();
- }
- return partitionConstraint;
- }
}
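NCFileSystemAdapter now receives its file splits, parser factory, and task context at
construction time instead of via configure()/initialize(). A hypothetical construction
sketch (package paths for the parser classes are assumed; the node name, path, and
delimiter are made up):

package edu.uci.ics.asterix.external.dataset.adapter;

import java.io.File;

import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

public class NCFileSystemAdapterWiring {

    public static NCFileSystemAdapter create(IHyracksTaskContext ctx) throws Exception {
        ARecordType recordType = new ARecordType("LineType", new String[] { "line" },
                new IAType[] { BuiltinType.ASTRING }, false);
        IValueParserFactory[] fieldParsers = { UTF8StringParserFactory.INSTANCE };
        // Single-character delimiter, as enforced by configureFormat() earlier in this patch.
        ITupleParserFactory parserFactory = new NtDelimitedDataTupleParserFactory(recordType, fieldParsers, '|');
        FileSplit[] splits = { new FileSplit("nc1", new FileReference(new File("/tmp/data.txt"))) };
        return new NCFileSystemAdapter(splits, parserFactory, recordType, ctx);
    }
}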
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
index 93d2057..193cce4 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
@@ -15,32 +15,65 @@
package edu.uci.ics.asterix.external.dataset.adapter;
import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.dataset.adapter.IPullBasedFeedClient.InflowState;
+import edu.uci.ics.asterix.metadata.feeds.FeedPolicyEnforcer;
+import edu.uci.ics.asterix.metadata.feeds.IPullBasedFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
/**
- * Acts as an abstract class for all pull-based external data adapters.
- * Captures the common logic for obtaining bytes from an external source
- * and packing them into frames as tuples.
+ * Abstract base class for all pull-based external data adapters. Captures
+ * the common logic for obtaining bytes from an external source and packing them
+ * into frames as tuples.
*/
-public abstract class PullBasedAdapter extends AbstractDatasourceAdapter implements ITypedDatasourceAdapter {
+public abstract class PullBasedAdapter implements IPullBasedFeedAdapter {
private static final long serialVersionUID = 1L;
+ private static final Logger LOGGER = Logger.getLogger(PullBasedAdapter.class.getName());
+ private static final int timeout = 5; // seconds
protected ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
protected IPullBasedFeedClient pullBasedFeedClient;
protected ARecordType adapterOutputType;
+ protected boolean continueIngestion = true;
+ protected Map<String, String> configuration;
+
private FrameTupleAppender appender;
private ByteBuffer frame;
+ private long tupleCount = 0;
+ private final IHyracksTaskContext ctx;
+ private int frameTupleCount = 0;
+
+ protected FeedPolicyEnforcer policyEnforcer;
+
+ public FeedPolicyEnforcer getPolicyEnforcer() {
+ return policyEnforcer;
+ }
+
+ public void setFeedPolicyEnforcer(FeedPolicyEnforcer policyEnforcer) {
+ this.policyEnforcer = policyEnforcer;
+ }
public abstract IPullBasedFeedClient getFeedClient(int partition) throws Exception;
+ public PullBasedAdapter(Map<String, String> configuration, IHyracksTaskContext ctx) {
+ this.ctx = ctx;
+ this.configuration = configuration;
+ }
+
+ public long getIngestedRecordsCount() {
+ return tupleCount;
+ }
+
@Override
public void start(int partition, IFrameWriter writer) throws Exception {
appender = new FrameTupleAppender(ctx.getFrameSize());
@@ -48,22 +81,50 @@
appender.reset(frame, true);
pullBasedFeedClient = getFeedClient(partition);
- boolean moreData = false;
- while (true) {
+ InflowState inflowState = null;
+
+ while (continueIngestion) {
tupleBuilder.reset();
try {
- moreData = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput());
- if (moreData) {
- tupleBuilder.addFieldEndOffset();
- appendTupleToFrame(writer);
- } else {
- FrameUtils.flushFrame(frame, writer);
- break;
+ // blocking call
+ inflowState = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput(), timeout);
+ switch (inflowState) {
+ case DATA_AVAILABLE:
+ tupleBuilder.addFieldEndOffset();
+ appendTupleToFrame(writer);
+ frameTupleCount++;
+ break;
+ case NO_MORE_DATA:
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Reached end of feed");
+ }
+ FrameUtils.flushFrame(frame, writer);
+ tupleCount += frameTupleCount;
+ frameTupleCount = 0;
+ continueIngestion = false;
+ break;
+ case DATA_NOT_AVAILABLE:
+ if (frameTupleCount > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ tupleCount += frameTupleCount;
+ frameTupleCount = 0;
+ }
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Timed out on obtaining data from pull based adaptor. Trying again!");
+ }
+ break;
}
+
} catch (Exception failureException) {
try {
- pullBasedFeedClient.resetOnFailure(failureException);
- continue;
+ failureException.printStackTrace();
+ boolean continueIngestion = policyEnforcer.continueIngestionPostSoftwareFailure(failureException);
+ if (continueIngestion) {
+ tupleBuilder.reset();
+ continue;
+ } else {
+ throw failureException;
+ }
} catch (Exception recoveryException) {
throw new Exception(recoveryException);
}
@@ -71,16 +132,6 @@
}
}
- /**
- * Allows an adapter to handle a runtime exception.
- * @param e exception encountered during runtime
- * @throws AsterixException
- */
- public void resetOnFailure(Exception e) throws AsterixException {
- pullBasedFeedClient.resetOnFailure(e);
- tupleBuilder.reset();
- }
-
private void appendTupleToFrame(IFrameWriter writer) throws HyracksDataException {
if (!appender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0, tupleBuilder.getSize())) {
FrameUtils.flushFrame(frame, writer);
@@ -92,9 +143,17 @@
}
}
- @Override
- public ARecordType getAdapterOutputType() {
- return adapterOutputType;
+ /**
+ * Discontinue the ingestion of data and end the feed.
+ *
+ * @throws Exception
+ */
+ public void stop() throws Exception {
+ continueIngestion = false;
+ }
+
+ public Map<String, String> getConfiguration() {
+ return configuration;
}
}
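A concrete pull-based adapter now only needs a constructor that passes the configuration
and task context to the base class, plus getFeedClient() and getDataExchangeMode(). A
minimal illustrative subclass (assumptions: InMemoryFeedClient is the toy client sketched
earlier, and DataExchangeMode resolves through the adapter interfaces exactly as in the
adapters below):

package edu.uci.ics.asterix.external.dataset.adapter;

import java.util.Arrays;
import java.util.Map;

import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

public class InMemoryFeedAdapter extends PullBasedAdapter {

    private static final long serialVersionUID = 1L;

    public InMemoryFeedAdapter(Map<String, String> configuration, IHyracksTaskContext ctx) {
        super(configuration, ctx);
    }

    @Override
    public IPullBasedFeedClient getFeedClient(int partition) throws Exception {
        // Each partition gets its own client; here every partition serves the same list.
        return new InMemoryFeedClient(Arrays.asList("hello", "world"));
    }

    @Override
    public DataExchangeMode getDataExchangeMode() {
        return DataExchangeMode.PULL; // records are obtained by polling the source
    }
}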
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java
new file mode 100644
index 0000000..dfaee03
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java
@@ -0,0 +1,172 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import com.microsoft.windowsazure.services.core.storage.CloudStorageAccount;
+import com.microsoft.windowsazure.services.table.client.CloudTableClient;
+import com.microsoft.windowsazure.services.table.client.TableConstants;
+import com.microsoft.windowsazure.services.table.client.TableQuery;
+import com.microsoft.windowsazure.services.table.client.TableQuery.Operators;
+import com.microsoft.windowsazure.services.table.client.TableQuery.QueryComparisons;
+import com.microsoft.windowsazure.services.table.client.TableServiceEntity;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.library.java.JObjects.ByteArrayAccessibleInputStream;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+
+public class PullBasedAzureFeedClient implements IPullBasedFeedClient {
+ private static final Logger LOGGER = Logger.getLogger(PullBasedAzureFeedClient.class.getName());
+
+ private final String tableName;
+ private final ARecordType outputType;
+ private final CloudTableClient ctc;
+ private final TableQuery<? extends TableServiceEntity> tableQuery;
+ private Iterator<? extends TableServiceEntity> entityIt;
+
+ private final Pattern arrayPattern = Pattern.compile("\\[(?<vals>.*)\\]");
+ private final Pattern int32Pattern = Pattern.compile(":(?<int>\\d+)(,|})");
+ private final Pattern doubleWithEndingZeroPattern = Pattern.compile("\\d+\\.(?<zero>0)(,|})");
+
+ private final ResettableByteArrayOutputStream rbaos;
+ private final DataOutputStream dos;
+ private final ADMDataParser adp;
+ private final ByteArrayAccessibleInputStream baais;
+
+ public PullBasedAzureFeedClient(CloudStorageAccount csa, ARecordType outputType, String tableName, String lowKey,
+ String highKey) throws AsterixException {
+ this.tableName = tableName;
+ this.outputType = outputType;
+ this.tableQuery = configureTableQuery(tableName, lowKey, highKey);
+ this.ctc = csa.createCloudTableClient();
+ rbaos = new ResettableByteArrayOutputStream();
+ dos = new DataOutputStream(rbaos);
+ baais = new ByteArrayAccessibleInputStream(rbaos.getByteArray(), 0, 0);
+ adp = new ADMDataParser();
+ adp.initialize(baais, outputType, false);
+ }
+
+ private TableQuery<? extends TableServiceEntity> configureTableQuery(String tableName, String lowKey, String highKey) {
+ TableQuery<? extends TableServiceEntity> baseTQ = TableQuery.from(tableName, classFromString(tableName));
+ if (lowKey != null && highKey != null) {
+ String lowKeyPredicate = TableQuery.generateFilterCondition(TableConstants.PARTITION_KEY,
+ QueryComparisons.GREATER_THAN_OR_EQUAL, lowKey);
+ String highKeyPredicate = TableQuery.generateFilterCondition(TableConstants.PARTITION_KEY,
+ QueryComparisons.LESS_THAN_OR_EQUAL, highKey);
+ String partitionPredicate = TableQuery.combineFilters(lowKeyPredicate, Operators.AND, highKeyPredicate);
+ return baseTQ.where(partitionPredicate);
+ }
+
+ return baseTQ;
+ }
+
+ private Class<? extends TableServiceEntity> classFromString(String tableName) {
+ return tableName.equals("Postings") ? AzureTweetEntity.class : AzureTweetMetadataEntity.class;
+ }
+
+ @Override
+ public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException {
+ if (entityIt == null) {
+ entityIt = ctc.execute(tableQuery).iterator();
+ }
+
+ boolean moreTweets = entityIt.hasNext();
+ if (moreTweets) {
+ String json = null;
+ try {
+ json = getJSONString();
+ byte[] jsonBytes = json.getBytes(StandardCharsets.UTF_8);
+ rbaos.reset();
+ dos.write(jsonBytes, 0, jsonBytes.length);
+ dos.flush();
+ baais.setContent(rbaos.getByteArray(), 0, jsonBytes.length);
+ adp.initialize(baais, outputType, false);
+ adp.parse(dataOutput);
+ } catch (Exception e) {
+ if (json != null) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Record in error: " + json);
+ }
+ }
+ e.printStackTrace();
+ throw new AsterixException(e);
+ }
+ }
+ return moreTweets ? InflowState.DATA_AVAILABLE : InflowState.NO_MORE_DATA;
+ }
+
+ private String getJSONString() throws JSONException {
+ if (tableName.equals("Postings")) {
+ AzureTweetEntity tweet = (AzureTweetEntity) entityIt.next();
+ JSONObject tjo = new JSONObject(tweet.getJSON().toString());
+ tjo.put("posting_id", tweet.getRowKey());
+ tjo.put("user_id", tweet.getPartitionKey());
+ tjo.remove("id");
+ JSONObject utjo = tjo.getJSONObject("user");
+ utjo.remove("id");
+ tjo.put("user", utjo);
+ return tjo.toString();
+ } else if (tableName.equals("PostingMetadata")) {
+ AzureTweetMetadataEntity tweetMD = (AzureTweetMetadataEntity) entityIt.next();
+ JSONObject tmdjo = new JSONObject();
+ tmdjo.put("posting_id", tweetMD.getRowKey());
+ tmdjo.put("user_id", tweetMD.getPartitionKey());
+ tmdjo.put("created_at", stripTillColon(tweetMD.getCreationTimestamp()).replaceAll("\"", ""));
+ tmdjo.put("posting_type", stripTillColon(tweetMD.getPostingType()));
+ List<Integer> productIdList = Arrays.asList(extractArray(tweetMD.getProductId()));
+ tmdjo.put("product_id", productIdList);
+ if (tweetMD.getEthnicity() != null) {
+ tmdjo.put("ethnicity", new JSONObject(stripTillColon(tweetMD.getEthnicity())));
+ }
+ if (tweetMD.getGender() != null) {
+ tmdjo.put("gender", new JSONObject(stripTillColon(tweetMD.getGender())));
+ }
+ if (tweetMD.getLocation() != null) {
+ String locStr = stripTillColon(tweetMD.getLocation());
+ Matcher m = int32Pattern.matcher(locStr);
+ while (m.find()) {
+ locStr = locStr.replace(m.group("int"), m.group("int") + ".01");
+ }
+ m = doubleWithEndingZeroPattern.matcher(locStr);
+ while (m.find()) {
+ locStr = locStr.replace(m.group("zero"), "01");
+ }
+ tmdjo.put("location", new JSONObject(locStr));
+ }
+ if (tweetMD.getSentiment() != null) {
+ tmdjo.put("sentiment", stripTillColon(tweetMD.getSentiment()));
+ }
+ return tmdjo.toString();
+ } else {
+ throw new IllegalArgumentException();
+ }
+ }
+
+ private String stripTillColon(String str) {
+ return str.substring(str.indexOf(':') + 1);
+ }
+
+ private Integer[] extractArray(String str) {
+ Matcher m = arrayPattern.matcher(str);
+ m.find();
+ String[] stringNums = m.group("vals").replaceAll("\\s", "").split(",");
+ Integer[] nums = new Integer[stringNums.length];
+ for (int i = 0; i < nums.length; ++i) {
+ nums[i] = Integer.parseInt(stringNums[i]);
+ }
+ return nums;
+ }
+}
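The patterns above evidently exist to nudge Azure-stored coordinates into a shape the ADM
parser reads as doubles rather than int32s: bare integers and values ending in ".0" are
both rewritten to carry a non-zero decimal part. Note that String.replace substitutes
every occurrence of the matched text, so this works only where the match is unambiguous
within the string. A standalone illustration with made-up values:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LocationCoercionDemo {

    private static final Pattern INT32 = Pattern.compile(":(?<int>\\d+)(,|})");
    private static final Pattern TRAILING_ZERO = Pattern.compile("\\d+\\.(?<zero>0)(,|})");

    public static void main(String[] args) {
        // An integer coordinate is rewritten so it parses as a double.
        String a = "{\"lat\":33}";
        Matcher m = INT32.matcher(a);
        while (m.find()) {
            a = a.replace(m.group("int"), m.group("int") + ".01");
        }
        System.out.println(a); // {"lat":33.01}

        // A double ending in .0 gets its trailing zero bumped to a non-zero decimal.
        String b = "{\"lng\":-117.0}";
        m = TRAILING_ZERO.matcher(b);
        while (m.find()) {
            b = b.replace(m.group("zero"), "01");
        }
        System.out.println(b); // {"lng":-117.01}
    }
}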
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java
new file mode 100644
index 0000000..c739ca3
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java
@@ -0,0 +1,80 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.net.URISyntaxException;
+import java.security.InvalidKeyException;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.microsoft.windowsazure.services.core.storage.CloudStorageAccount;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class PullBasedAzureTwitterAdapter extends PullBasedAdapter implements IDatasourceAdapter {
+ private static final Logger LOGGER = Logger.getLogger(PullBasedAzureTwitterAdapter.class.getName());
+
+ private static final long serialVersionUID = 1L;
+
+ private final CloudStorageAccount csa;
+ private final String connectionString;
+ private final String azureAccountName;
+ private final String azureAccountKey;
+ private final ARecordType outputType;
+ private final String tableName;
+ private final boolean partitioned;
+
+ private String[] lowKeys;
+ private String[] highKeys;
+
+ public PullBasedAzureTwitterAdapter(String accountName, String accountKey, String tableName, String[] partitions,
+ Map<String, String> configuration, IHyracksTaskContext ctx, ARecordType outputType) throws AsterixException {
+ super(configuration, ctx);
+ this.outputType = outputType;
+ if (partitions != null) {
+ partitioned = true;
+ configurePartitions(partitions);
+ } else {
+ partitioned = false;
+ }
+ this.azureAccountName = accountName;
+ this.azureAccountKey = accountKey;
+ this.tableName = tableName;
+
+ connectionString = "DefaultEndpointsProtocol=http;" + "AccountName=" + azureAccountName + ";AccountKey="
+ + azureAccountKey + ";";
+ try {
+ csa = CloudStorageAccount.parse(connectionString);
+ } catch (InvalidKeyException | URISyntaxException e) {
+ throw new AsterixException("You must specify a valid Azure account name and key", e);
+ }
+ }
+
+ private void configurePartitions(String[] partitions) {
+ lowKeys = new String[partitions.length];
+ highKeys = new String[partitions.length];
+ for (int i = 0; i < partitions.length; ++i) {
+ String[] loHi = partitions[i].split(":");
+ lowKeys[i] = loHi[0];
+ highKeys[i] = loHi[1];
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Partition " + i + " configured for keys " + lowKeys[i] + " to " + highKeys[i]);
+ }
+ }
+ }
+
+ @Override
+ public IPullBasedFeedClient getFeedClient(int partition) throws Exception {
+ if (partitioned) {
+ return new PullBasedAzureFeedClient(csa, outputType, tableName, lowKeys[partition], highKeys[partition]);
+ }
+ return new PullBasedAzureFeedClient(csa, outputType, tableName, null, null);
+ }
+
+ @Override
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PULL;
+ }
+}
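When partitions are supplied, each entry is a "lowKey:highKey" pair bounding the Azure
partition-key range scanned by one ingestion partition. A small standalone sketch of the
format configurePartitions() expects (key values are hypothetical):

public class PartitionRangesDemo {
    public static void main(String[] args) {
        // Two ingestion partitions covering disjoint partition-key ranges.
        String[] partitions = { "000:499", "500:999" };
        for (int i = 0; i < partitions.length; ++i) {
            String[] loHi = partitions[i].split(":");
            System.out.println("Partition " + i + " scans keys " + loHi[0] + " to " + loHi[1]);
        }
    }
}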
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
index 6d05bde..e728787 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
@@ -16,47 +16,151 @@
import java.io.DataOutput;
import java.io.IOException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutablePoint;
import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.AMutableUnorderedList;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
+ protected static final Logger LOGGER = Logger.getLogger(PullBasedFeedClient.class.getName());
+
protected ARecordSerializerDeserializer recordSerDe;
protected AMutableRecord mutableRecord;
protected boolean messageReceived;
- protected boolean continueIngestion=true;
+ protected boolean continueIngestion = true;
+ protected IARecordBuilder recordBuilder = new RecordBuilder();
- public abstract boolean setNextRecord() throws Exception;
+ protected AMutableString aString = new AMutableString("");
+ protected AMutableInt32 aInt32 = new AMutableInt32(0);
+ protected AMutablePoint aPoint = new AMutablePoint(0, 0);
+ protected AMutableDateTime aDateTime = new AMutableDateTime(0);
+
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+
+ public abstract InflowState setNextRecord() throws Exception;
@Override
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
+ public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException {
try {
- boolean newData = setNextRecord();
- if (newData && continueIngestion) {
- IAType t = mutableRecord.getType();
- ATypeTag tag = t.getTypeTag();
- try {
- dataOutput.writeByte(tag.serialize());
- } catch (IOException e) {
- throw new HyracksDataException(e);
+ InflowState state = null;
+ int waitCount = 0;
+ boolean continueWait = true;
+ while ((state == null || state.equals(InflowState.DATA_NOT_AVAILABLE)) && continueWait) {
+ state = setNextRecord();
+ switch (state) {
+ case DATA_AVAILABLE:
+ IAType t = mutableRecord.getType();
+ ATypeTag tag = t.getTypeTag();
+ dataOutput.writeByte(tag.serialize());
+ recordBuilder.reset(mutableRecord.getType());
+ recordBuilder.init();
+ writeRecord(mutableRecord, dataOutput, recordBuilder);
+ break;
+ case DATA_NOT_AVAILABLE:
+ if (waitCount > timeout) {
+ continueWait = false;
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Waiting to obtaing data from pull based adaptor");
+ }
+ Thread.sleep(1000);
+ waitCount++;
+ }
+ break;
+ case NO_MORE_DATA:
+ break;
}
- recordSerDe.serialize(mutableRecord, dataOutput);
- return true;
}
- return false;
+ return state;
} catch (Exception e) {
throw new AsterixException(e);
}
}
- @Override
- public void stop() {
- continueIngestion = false;
+ private void writeRecord(AMutableRecord record, DataOutput dataOutput, IARecordBuilder recordBuilder)
+ throws IOException, AsterixException {
+ ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+ int numFields = record.getType().getFieldNames().length;
+ for (int pos = 0; pos < numFields; pos++) {
+ fieldValue.reset();
+ IAObject obj = record.getValueByPos(pos);
+ writeObject(obj, fieldValue.getDataOutput());
+ recordBuilder.addField(pos, fieldValue);
+ }
+ recordBuilder.write(dataOutput, false);
+ }
+
+ private void writeObject(IAObject obj, DataOutput dataOutput) throws IOException, AsterixException {
+ switch (obj.getType().getTypeTag()) {
+ case RECORD:
+ ATypeTag tag = obj.getType().getTypeTag();
+ try {
+ dataOutput.writeByte(tag.serialize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ IARecordBuilder recordBuilder = new RecordBuilder();
+ recordBuilder.reset((ARecordType) obj.getType());
+ recordBuilder.init();
+ writeRecord((AMutableRecord) obj, dataOutput, recordBuilder);
+ break;
+ case UNORDEREDLIST:
+ tag = obj.getType().getTypeTag();
+ try {
+ dataOutput.writeByte(tag.serialize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+ listBuilder.reset((AUnorderedListType) ((AMutableUnorderedList) obj).getType());
+ IACursor cursor = ((AMutableUnorderedList) obj).getCursor();
+ ArrayBackedValueStorage listItemValue = new ArrayBackedValueStorage();
+ while (cursor.next()) {
+ listItemValue.reset();
+ IAObject item = cursor.get();
+ writeObject(item, listItemValue.getDataOutput());
+ listBuilder.addItem(listItemValue);
+ }
+ listBuilder.write(dataOutput, false);
+ break;
+ default:
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(obj.getType()).serialize(obj,
+ dataOutput);
+ break;
+ }
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
index 2715a00..838cfeb 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
@@ -14,32 +14,25 @@
*/
package edu.uci.ics.asterix.external.dataset.adapter;
-import java.util.HashMap;
import java.util.Map;
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
* An adapter that provides the functionality of receiving tweets from the
* Twitter service in the form of ADM formatted records.
*/
-public class PullBasedTwitterAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
+public class PullBasedTwitterAdapter extends PullBasedAdapter implements IFeedAdapter {
private static final long serialVersionUID = 1L;
public static final String QUERY = "query";
public static final String INTERVAL = "interval";
- private boolean alterRequested = false;
- private Map<String, String> alteredParams = new HashMap<String, String>();
private ARecordType recordType;
-
private PullBasedTwitterFeedClient tweetClient;
@Override
@@ -47,61 +40,18 @@
return tweetClient;
}
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- configuration = arguments;
- String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
- IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
- BuiltinType.ASTRING };
- recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes, false);
+ public PullBasedTwitterAdapter(Map<String, String> configuration, ARecordType recordType, IHyracksTaskContext ctx) throws AsterixException {
+ super(configuration, ctx);
+ tweetClient = new PullBasedTwitterFeedClient(ctx, recordType, this);
}
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- tweetClient = new PullBasedTwitterFeedClient(ctx, this);
- }
-
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
-
- @Override
- public void stop() {
- tweetClient.stop();
- }
-
- @Override
- public void alter(Map<String, String> properties) {
- alterRequested = true;
- this.alteredParams = properties;
- }
-
- public boolean isAlterRequested() {
- return alterRequested;
- }
-
- public Map<String, String> getAlteredParams() {
- return alteredParams;
- }
-
- public void postAlteration() {
- alteredParams = null;
- alterRequested = false;
- }
-
- @Override
public ARecordType getAdapterOutputType() {
return recordType;
}
@Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- if (partitionConstraint == null) {
- partitionConstraint = new AlgebricksCountPartitionConstraint(1);
- }
- return partitionConstraint;
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PULL;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
index 9f44a03..2c8d659 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
@@ -14,9 +14,9 @@
*/
package edu.uci.ics.asterix.external.dataset.adapter;
-import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
-import java.util.Queue;
+import java.util.UUID;
import twitter4j.Query;
import twitter4j.QueryResult;
@@ -24,7 +24,6 @@
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
@@ -41,42 +40,24 @@
private String keywords;
private Query query;
- private long id = 0;
- private String id_prefix;
private Twitter twitter;
private int requestInterval = 10; // seconds
- private Queue<Tweet> tweetBuffer = new LinkedList<Tweet>();
+ private QueryResult result;
- IAObject[] mutableFields;
- String[] tupleFieldValues;
+ private IAObject[] mutableFields;
+ private String[] tupleFieldValues;
private ARecordType recordType;
+ private int nextTweetIndex = 0;
- public PullBasedTwitterFeedClient(IHyracksTaskContext ctx, PullBasedTwitterAdapter adapter) {
- this.id_prefix = ctx.getJobletContext().getApplicationContext().getNodeId();
+ public PullBasedTwitterFeedClient(IHyracksTaskContext ctx, ARecordType recordType, PullBasedTwitterAdapter adapter) {
twitter = new TwitterFactory().getInstance();
mutableFields = new IAObject[] { new AMutableString(null), new AMutableString(null), new AMutableString(null),
new AMutableString(null), new AMutableString(null) };
- recordType = adapter.getAdapterOutputType();
+ this.recordType = recordType;
recordSerDe = new ARecordSerializerDeserializer(recordType);
mutableRecord = new AMutableRecord(recordType, mutableFields);
- initialize(adapter.getConfiguration());
tupleFieldValues = new String[recordType.getFieldNames().length];
- }
-
- public void initialize(Map<String, Object> params) {
- this.keywords = (String) params.get(PullBasedTwitterAdapter.QUERY);
- this.query = new Query(keywords);
- query.setRpp(100);
- }
-
- private Tweet getNextTweet() throws TwitterException, InterruptedException {
- if (tweetBuffer.isEmpty()) {
- QueryResult result;
- Thread.sleep(1000 * requestInterval);
- result = twitter.search(query);
- tweetBuffer.addAll(result.getTweets());
- }
- return tweetBuffer.remove();
+ initialize(adapter.getConfiguration());
}
public ARecordType getRecordType() {
@@ -88,15 +69,14 @@
}
@Override
- public boolean setNextRecord() throws Exception {
+ public InflowState setNextRecord() throws Exception {
Tweet tweet;
tweet = getNextTweet();
if (tweet == null) {
- return false;
+ return InflowState.DATA_NOT_AVAILABLE;
}
int numFields = recordType.getFieldNames().length;
-
- tupleFieldValues[0] = id_prefix + ":" + id;
+ tupleFieldValues[0] = UUID.randomUUID().toString();
tupleFieldValues[1] = tweet.getFromUser();
tupleFieldValues[2] = tweet.getLocation() == null ? "" : tweet.getLocation();
tupleFieldValues[3] = tweet.getText();
@@ -105,13 +85,24 @@
((AMutableString) mutableFields[i]).setValue(tupleFieldValues[i]);
mutableRecord.setValueAtPos(i, mutableFields[i]);
}
- id++;
- return true;
+ return InflowState.DATA_AVAILABLE;
}
- @Override
- public void resetOnFailure(Exception e) throws AsterixException {
- // TOOO: implement resetting logic for Twitter
+ private void initialize(Map<String, String> params) {
+ this.keywords = params.get(PullBasedTwitterAdapter.QUERY);
+ this.requestInterval = Integer.parseInt(params.get(PullBasedTwitterAdapter.INTERVAL));
+ this.query = new Query(keywords);
+ query.setRpp(100);
+ }
+
+ private Tweet getNextTweet() throws TwitterException, InterruptedException {
+ if (result == null || nextTweetIndex >= result.getTweets().size()) {
+ Thread.sleep(1000 * requestInterval);
+ result = twitter.search(query);
+ nextTweetIndex = 0;
+ }
+ List<Tweet> tw = result.getTweets();
+ return tw.get(nextTweetIndex++);
}
}
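Because initialize() now parses both QUERY and INTERVAL eagerly, a configuration that
omits either key will fail when the feed client is constructed. A hypothetical wiring
sketch showing the minimum configuration the refactored adapter expects (values are
made up):

package edu.uci.ics.asterix.external.dataset.adapter;

import java.util.HashMap;
import java.util.Map;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

public class TwitterAdapterWiring {

    public static PullBasedTwitterAdapter create(ARecordType recordType, IHyracksTaskContext ctx)
            throws AsterixException {
        Map<String, String> configuration = new HashMap<String, String>();
        configuration.put(PullBasedTwitterAdapter.QUERY, "asterixdb"); // search keywords
        configuration.put(PullBasedTwitterAdapter.INTERVAL, "10"); // seconds between searches
        return new PullBasedTwitterAdapter(configuration, recordType, ctx);
    }
}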
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
index 8379b18..4eea034 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
@@ -15,74 +15,35 @@
package edu.uci.ics.asterix.external.dataset.adapter;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
/**
* RSSFeedAdapter provides the functionality of fetching an RSS based feed.
*/
-public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
+public class RSSFeedAdapter extends PullBasedAdapter implements IFeedAdapter {
private static final long serialVersionUID = 1L;
+ private static final String KEY_RSS_URL = "rss_url";
+
private List<String> feedURLs = new ArrayList<String>();
- private boolean isStopRequested = false;
- private boolean isAlterRequested = false;
- private Map<String, String> alteredParams = new HashMap<String, String>();
private String id_prefix = "";
- private ARecordType recordType;
private IPullBasedFeedClient rssFeedClient;
- public static final String KEY_RSS_URL = "url";
- public static final String KEY_INTERVAL = "interval";
+ private ARecordType recordType;
- public boolean isStopRequested() {
- return isStopRequested;
- }
-
- public void setStopRequested(boolean isStopRequested) {
- this.isStopRequested = isStopRequested;
- }
-
- @Override
- public void alter(Map<String, String> properties) {
- isAlterRequested = true;
- this.alteredParams = properties;
- reconfigure(properties);
- }
-
- @Override
- public void stop() {
- isStopRequested = true;
- }
-
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
-
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- configuration = arguments;
- String rssURLProperty = (String) configuration.get(KEY_RSS_URL);
- if (rssURLProperty == null) {
- throw new IllegalArgumentException("no rss url provided");
- }
- initializeFeedURLs(rssURLProperty);
- configurePartitionConstraints();
- recordType = new ARecordType("FeedRecordType", new String[] { "id", "title", "description", "link" },
- new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING },
- false);
+ public RSSFeedAdapter(Map<String, String> configuration, ARecordType recordType, IHyracksTaskContext ctx)
+ throws AsterixException {
+ super(configuration, ctx);
+ id_prefix = ctx.getJobletContext().getApplicationContext().getNodeId();
+ this.recordType = recordType;
}
private void initializeFeedURLs(String rssURLProperty) {
@@ -94,30 +55,12 @@
}
protected void reconfigure(Map<String, String> arguments) {
- String rssURLProperty = (String) configuration.get(KEY_RSS_URL);
+ String rssURLProperty = configuration.get(KEY_RSS_URL);
if (rssURLProperty != null) {
initializeFeedURLs(rssURLProperty);
}
}
- protected void configurePartitionConstraints() {
- partitionConstraint = new AlgebricksCountPartitionConstraint(feedURLs.size());
- }
-
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- id_prefix = ctx.getJobletContext().getApplicationContext().getNodeId();
- }
-
- public boolean isAlterRequested() {
- return isAlterRequested;
- }
-
- public Map<String, String> getAlteredParams() {
- return alteredParams;
- }
-
@Override
public IPullBasedFeedClient getFeedClient(int partition) throws Exception {
if (rssFeedClient == null) {
@@ -126,17 +69,13 @@
return rssFeedClient;
}
- @Override
- public ARecordType getAdapterOutputType() {
+ public ARecordType getRecordType() {
return recordType;
}
@Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- if (partitionConstraint == null) {
- configurePartitionConstraints();
- }
- return partitionConstraint;
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PULL;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
index d89674f..41ed923 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
@@ -41,7 +41,6 @@
@SuppressWarnings("rawtypes")
public class RSSFeedClient extends PullBasedFeedClient {
- private final String feedURL;
private long id = 0;
private String idPrefix;
private boolean feedModified = false;
@@ -66,25 +65,24 @@
}
public RSSFeedClient(RSSFeedAdapter adapter, String feedURL, String id_prefix) throws MalformedURLException {
- this.feedURL = feedURL;
this.idPrefix = id_prefix;
- feedUrl = new URL(feedURL);
+ this.feedUrl = new URL(feedURL);
feedInfoCache = HashMapFeedInfoCache.getInstance();
fetcher = new HttpURLFeedFetcher(feedInfoCache);
listener = new FetcherEventListenerImpl(this);
fetcher.addFetcherEventListener(listener);
mutableFields = new IAObject[] { new AMutableString(null), new AMutableString(null), new AMutableString(null),
new AMutableString(null) };
- recordType = adapter.getAdapterOutputType();
+ recordType = adapter.getRecordType();
mutableRecord = new AMutableRecord(recordType, mutableFields);
tupleFieldValues = new String[recordType.getFieldNames().length];
}
@Override
- public boolean setNextRecord() throws Exception {
+ public InflowState setNextRecord() throws Exception {
SyndEntryImpl feedEntry = getNextRSSFeed();
if (feedEntry == null) {
- return false;
+ return InflowState.DATA_NOT_AVAILABLE;
}
tupleFieldValues[0] = idPrefix + ":" + id;
tupleFieldValues[1] = feedEntry.getTitle();
@@ -96,7 +94,7 @@
mutableRecord.setValueAtPos(i, mutableFields[i]);
}
id++;
- return true;
+ return InflowState.DATA_AVAILABLE;
}
private SyndEntryImpl getNextRSSFeed() throws Exception {
@@ -113,7 +111,6 @@
@SuppressWarnings("unchecked")
private void fetchFeed() {
try {
- System.err.println("Retrieving feed " + feedURL);
// Retrieve the feed.
// We will get a Feed Polled Event and then a
// Feed Retrieved event (assuming the feed is valid)
@@ -132,12 +129,6 @@
}
}
- @Override
- public void resetOnFailure(Exception e) {
- // TODO Auto-generated method stub
-
- }
-
}
class FetcherEventListenerImpl implements FetcherListener {
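
A hedged sketch of the consumption loop the new tri-state return type enables; the actual driver lives in PullBasedAdapter, which is not part of this hunk, and the writer hand-off is elided:

    // Sketch only: poll the client until it reports no data. The InflowState
    // values are the two introduced above; any further states are not shown.
    InflowState state = client.setNextRecord();
    while (state == InflowState.DATA_AVAILABLE) {
        // mutableRecord now holds the parsed entry; forward it downstream here
        state = client.setNextRecord();
    }
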
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java
new file mode 100644
index 0000000..f09a841
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+public abstract class StreamBasedAdapter implements IDatasourceAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ protected static final Logger LOGGER = Logger.getLogger(StreamBasedAdapter.class.getName());
+
+ public abstract InputStream getInputStream(int partition) throws IOException;
+
+ protected final ITupleParser tupleParser;
+
+ protected final IAType sourceDatatype;
+
+ public StreamBasedAdapter(ITupleParserFactory parserFactory, IAType sourceDatatype, IHyracksTaskContext ctx)
+ throws HyracksDataException {
+ this.tupleParser = parserFactory.createTupleParser(ctx);
+ this.sourceDatatype = sourceDatatype;
+ }
+
+ @Override
+ public void start(int partition, IFrameWriter writer) throws Exception {
+ InputStream in = getInputStream(partition);
+ if (in != null) {
+ tupleParser.parse(in, writer);
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Could not obtain input stream for parsing from adaptor " + this + "[" + partition + "]");
+ }
+ }
+ }
+
+}
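
A minimal hypothetical subclass illustrating the contract: supply an InputStream per partition and inherit start(), which drives the tuple parser. The class name and file mapping are assumptions, and any further IDatasourceAdapter obligations are elided:

    import java.io.FileInputStream;

    public class FileStreamAdapter extends StreamBasedAdapter {

        private static final long serialVersionUID = 1L;
        private final String path; // hypothetical: one local file per adapter

        public FileStreamAdapter(ITupleParserFactory parserFactory, IAType sourceDatatype,
                IHyracksTaskContext ctx, String path) throws HyracksDataException {
            super(parserFactory, sourceDatatype, ctx);
            this.path = path;
        }

        @Override
        public InputStream getInputStream(int partition) throws IOException {
            // A real adapter would map the partition number to a file split.
            return new FileInputStream(path);
        }
    }
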
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
deleted file mode 100644
index c36dc03..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-import java.util.Map;
-
-/**
- * A feed control message containing the altered values for
- * adapter configuration parameters. This message is dispatched
- * to all runtime instances of the feed's adapter.
- */
-public class AlterFeedMessage extends FeedMessage {
-
- private static final long serialVersionUID = 1L;
-
- private final Map<String, String> alteredConfParams;
-
- public AlterFeedMessage(Map<String, String> alteredConfParams) {
- super(MessageType.ALTER);
- this.alteredConfParams = alteredConfParams;
- }
-
- @Override
- public MessageType getMessageType() {
- return MessageType.ALTER;
- }
-
- public Map<String, String> getAlteredConfParams() {
- return alteredConfParams;
- }
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
deleted file mode 100644
index 94e679d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-import java.io.Serializable;
-
-/**
- * A unique identifier for a feed (dataset).
- */
-public class FeedId implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final String dataverse;
- private final String dataset;
- private final int hashcode;
-
- public FeedId(String dataverse, String dataset) {
- this.dataset = dataset;
- this.dataverse = dataverse;
- this.hashcode = (dataverse + "." + dataset).hashCode();
- }
-
- public String getDataverse() {
- return dataverse;
- }
-
- public String getDataset() {
- return dataset;
- }
-
- @Override
- public boolean equals(Object o) {
- if (o == null || !(o instanceof FeedId)) {
- return false;
- }
- if (((FeedId) o).getDataset().equals(dataset) && ((FeedId) o).getDataverse().equals(dataverse)) {
- return true;
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- return hashcode;
- }
-
- @Override
- public String toString() {
- return dataverse + "." + dataset;
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
deleted file mode 100644
index 5b3ed35..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-/**
- * Handle (de-)registration of feeds for delivery of control messages.
- */
-public class FeedManager implements IFeedManager {
-
- public static FeedManager INSTANCE = new FeedManager();
-
- private FeedManager() {
-
- }
-
- private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
-
- @Override
- public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException {
- Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
- try {
- if (operatorQueues != null) {
- for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
- queue.put(feedMessage);
- }
- }
- } catch (Exception e) {
- throw new AsterixException(e);
- }
- }
-
- @Override
- public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
- Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
- if (feedQueues == null) {
- feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
- }
- feedQueues.add(queue);
- outGoingMsgQueueMap.put(feedId, feedQueues);
- }
-
- @Override
- public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
- Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
- if (feedQueues == null || !feedQueues.contains(queue)) {
- throw new IllegalArgumentException(" Unable to de-register feed message queue. Unknown feedId " + feedId);
- }
- feedQueues.remove(queue);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
deleted file mode 100644
index 96262fe..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-/**
- * A control message that can be sent to the runtime instance of a
- * feed's adapter.
- */
-public class FeedMessage implements IFeedMessage {
-
- private static final long serialVersionUID = 1L;
-
- protected MessageType messageType;
-
- public FeedMessage(MessageType messageType) {
- this.messageType = messageType;
- }
-
- public MessageType getMessageType() {
- return messageType;
- }
-
- public void setMessageType(MessageType messageType) {
- this.messageType = messageType;
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
deleted file mode 100644
index 2febbe4..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-/**
- * Handle (de-)registration of feeds for delivery of control messages.
- */
-public interface IFeedManager {
-
- /**
- * Register an input message queue for a feed specified by feedId.
- * All messages sent to a feed are directed to the registered queue(s).
- *
- * @param feedId
- * an identifier for the feed dataset.
- * @param queue
- * an input message queue for receiving control messages.
- */
- public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- /**
- * Unregister an input message queue for a feed specified by feedId.
- * A feed prior to finishing should unregister all previously registered queue(s)
- * as it is no longer active and thus need not process any control messages.
- *
- * @param feedId
- * an identifier for the feed dataset.
- * @param queue
- * an input message queue for receiving control messages.
- */
- public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- /**
- * Deliver a message to a feed with a given feedId.
- *
- * @param feedId
- * identifier for the feed dataset.
- * @param feedMessage
- * control message that needs to be delivered.
- * @throws Exception
- */
- public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException;
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
deleted file mode 100644
index dcef2c8..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.feed.lifecycle;
-
-import java.io.Serializable;
-
-public interface IFeedMessage extends Serializable {
-
- public enum MessageType {
- STOP,
- ALTER,
- }
-
- public MessageType getMessageType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunction.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunction.java
new file mode 100755
index 0000000..5f0badd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunction.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public abstract class ExternalFunction implements IExternalFunction {
+
+ protected final IExternalFunctionInfo finfo;
+ protected final IFunctionFactory externalFunctionFactory;
+ protected final IExternalFunction externalFunction;
+ protected final ICopyEvaluatorFactory[] evaluatorFactories;
+ protected final IDataOutputProvider out;
+ protected final ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
+ protected final ICopyEvaluator[] argumentEvaluators;
+ protected final JavaFunctionHelper functionHelper;
+
+ public ExternalFunction(IExternalFunctionInfo finfo, ICopyEvaluatorFactory args[],
+ IDataOutputProvider outputProvider) throws AlgebricksException {
+ this.finfo = finfo;
+ this.evaluatorFactories = args;
+ this.out = outputProvider;
+ argumentEvaluators = new ICopyEvaluator[args.length];
+ for (int i = 0; i < args.length; i++) {
+ argumentEvaluators[i] = args[i].createEvaluator(inputVal);
+ }
+ functionHelper = new JavaFunctionHelper(finfo, outputProvider);
+
+ String[] fnameComponents = finfo.getFunctionIdentifier().getName().split("#");
+ String functionLibrary = fnameComponents[0];
+ String dataverse = finfo.getFunctionIdentifier().getNamespace();
+ ClassLoader libraryClassLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, functionLibrary);
+ String classname = finfo.getFunctionBody().trim();
+ Class&lt;?&gt; clazz;
+ try {
+ clazz = Class.forName(classname, true, libraryClassLoader);
+ externalFunctionFactory = (IFunctionFactory) clazz.newInstance();
+ externalFunction = externalFunctionFactory.getExternalFunction();
+ } catch (Exception e) {
+ throw new AlgebricksException(" Unable to load/instantiate class " + classname, e);
+ }
+ }
+
+ public static ISerializerDeserializer getSerDe(Object typeInfo) {
+ return AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(typeInfo);
+ }
+
+ public IExternalFunctionInfo getFinfo() {
+ return finfo;
+ }
+
+ public void setArguments(IFrameTupleReference tuple) throws AlgebricksException, IOException, AsterixException {
+ for (int i = 0; i < evaluatorFactories.length; i++) {
+ inputVal.reset();
+ argumentEvaluators[i].evaluate(tuple);
+ functionHelper.setArgument(i, inputVal.getByteArray());
+ }
+ }
+
+ @Override
+ public void deinitialize() {
+ externalFunction.deinitialize();
+ }
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) throws Exception {
+ externalFunction.initialize(functionHelper);
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionDescriptorProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionDescriptorProvider.java
new file mode 100755
index 0000000..72ab2f9
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionDescriptorProvider.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+
+public class ExternalFunctionDescriptorProvider {
+
+ public static IFunctionDescriptor getExternalFunctionDescriptor(IExternalFunctionInfo finfo)
+ throws AsterixException {
+ switch (finfo.getKind()) {
+ case SCALAR:
+ return new ExternalScalarFunctionDescriptor(finfo);
+ case AGGREGATE:
+ case UNNEST:
+ throw new AsterixException("Unsupported function kind :" + finfo.getKind());
+ default:
+ break;
+ }
+ return null;
+ }
+
+}
+
+class ExternalScalarFunctionDescriptor extends AbstractScalarFunctionDynamicDescriptor implements IFunctionDescriptor {
+
+ private final IFunctionInfo finfo;
+ private ICopyEvaluatorFactory evaluatorFactory;
+ private ICopyEvaluatorFactory[] args;
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ evaluatorFactory = new ExternalScalarFunctionEvaluatorFactory((IExternalFunctionInfo) finfo, args);
+ return evaluatorFactory;
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return finfo.getFunctionIdentifier();
+ }
+
+ public ExternalScalarFunctionDescriptor(IFunctionInfo finfo) {
+ this.finfo = finfo;
+ }
+
+}
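
Taken together, a hedged sketch of the hand-off: the descriptor wraps the function info, and its createEvaluatorFactory() produces the factory the runtime uses to instantiate evaluators. Here finfo and argFactories are assumed to come from the compiled plan:

    IFunctionDescriptor fd = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor(finfo);
    ICopyEvaluatorFactory evalFactory = fd.createEvaluatorFactory(argFactories);
    // evalFactory.createEvaluator(...) is then invoked per task at runtime.
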
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java
new file mode 100755
index 0000000..fc629ea
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class ExternalFunctionProvider {
+
+ private static Map<IExternalFunctionInfo, ExternalScalarFunction> functionRepo = new HashMap<IExternalFunctionInfo, ExternalScalarFunction>();
+
+ public static IExternalFunction getExternalFunctionEvaluator(IExternalFunctionInfo finfo,
+ ICopyEvaluatorFactory args[], IDataOutputProvider outputProvider) throws AlgebricksException {
+ switch (finfo.getKind()) {
+ case SCALAR:
+ // Note: the functionRepo cache is currently disabled (see the commented-out
+ // put below), so a fresh instance is created on every call.
+ ExternalScalarFunction function = new ExternalScalarFunction(finfo, args, outputProvider);
+ // functionRepo.put(finfo, function);
+ return function;
+ case AGGREGATE:
+ case UNNEST:
+ throw new IllegalArgumentException(" not supported function kind" + finfo.getKind());
+ default:
+ throw new IllegalArgumentException(" unknown function kind" + finfo.getKind());
+ }
+ }
+}
+
+class ExternalScalarFunction extends ExternalFunction implements IExternalScalarFunction, ICopyEvaluator {
+
+ public ExternalScalarFunction(IExternalFunctionInfo finfo, ICopyEvaluatorFactory args[],
+ IDataOutputProvider outputProvider) throws AlgebricksException {
+ super(finfo, args, outputProvider);
+ try {
+ initialize(functionHelper);
+ } catch (Exception e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ try {
+ setArguments(tuple);
+ evaluate(functionHelper);
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new AlgebricksException(e);
+ }
+ }
+
+ public void evaluate(IFunctionHelper argumentProvider) throws Exception {
+ ((IExternalScalarFunction) externalFunction).evaluate(argumentProvider);
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
new file mode 100755
index 0000000..4e3f9fc
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ExternalScalarFunctionEvaluatorFactory implements ICopyEvaluatorFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final IExternalFunctionInfo finfo;
+ private final ICopyEvaluatorFactory[] args;
+
+ public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, ICopyEvaluatorFactory[] args)
+ throws AlgebricksException {
+ this.finfo = finfo;
+ this.args = args;
+ }
+
+ @Override
+ public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
+ return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, output);
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalFunction.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalFunction.java
new file mode 100755
index 0000000..24ba691
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalFunction.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+public interface IExternalFunction {
+
+ public void initialize(IFunctionHelper functionHelper) throws Exception;
+
+ public void deinitialize();
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalScalarFunction.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalScalarFunction.java
new file mode 100755
index 0000000..3348466
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IExternalScalarFunction.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+public interface IExternalScalarFunction extends IExternalFunction {
+
+ public void evaluate(IFunctionHelper functionHelper) throws Exception;
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionFactory.java
new file mode 100755
index 0000000..f2f2a52
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionFactory.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+public interface IFunctionFactory {
+
+ public IExternalFunction getExternalFunction();
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionHelper.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionHelper.java
new file mode 100755
index 0000000..43eef52
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IFunctionHelper.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.library.java.IJObject;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+
+public interface IFunctionHelper {
+
+ public IJObject getArgument(int index);
+
+ public IJObject getResultObject();
+
+ public void setResult(IJObject result) throws IOException, AsterixException;
+
+ public IJObject getObject(JTypeTag jtypeTag);
+}
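
A hedged end-to-end example of a user-defined function written against the three interfaces above (IExternalScalarFunction, IFunctionFactory, IFunctionHelper). The class names, the JString accessors (getValue/setValue), and the upper-casing logic are illustrative assumptions, not part of this patch:

    public class ToUpperFunction implements IExternalScalarFunction {

        @Override
        public void initialize(IFunctionHelper functionHelper) {
            // no per-instance state needed in this sketch
        }

        @Override
        public void deinitialize() {
        }

        @Override
        public void evaluate(IFunctionHelper functionHelper) throws Exception {
            // assumed accessors: JString.getValue()/setValue(String)
            JString input = (JString) functionHelper.getArgument(0);
            JString result = (JString) functionHelper.getResultObject();
            result.setValue(input.getValue().toUpperCase());
            functionHelper.setResult(result);
        }
    }

    // The factory named in a function's body is loaded reflectively by
    // ExternalFunction (see above), so it needs a no-arg constructor.
    class ToUpperFactory implements IFunctionFactory {
        @Override
        public IExternalFunction getExternalFunction() {
            return new ToUpperFunction();
        }
    }
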
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IResultCollector.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IResultCollector.java
new file mode 100755
index 0000000..741e071
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/IResultCollector.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.IAObject;
+
+public interface IResultCollector {
+
+ public void writeIntResult(int result) throws AsterixException;
+
+ public void writeFloatResult(float result) throws AsterixException;
+
+ public void writeDoubleResult(double result) throws AsterixException;
+
+ public void writeStringResult(String result) throws AsterixException;
+
+ public void writeRecordResult(ARecord result) throws AsterixException;
+
+ public void writeListResult(AOrderedList list) throws AsterixException;
+
+ public IAObject getComplexTypeResultHolder();
+
+ public DataOutput getDataOutput();
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java
new file mode 100644
index 0000000..3c5ddfd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.external.library.java.IJObject;
+import edu.uci.ics.asterix.external.library.java.JObjects.JBoolean;
+import edu.uci.ics.asterix.external.library.java.JObjects.JCircle;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDate;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDateTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDouble;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDuration;
+import edu.uci.ics.asterix.external.library.java.JObjects.JFloat;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInt;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInterval;
+import edu.uci.ics.asterix.external.library.java.JObjects.JLine;
+import edu.uci.ics.asterix.external.library.java.JObjects.JLong;
+import edu.uci.ics.asterix.external.library.java.JObjects.JOrderedList;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint3D;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPolygon;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRectangle;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JObjects.JTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.container.IObjectFactory;
+
+public class JTypeObjectFactory implements IObjectFactory<IJObject, IAType> {
+
+ @Override
+ public IJObject create(IAType type) {
+ IJObject retValue = null;
+ switch (type.getTypeTag()) {
+ case INT32:
+ retValue = new JInt(0);
+ break;
+ case STRING:
+ retValue = new JString("");
+ break;
+ case FLOAT:
+ retValue = new JFloat(0);
+ break;
+ case DOUBLE:
+ retValue = new JDouble(0);
+ break;
+ case BOOLEAN:
+ retValue = new JBoolean(false);
+ break;
+ case CIRCLE:
+ retValue = new JCircle(new JPoint(0, 0), 0);
+ break;
+ case POINT:
+ retValue = new JPoint(0, 0);
+ break;
+ case POINT3D:
+ retValue = new JPoint3D(0, 0, 0);
+ break;
+ case POLYGON:
+ retValue = new JPolygon(new ArrayList<JPoint>());
+ break;
+ case LINE:
+ retValue = new JLine(new JPoint(0, 0), new JPoint(0, 0));
+ break;
+ case RECTANGLE:
+ retValue = new JRectangle(new JPoint(0, 0), new JPoint(1, 1));
+ break;
+ case DATE:
+ retValue = new JDate(0);
+ break;
+ case DATETIME:
+ retValue = new JDateTime(0);
+ break;
+ case DURATION:
+ retValue = new JDuration(0, 0);
+ break;
+ case INTERVAL:
+ retValue = new JInterval(0, 0);
+ break;
+ case TIME:
+ retValue = new JTime(0);
+ break;
+ case INT64:
+ retValue = new JLong(0);
+ break;
+ case ORDEREDLIST:
+ AOrderedListType ot = (AOrderedListType) type;
+ IAType orderedItemType = ot.getItemType();
+ IJObject orderedItemObject = create(orderedItemType);
+ retValue = new JOrderedList(orderedItemObject);
+ break;
+ case UNORDEREDLIST:
+ AUnorderedListType ut = (AUnorderedListType) type;
+ IAType unorderedItemType = ut.getItemType();
+ IJObject unorderedItemObject = create(unorderedItemType);
+ retValue = new JUnorderedList(unorderedItemObject);
+ break;
+ case RECORD:
+ IAType[] fieldTypes = ((ARecordType) type).getFieldTypes();
+ IJObject[] fieldObjects = new IJObject[fieldTypes.length];
+ int index = 0;
+ for (IAType fieldType : fieldTypes) {
+ fieldObjects[index] = create(fieldType);
+ index++;
+ }
+ retValue = new JRecord((ARecordType) type, fieldObjects);
+ break;
+ case UNION:
+ AUnionType unionType = (AUnionType) type;
+ List<IAType> unionList = unionType.getUnionList();
+ IJObject itemObject = null;
+ for (IAType elementType : unionList) {
+ if (!elementType.getTypeTag().equals(ATypeTag.NULL)) {
+ itemObject = create(elementType);
+ break;
+ }
+ }
+ retValue = itemObject;
+ break;
+ }
+ return retValue;
+ }
+}
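
This factory is meant to be paired with an object pool, as JavaFunctionHelper does below; a small sketch of that pattern, with reset() assumed per the usual IObjectPool contract:

    IObjectPool<IJObject, IAType> pool = new ListObjectPool<IJObject, IAType>(new JTypeObjectFactory());
    IJObject intHolder = pool.allocate(BuiltinType.AINT32);  // backed by a JInt
    IJObject strHolder = pool.allocate(BuiltinType.ASTRING); // backed by a JString
    pool.reset(); // recycle the holders instead of reallocating per call
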
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JavaFunctionHelper.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JavaFunctionHelper.java
new file mode 100644
index 0000000..192cf3e
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JavaFunctionHelper.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.library.java.IJObject;
+import edu.uci.ics.asterix.external.library.java.JObjectUtil;
+import edu.uci.ics.asterix.external.library.java.JObjects.ByteArrayAccessibleDataInputStream;
+import edu.uci.ics.asterix.external.library.java.JObjects.ByteArrayAccessibleInputStream;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.container.IObjectPool;
+import edu.uci.ics.asterix.om.util.container.ListObjectPool;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+
+public class JavaFunctionHelper implements IFunctionHelper {
+
+ private final IExternalFunctionInfo finfo;
+ private final IDataOutputProvider outputProvider;
+ private IJObject[] arguments;
+ private IJObject resultHolder;
+ private ISerializerDeserializer resultSerde;
+ private IObjectPool<IJObject, IAType> objectPool = new ListObjectPool<IJObject, IAType>(new JTypeObjectFactory());
+ byte[] buffer = new byte[32768];
+ ByteArrayAccessibleInputStream bis = new ByteArrayAccessibleInputStream(buffer, 0, buffer.length);
+ ByteArrayAccessibleDataInputStream dis = new ByteArrayAccessibleDataInputStream(bis);
+
+ public JavaFunctionHelper(IExternalFunctionInfo finfo, IDataOutputProvider outputProvider)
+ throws AlgebricksException {
+ this.finfo = finfo;
+ this.outputProvider = outputProvider;
+ List<IAType> params = finfo.getParamList();
+ arguments = new IJObject[params.size()];
+ int index = 0;
+ for (IAType param : params) {
+ this.arguments[index] = objectPool.allocate(param);
+ index++;
+ }
+ resultHolder = objectPool.allocate(finfo.getReturnType());
+ }
+
+ @Override
+ public IJObject getArgument(int index) {
+ return arguments[index];
+ }
+
+ @Override
+ public void setResult(IJObject result) throws IOException, AsterixException {
+ IAObject obj = result.getIAObject();
+ try {
+ outputProvider.getDataOutput().writeByte(obj.getType().getTypeTag().serialize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+
+ if (obj.getType().getTypeTag().equals(ATypeTag.RECORD)) {
+ ARecordType recType = (ARecordType) obj.getType();
+ if (recType.isOpen()) {
+ writeOpenRecord((JRecord) result, outputProvider.getDataOutput());
+ } else {
+ resultSerde = AqlSerializerDeserializerProvider.INSTANCE.getNonTaggedSerializerDeserializer(recType);
+ resultSerde.serialize(obj, outputProvider.getDataOutput());
+ }
+ } else {
+ resultSerde = AqlSerializerDeserializerProvider.INSTANCE.getNonTaggedSerializerDeserializer(obj.getType());
+ resultSerde.serialize(obj, outputProvider.getDataOutput());
+ }
+ reset();
+ }
+
+ private void writeOpenRecord(JRecord jRecord, DataOutput dataOutput) throws AsterixException, IOException {
+ ARecord aRecord = (ARecord) jRecord.getIAObject();
+ RecordBuilder recordBuilder = new RecordBuilder();
+ ARecordType recordType = aRecord.getType();
+ recordBuilder.reset(recordType);
+ ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage();
+ ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+ List<Boolean> openField = jRecord.getOpenField();
+
+ int fieldIndex = 0;
+ int closedFieldId = 0;
+ for (IJObject field : jRecord.getFields()) {
+ fieldValue.reset();
+ switch (field.getTypeTag()) {
+ case RECORD:
+ ARecordType recType = (ARecordType) field.getIAObject().getType();
+ if (recType.isOpen()) {
+ fieldValue.getDataOutput().writeByte(recType.getTypeTag().serialize());
+ writeOpenRecord((JRecord) field, fieldValue.getDataOutput());
+ } else {
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(
+ field.getIAObject().getType()).serialize(field.getIAObject(),
+ fieldValue.getDataOutput());
+ }
+ break;
+ default:
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(field.getIAObject().getType())
+ .serialize(field.getIAObject(), fieldValue.getDataOutput());
+ break;
+ }
+ if (openField.get(fieldIndex)) {
+ String fName = jRecord.getFieldNames().get(fieldIndex);
+ fieldName.reset();
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING).serialize(
+ new AString(fName), fieldName.getDataOutput());
+ recordBuilder.addField(fieldName, fieldValue);
+ } else {
+ recordBuilder.addField(closedFieldId, fieldValue);
+ closedFieldId++;
+ }
+ fieldIndex++;
+ }
+
+ recordBuilder.write(dataOutput, false);
+
+ }
+
+ private void reset() {
+ for (IJObject jObject : arguments) {
+ switch (jObject.getTypeTag()) {
+ case RECORD:
+ reset((JRecord) jObject);
+ break;
+ }
+ }
+ switch (resultHolder.getTypeTag()) {
+ case RECORD:
+ reset((JRecord) resultHolder);
+ break;
+ }
+ }
+
+ private void reset(JRecord jRecord) {
+ List<IJObject> fields = ((JRecord) jRecord).getFields();
+ for (IJObject field : fields) {
+ switch (field.getTypeTag()) {
+ case RECORD:
+ reset((JRecord) field);
+ break;
+ }
+ }
+ jRecord.close();
+ }
+
+ public void setArgument(int index, byte[] argument) throws IOException, AsterixException {
+ bis.setContent(argument, 1, argument.length);
+ IAType type = finfo.getParamList().get(index);
+ arguments[index] = JObjectUtil.getJType(type.getTypeTag(), type, dis, objectPool);
+ }
+
+ @Override
+ public IJObject getResultObject() {
+ return resultHolder;
+ }
+
+ @Override
+ public IJObject getObject(JTypeTag jtypeTag) {
+ IJObject retValue = null;
+ switch (jtypeTag) {
+ case INT:
+ retValue = objectPool.allocate(BuiltinType.AINT32);
+ break;
+ case STRING:
+ retValue = objectPool.allocate(BuiltinType.ASTRING);
+ break;
+ }
+ return retValue;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ResultCollector.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ResultCollector.java
new file mode 100755
index 0000000..d53b044
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ResultCollector.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableDouble;
+import edu.uci.ics.asterix.om.base.AMutableFloat;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableOrderedList;
+import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+
+public class ResultCollector implements IResultCollector {
+
+ private IAObject reusableResultObjectHolder;
+ private ByteBuffer reusableResultBinaryHolder;
+ private IDataOutputProvider outputProvider;
+ private IExternalFunctionInfo finfo;
+
+ public ResultCollector(IExternalFunctionInfo finfo, IDataOutputProvider outputProvider) {
+ this.finfo = finfo;
+ IAType returnType = finfo.getReturnType();
+ reusableResultObjectHolder = allocateResultObjectHolder(returnType);
+ reusableResultBinaryHolder = allocateResultBinaryHolder(returnType);
+ this.outputProvider = outputProvider;
+ }
+
+ private IAObject allocateResultObjectHolder(IAType type) {
+ switch (type.getTypeTag()) {
+ case INT32:
+ return new AMutableInt32(0);
+ case FLOAT:
+ return new AMutableFloat(0f);
+ case DOUBLE:
+ return new AMutableDouble(0);
+ case STRING:
+ return new AMutableString("");
+ case ORDEREDLIST:
+ return new AMutableOrderedList((AOrderedListType) type);
+ case RECORD:
+ IAType[] fieldType = ((ARecordType) type).getFieldTypes();
+ IAObject[] fieldObjects = new IAObject[fieldType.length];
+ for (int i = 0; i < fieldType.length; i++) {
+ fieldObjects[i] = allocateResultObjectHolder(fieldType[i]);
+ }
+ return new AMutableRecord((ARecordType) type, fieldObjects);
+ }
+ return null;
+ }
+
+ private ByteBuffer allocateResultBinaryHolder(IAType type) {
+ switch (type.getTypeTag()) {
+ case INT32:
+ return ByteBuffer.allocate(4);
+ case FLOAT:
+ return ByteBuffer.allocate(4);
+ case DOUBLE:
+ return ByteBuffer.allocate(8);
+ case STRING:
+ return ByteBuffer.allocate(32 * 1024);
+ case ORDEREDLIST:
+ return ByteBuffer.allocate(32 * 1024);
+ case RECORD:
+ return ByteBuffer.allocate(32 * 1024);
+ }
+ return null;
+ }
+
+ @Override
+ public void writeDoubleResult(double result) throws AsterixException {
+ ((AMutableDouble) reusableResultObjectHolder).setValue(result);
+ serializeResult(reusableResultObjectHolder);
+ }
+
+ @Override
+ public void writeFloatResult(float result) throws AsterixException {
+ ((AMutableFloat) reusableResultObjectHolder).setValue(result);
+ serializeResult(reusableResultObjectHolder);
+ }
+
+ @Override
+ public void writeIntResult(int result) throws AsterixException {
+ ((AMutableInt32) reusableResultObjectHolder).setValue(result);
+ serializeResult(reusableResultObjectHolder);
+ }
+
+ @Override
+ public void writeStringResult(String result) throws AsterixException {
+ ((AMutableString) reusableResultObjectHolder).setValue(result);
+ serializeResult(reusableResultObjectHolder);
+
+ }
+
+ @Override
+ public void writeRecordResult(ARecord result) throws AsterixException {
+ serializeResult(result);
+ }
+
+ @Override
+ public void writeListResult(AOrderedList list) throws AsterixException {
+ serializeResult(list);
+ }
+
+ public IAObject getComplexTypeResultHolder() {
+ return reusableResultObjectHolder;
+ }
+
+ private void serializeResult(IAObject object) throws AsterixException {
+ try {
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(finfo.getReturnType()).serialize(
+ object, outputProvider.getDataOutput());
+ } catch (HyracksDataException hde) {
+ throw new AsterixException(hde);
+ }
+ }
+
+ @Override
+ public DataOutput getDataOutput() {
+ return outputProvider.getDataOutput();
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/RuntimeExternalFunctionUtil.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/RuntimeExternalFunctionUtil.java
new file mode 100755
index 0000000..27613bd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/RuntimeExternalFunctionUtil.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+public class RuntimeExternalFunctionUtil {
+
+ private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
+
+ public static void registerLibraryClassLoader(String dataverseName, String libraryName, ClassLoader classLoader) {
+ String key = dataverseName + "." + libraryName;
+ synchronized (libraryClassLoaders) {
+ if (libraryClassLoaders.get(key) != null) {
+ throw new IllegalStateException("library class loader already registered!");
+ }
+ libraryClassLoaders.put(key, classLoader);
+ }
+ }
+
+ public static ClassLoader getLibraryClassLoader(String dataverseName, String libraryName) {
+ String key = dataverseName + "." + libraryName;
+ synchronized (libraryClassLoaders) {
+ return libraryClassLoaders.get(key);
+ }
+ }
+
+ public static IFunctionDescriptor getFunctionDescriptor(IFunctionInfo finfo) {
+ switch (((IExternalFunctionInfo) finfo).getKind()) {
+ case SCALAR:
+ return getScalarFunctionDescriptor(finfo);
+ case AGGREGATE:
+ case UNNEST:
+ case STATEFUL:
+ throw new NotImplementedException("External " + finfo.getFunctionIdentifier().getName()
+ + " not supported");
+ }
+ return null;
+ }
+
+ private static AbstractScalarFunctionDynamicDescriptor getScalarFunctionDescriptor(IFunctionInfo finfo) {
+ return new ExternalScalarFunctionDescriptor(finfo);
+ }
+
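+ // Argument transfer buffers are sized by type tag: exact for fixed-size
+ // primitives, a generous default for strings and complex types.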
+ public static ByteBuffer allocateArgumentBuffers(IAType type) {
+ switch (type.getTypeTag()) {
+ case INT32:
+ return ByteBuffer.allocate(4);
+ default:
+ // strings and complex types share a generous 32 KB default
+ return ByteBuffer.allocate(32 * 1024);
+ }
+ }
+
+ public static IAObject allocateArgumentObjects(IAType type) {
+ switch (type.getTypeTag()) {
+ case INT32:
+ return new AMutableInt32(0);
+ case STRING:
+ return new AMutableString("");
+ default:
+ return null;
+ /*
+ ARecordType recordType = (ARecordType) type;
+ IAType[] fieldTypes = recordType.getFieldTypes();
+ IAObject[] fields = new IAObject[fieldTypes.length];
+ for (int i = 0; i < fields.length; i++) {
+ fields[i] = allocateArgumentObjects(fieldTypes[i]);
+ }
+ return new AMutableRecord((ARecordType) type, fields);
+ */
+ }
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJObject.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJObject.java
new file mode 100644
index 0000000..ff8e563
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJObject.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.java;
+
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+
+public interface IJObject {
+
+ public ATypeTag getTypeTag();
+
+ public IAObject getIAObject();
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJType.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJType.java
new file mode 100644
index 0000000..dfbc464
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/IJType.java
@@ -0,0 +1,11 @@
+package edu.uci.ics.asterix.external.library.java;
+
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+
+public interface IJType {
+
+ public ATypeTag getTypeTag();
+
+ public IAObject getIAObject();
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
new file mode 100644
index 0000000..0c5d287
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.java;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
+import edu.uci.ics.asterix.external.library.java.JObjects.ByteArrayAccessibleDataInputStream;
+import edu.uci.ics.asterix.external.library.java.JObjects.JBoolean;
+import edu.uci.ics.asterix.external.library.java.JObjects.JCircle;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDate;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDateTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDouble;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDuration;
+import edu.uci.ics.asterix.external.library.java.JObjects.JFloat;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInt;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInterval;
+import edu.uci.ics.asterix.external.library.java.JObjects.JLine;
+import edu.uci.ics.asterix.external.library.java.JObjects.JOrderedList;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint3D;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPolygon;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRectangle;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JObjects.JTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.om.util.container.IObjectPool;
+
+public class JObjectUtil {
+
+ public static IJObject getJType(ATypeTag typeTag, IAType type, ByteArrayAccessibleDataInputStream dis,
+ IObjectPool<IJObject, IAType> objectPool) throws IOException, AsterixException {
+ IJObject jObject;
+
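+ // Deserializes one (untagged) value of the given type tag from the stream into
+ // a pooled IJObject; container cases recurse for their elements and fields.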
+ switch (typeTag) {
+
+ case INT32: {
+ int v = dis.readInt();
+ jObject = objectPool.allocate(BuiltinType.AINT32);
+ ((JInt) jObject).setValue(v);
+ break;
+ }
+
+ case FLOAT: {
+ float v = dis.readFloat();
+ jObject = objectPool.allocate(BuiltinType.AFLOAT);
+ ((JFloat) jObject).setValue(v);
+ break;
+ }
+
+ case DOUBLE: {
+ double value = dis.readDouble();
+ jObject = objectPool.allocate(BuiltinType.ADOUBLE);
+ ((JDouble) jObject).setValue(value);
+ break;
+ }
+
+ case STRING: {
+ String v = dis.readUTF();
+ jObject = objectPool.allocate(BuiltinType.ASTRING);
+ ((JString) jObject).setValue(v);
+ break;
+ }
+
+ case BOOLEAN:
+ jObject = objectPool.allocate(BuiltinType.ABOOLEAN);
+ ((JBoolean) jObject).setValue(dis.readBoolean());
+ break;
+
+ case DATE: {
+ int d = dis.readInt();
+ jObject = objectPool.allocate(BuiltinType.ADATE);
+ ((JDate) jObject).setValue(d);
+ break;
+ }
+
+ case DATETIME: {
+ jObject = objectPool.allocate(BuiltinType.ADATETIME);
+ long value = dis.readLong();
+ ((JDateTime) jObject).setValue(value);
+ break;
+ }
+
+ case DURATION: {
+ jObject = objectPool.allocate(BuiltinType.ADURATION);
+ int months = dis.readInt();
+ long msecs = dis.readLong();
+ ((JDuration) jObject).setValue(months, msecs);
+ break;
+ }
+
+ case TIME: {
+ jObject = objectPool.allocate(BuiltinType.ATIME);
+ int time = dis.readInt();
+ ((JTime) jObject).setValue(time);
+ break;
+ }
+
+ case INTERVAL: {
+ jObject = objectPool.allocate(BuiltinType.AINTERVAL);
+ long start = dis.readLong();
+ long end = dis.readLong();
+ byte intervalType = dis.readByte();
+ ((JInterval) jObject).setValue(start, end, intervalType);
+ break;
+ }
+
+ case CIRCLE: {
+ jObject = objectPool.allocate(BuiltinType.ACIRCLE);
+ double x = dis.readDouble();
+ double y = dis.readDouble();
+ double radius = dis.readDouble();
+ JPoint jpoint = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ jpoint.setValue(x, y);
+ ((JCircle) jObject).setValue(jpoint, radius);
+ break;
+ }
+
+ case POINT: {
+ jObject = objectPool.allocate(BuiltinType.APOINT);
+ double x = dis.readDouble();
+ double y = dis.readDouble();
+ ((JPoint) jObject).setValue(x, y);
+ break;
+ }
+
+ case POINT3D: {
+ jObject = objectPool.allocate(BuiltinType.APOINT3D);
+ double x = dis.readDouble();
+ double y = dis.readDouble();
+ double z = dis.readDouble();
+ ((JPoint3D) jObject).setValue(x, y, z);
+ break;
+ }
+
+ case LINE: {
+ jObject = objectPool.allocate(BuiltinType.ALINE);
+ double x1 = dis.readDouble();
+ double y1 = dis.readDouble();
+ double x2 = dis.readDouble();
+ double y2 = dis.readDouble();
+ JPoint jpoint1 = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ jpoint1.setValue(x1, y1);
+ JPoint jpoint2 = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ jpoint2.setValue(x2, y2);
+ ((JLine) jObject).setValue(jpoint1, jpoint2);
+ break;
+ }
+
+ case POLYGON: {
+ jObject = objectPool.allocate(BuiltinType.APOLYGON);
+ short numberOfPoints = dis.readShort();
+ List<JPoint> points = new ArrayList<JPoint>();
+ for (int i = 0; i < numberOfPoints; i++) {
+ JPoint p1 = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ p1.setValue(dis.readDouble(), dis.readDouble());
+ points.add(p1);
+ }
+ ((JPolygon) jObject).setValue(points);
+ break;
+ }
+
+ case RECTANGLE: {
+ jObject = objectPool.allocate(BuiltinType.ARECTANGLE);
+ double x1 = dis.readDouble();
+ double y1 = dis.readDouble();
+ double x2 = dis.readDouble();
+ double y2 = dis.readDouble();
+ JPoint jpoint1 = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ jpoint1.setValue(x1, y1);
+ JPoint jpoint2 = (JPoint) objectPool.allocate(BuiltinType.APOINT);
+ jpoint2.setValue(x2, y2);
+ ((JRectangle) jObject).setValue(jpoint1, jpoint2);
+ break;
+ }
+
+ case UNORDEREDLIST: {
+ AUnorderedListType listType = (AUnorderedListType) type;
+ IAType elementType = listType.getItemType();
+ jObject = objectPool.allocate(listType);
+
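+ // Items of variable-length types are preceded by a per-item offset array in
+ // the serialized list; fixed-size items are packed back to back.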
+ boolean fixedSize = false;
+ ATypeTag tag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(dis.readByte());
+ switch (tag) {
+ case STRING:
+ case RECORD:
+ case ORDEREDLIST:
+ case UNORDEREDLIST:
+ case ANY:
+ fixedSize = false;
+ break;
+ default:
+ fixedSize = true;
+ break;
+ }
+ dis.readInt(); // list size
+ int numberOfitems = dis.readInt();
+ if (numberOfitems > 0) {
+ if (!fixedSize) {
+ for (int i = 0; i < numberOfitems; i++) {
+ dis.readInt(); // skip the per-item offset
+ }
+ }
+ for (int i = 0; i < numberOfitems; i++) {
+ IJObject v = getJType(elementType.getTypeTag(), elementType, dis, objectPool);
+ ((JUnorderedList) jObject).add(v);
+ }
+ }
+
+ break;
+ }
+ case ORDEREDLIST: {
+ AOrderedListType listType = (AOrderedListType) type;
+ IAType elementType = listType.getItemType();
+ jObject = objectPool.allocate(listType);
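+ // Same wire layout as UNORDEREDLIST: an offset array precedes the items
+ // whenever the element type is variable-length.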
+ boolean fixedSize = false;
+ ATypeTag tag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(dis.readByte());
+ switch (tag) {
+ case STRING:
+ case RECORD:
+ case ORDEREDLIST:
+ case UNORDEREDLIST:
+ case ANY:
+ fixedSize = false;
+ break;
+ default:
+ fixedSize = true;
+ break;
+ }
+
+ dis.readInt(); // list size
+ int numberOfitems = dis.readInt();
+ if (numberOfitems > 0) {
+ if (!fixedSize) {
+ for (int i = 0; i < numberOfitems; i++) {
+ dis.readInt(); // skip the per-item offset
+ }
+ }
+ for (int i = 0; i < numberOfitems; i++) {
+ IJObject v = getJType(elementType.getTypeTag(), elementType, dis, objectPool);
+ ((JOrderedList) jObject).add(v);
+ }
+ }
+
+ break;
+ }
+ case RECORD:
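+ // Serialized record layout: 4-byte length, an "expanded" flag plus open-part
+ // offset when the type is open, an optional null bitmap, the closed-field
+ // offset array, then the field values themselves.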
+ ARecordType recordType = (ARecordType) type;
+ // open fields arrive with a null record type, so guard before dereferencing
+ int numberOfSchemaFields = recordType == null ? 0 : recordType.getFieldTypes().length;
+ byte[] recordBits = dis.getInputStream().getArray();
+ boolean isExpanded = false;
+ int s = dis.getInputStream().getPosition();
+ int recordOffset = s;
+ int openPartOffset = 0;
+ int offsetArrayOffset = 0;
+ int[] fieldOffsets = new int[numberOfSchemaFields];
+ IJObject[] closedFields = new IJObject[numberOfSchemaFields];
+
+ if (recordType == null) {
+ openPartOffset = s + AInt32SerializerDeserializer.getInt(recordBits, s + 6);
+ s += 8;
+ isExpanded = true;
+ } else {
+ dis.skip(4); // reading length is not required.
+ if (recordType.isOpen()) {
+ isExpanded = dis.readBoolean();
+ if (isExpanded) {
+ openPartOffset = s + dis.readInt(); // AInt32SerializerDeserializer.getInt(recordBits, s + 6);
+ } else {
+ // do nothing s += 6;
+ }
+ } else {
+ // do nothing s += 5;
+ }
+ }
+
+ if (numberOfSchemaFields > 0) {
+ int numOfSchemaFields = dis.readInt(); //s += 4;
+ int nullBitMapOffset = 0;
+ boolean hasNullableFields = NonTaggedFormatUtil.hasNullableField(recordType);
+ if (hasNullableFields) {
+ nullBitMapOffset = dis.getInputStream().getPosition();//s
+ offsetArrayOffset = dis.getInputStream().getPosition() //s
+ + (numberOfSchemaFields % 8 == 0 ? numberOfSchemaFields / 8
+ : numberOfSchemaFields / 8 + 1);
+ } else {
+ offsetArrayOffset = dis.getInputStream().getPosition();
+ }
+ for (int i = 0; i < numberOfSchemaFields; i++) {
+ fieldOffsets[i] = dis.readInt(); // AInt32SerializerDeserializer.getInt(recordBits, offsetArrayOffset) + recordOffset;
+ // offsetArrayOffset += 4;
+ }
+ for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
+ if (hasNullableFields) {
+ byte b1 = recordBits[nullBitMapOffset + fieldNumber / 8];
+ int p = 1 << (7 - (fieldNumber % 8));
+ if ((b1 & p) == 0) {
+ // set null value (including type tag inside)
+ //fieldValues.add(nullReference);
+ continue;
+ }
+ }
+ IAType[] fieldTypes = recordType.getFieldTypes();
+ ATypeTag fieldValueTypeTag = null;
+
+ IAType fieldType = fieldTypes[fieldNumber];
+ if (fieldTypes[fieldNumber].getTypeTag() == ATypeTag.UNION) {
+ if (NonTaggedFormatUtil.isOptionalField((AUnionType) fieldTypes[fieldNumber])) {
+ fieldType = ((AUnionType) fieldTypes[fieldNumber]).getUnionList().get(
+ NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+ fieldValueTypeTag = fieldType.getTypeTag();
+ // fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(recordBits,
+ // fieldOffsets[fieldNumber], typeTag, false);
+ }
+ } else {
+ fieldValueTypeTag = fieldTypes[fieldNumber].getTypeTag();
+ }
+ closedFields[fieldNumber] = getJType(fieldValueTypeTag, fieldType, dis, objectPool);
+ }
+ }
+ if (isExpanded) {
+ int numberOfOpenFields = dis.readInt();
+ String[] fieldNames = new String[numberOfOpenFields];
+ IAType[] fieldTypes = new IAType[numberOfOpenFields];
+ IJObject[] openFields = new IJObject[numberOfOpenFields];
+ for (int i = 0; i < numberOfOpenFields; i++) {
+ dis.readInt();
+ dis.readInt();
+ }
+ for (int i = 0; i < numberOfOpenFields; i++) {
+ fieldNames[i] = AStringSerializerDeserializer.INSTANCE.deserialize(dis).getStringValue();
+ ATypeTag openFieldTypeTag = SerializerDeserializerUtil.deserializeTag(dis);
+ openFields[i] = getJType(openFieldTypeTag, null, dis, objectPool);
+ fieldTypes[i] = openFields[i].getIAObject().getType();
+ }
+ ARecordType openPartRecType = new ARecordType(null, fieldNames, fieldTypes, true);
+ if (numberOfSchemaFields > 0) {
+ ARecordType mergedRecordType = mergeRecordTypes(recordType, openPartRecType);
+ IJObject[] mergedFields = mergeFields(closedFields, openFields);
+ return new JRecord(mergedRecordType, mergedFields);
+ } else {
+ return new JRecord(recordType, openFields);
+ }
+ } else {
+ return new JRecord(recordType, closedFields);
+ }
+
+ default:
+ throw new IllegalStateException("Argument type: " + typeTag);
+ }
+ return jObject;
+ }
+
+ private static IJObject[] mergeFields(IJObject[] closedFields, IJObject[] openFields) {
+ IJObject[] fields = new IJObject[closedFields.length + openFields.length];
+ int i = 0;
+ for (; i < closedFields.length; i++) {
+ fields[i] = closedFields[i];
+ }
+ for (int j = 0; j < openFields.length; j++) {
+ fields[closedFields.length + j] = openFields[j];
+ }
+ return fields;
+ }
+
+ private static ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) throws AsterixException {
+
+ String[] fieldNames = new String[recType1.getFieldNames().length + recType2.getFieldNames().length];
+ IAType[] fieldTypes = new IAType[recType1.getFieldTypes().length + recType2.getFieldTypes().length];
+
+ int i = 0;
+ for (; i < recType1.getFieldNames().length; i++) {
+ fieldNames[i] = recType1.getFieldNames()[i];
+ fieldTypes[i] = recType1.getFieldTypes()[i];
+ }
+
+ for (int j = 0; j < recType2.getFieldNames().length; i++, j++) {
+ fieldNames[i] = recType2.getFieldNames()[j];
+ fieldTypes[i] = recType2.getFieldTypes()[j];
+ }
+ return new ARecordType(null, fieldNames, fieldTypes, true);
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjects.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjects.java
new file mode 100644
index 0000000..e53f252
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjects.java
@@ -0,0 +1,889 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.java;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.AMutableCircle;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.AMutableDouble;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableFloat;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt64;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.AMutableLine;
+import edu.uci.ics.asterix.om.base.AMutableOrderedList;
+import edu.uci.ics.asterix.om.base.AMutablePoint;
+import edu.uci.ics.asterix.om.base.AMutablePoint3D;
+import edu.uci.ics.asterix.om.base.AMutablePolygon;
+import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.base.AMutableRectangle;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.AMutableUnorderedList;
+import edu.uci.ics.asterix.om.base.APoint;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+public class JObjects {
+
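+ // Mutable Java-side wrappers over AsterixDB's AMutable* object model, designed
+ // to be pooled and reused across UDF invocations instead of allocated per call.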
+ public static abstract class JObject implements IJObject {
+
+ protected IAObject value;
+ protected byte[] bytes;
+
+ protected JObject(IAObject value) {
+ this.value = value;
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return value.getType().getTypeTag();
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return value;
+ }
+ }
+
+ public static final class JInt implements IJObject {
+
+ private AMutableInt32 value;
+
+ public JInt(int value) {
+ this.value = new AMutableInt32(value);
+ }
+
+ public void setValue(int v) {
+ if (value == null) {
+ value = new AMutableInt32(v);
+ } else {
+ ((AMutableInt32) value).setValue(v);
+ }
+ }
+
+ public void setValue(AMutableInt32 v) {
+ value = v;
+ }
+
+ public int getValue() {
+ return ((AMutableInt32) value).getIntegerValue().intValue();
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return BuiltinType.AINT32.getTypeTag();
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return value;
+ }
+
+ }
+
+ public static final class JBoolean implements IJObject {
+
+ private boolean value;
+
+ public JBoolean(boolean value) {
+ this.value = value;
+ }
+
+ public void setValue(boolean value) {
+ this.value = value;
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.BOOLEAN;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return value ? ABoolean.TRUE : ABoolean.FALSE;
+ }
+
+ }
+
+ public static final class JLong extends JObject {
+
+ public JLong(long v) {
+ super(new AMutableInt64(v));
+ }
+
+ public void setValue(long v) {
+ ((AMutableInt64) value).setValue(v);
+ }
+
+ public long getValue() {
+ return ((AMutableInt64) value).getLongValue();
+ }
+
+ }
+
+ public static final class JDouble extends JObject {
+
+ public JDouble(double v) {
+ super(new AMutableDouble(v));
+ }
+
+ public void setValue(double v) {
+ ((AMutableDouble) value).setValue(v);
+ }
+
+ public double getValue() {
+ return ((AMutableDouble) value).getDoubleValue();
+ }
+
+ }
+
+ public static final class JString extends JObject {
+
+ public JString(String v) {
+ super(new AMutableString(v));
+ }
+
+ public void setValue(String v) {
+ ((AMutableString) value).setValue(v);
+ }
+
+ public String getValue() {
+ return ((AMutableString) value).getStringValue();
+ }
+
+ }
+
+ public static final class JFloat implements IJObject {
+
+ private AMutableFloat value;
+
+ public JFloat(float v) {
+ value = new AMutableFloat(v);
+ }
+
+ public void setValue(float v) {
+ ((AMutableFloat) value).setValue(v);
+ }
+
+ public float getValue() {
+ return ((AMutableFloat) value).getFloatValue();
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return BuiltinType.AFLOAT.getTypeTag();
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return value;
+ }
+
+ }
+
+ public static final class JPoint extends JObject {
+
+ public JPoint(double x, double y) {
+ super(new AMutablePoint(x, y));
+ }
+
+ public void setValue(double x, double y) {
+ ((AMutablePoint) value).setValue(x, y);
+ }
+
+ public double getXValue() {
+ return ((AMutablePoint) value).getX();
+ }
+
+ public double getYValue() {
+ return ((AMutablePoint) value).getY();
+ }
+
+ public IAObject getValue() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return value.toString();
+ }
+ }
+
+ public static final class JRectangle implements IJObject {
+
+ private AMutableRectangle rect;
+
+ public JRectangle(JPoint p1, JPoint p2) {
+ rect = new AMutableRectangle((APoint) p1.getValue(), (APoint) p2.getValue());
+ }
+
+ public void setValue(JPoint p1, JPoint p2) {
+ this.rect.setValue((APoint) p1.getValue(), (APoint) p2.getValue());
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.RECTANGLE;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return rect;
+ }
+
+ @Override
+ public String toString() {
+ return rect.toString();
+ }
+
+ }
+
+ public static final class JTime implements IJObject {
+
+ private AMutableTime time;
+
+ public JTime(int timeInMillsec) {
+ time = new AMutableTime(timeInMillsec);
+ }
+
+ public void setValue(int timeInMillsec) {
+ time.setValue(timeInMillsec);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.TIME;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return time;
+ }
+
+ @Override
+ public String toString() {
+ return time.toString();
+ }
+
+ }
+
+ public static final class JInterval implements IJObject {
+
+ private AMutableInterval interval;
+
+ public JInterval(long intervalStart, long intervalEnd) {
+ interval = new AMutableInterval(intervalStart, intervalEnd, (byte) 0);
+ }
+
+ public void setValue(long intervalStart, long intervalEnd, byte typetag) {
+ interval.setValue(intervalStart, intervalEnd, typetag);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.INTERVAL;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return interval;
+ }
+
+ @Override
+ public String toString() {
+ return interval.toString();
+ }
+
+ public long getIntervalStart() {
+ return interval.getIntervalStart();
+ }
+
+ public long getIntervalEnd() {
+ return interval.getIntervalEnd();
+ }
+
+ public short getIntervalType() {
+ return interval.getIntervalType();
+ }
+
+ }
+
+ public static final class JDate implements IJObject {
+
+ private AMutableDate date;
+
+ public JDate(int chrononTimeInDays) {
+ date = new AMutableDate(chrononTimeInDays);
+ }
+
+ public void setValue(int chrononTimeInDays) {
+ date.setValue(chrononTimeInDays);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.DATE;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return date;
+ }
+
+ @Override
+ public String toString() {
+ return date.toString();
+ }
+
+ }
+
+ public static final class JDateTime implements IJObject {
+
+ private AMutableDateTime dateTime;
+
+ public JDateTime(long chrononTime) {
+ dateTime = new AMutableDateTime(chrononTime);
+ }
+
+ public void setValue(long chrononTime) {
+ dateTime.setValue(chrononTime);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.DATETIME;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return dateTime;
+ }
+
+ @Override
+ public String toString() {
+ return dateTime.toString();
+ }
+
+ }
+
+ public static final class JDuration implements IJObject {
+
+ private AMutableDuration duration;
+
+ public JDuration(int months, long milliseconds) {
+ duration = new AMutableDuration(months, milliseconds);
+ }
+
+ public void setValue(int months, long milliseconds) {
+ duration.setValue(months, milliseconds);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.DURATION;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return duration;
+ }
+
+ @Override
+ public String toString() {
+ return duration.toString();
+ }
+
+ }
+
+ public static final class JPolygon implements IJObject {
+
+ private AMutablePolygon polygon;
+ private List<JPoint> points;
+
+ public JPolygon(List<JPoint> points) {
+ this.points = points;
+ }
+
+ public void setValue(List<JPoint> points) {
+ this.points = points;
+ polygon = null;
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.POLYGON;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ if (polygon == null) {
+ APoint[] pts = new APoint[points.size()];
+ int index = 0;
+ for (JPoint p : points) {
+ pts[index++] = (APoint) p.getIAObject();
+ }
+ polygon = new AMutablePolygon(pts);
+ }
+ return polygon;
+ }
+
+ @Override
+ public String toString() {
+ return getIAObject().toString();
+ }
+
+ }
+
+ public static final class JCircle implements IJObject {
+
+ private AMutableCircle circle;
+
+ public JCircle(JPoint center, double radius) {
+ circle = new AMutableCircle((APoint) center.getIAObject(), radius);
+ }
+
+ public void setValue(JPoint center, double radius) {
+ circle.setValue((APoint) center.getIAObject(), radius);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.CIRCLE;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return circle;
+ }
+
+ @Override
+ public String toString() {
+ return circle.toString();
+ }
+
+ }
+
+ public static final class JLine implements IJObject {
+
+ private AMutableLine line;
+
+ public JLine(JPoint p1, JPoint p2) {
+ line = new AMutableLine((APoint) p1.getIAObject(), (APoint) p2.getIAObject());
+ }
+
+ public void setValue(JPoint p1, JPoint p2) {
+ line.setValue((APoint) p1.getIAObject(), (APoint) p2.getIAObject());
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.LINE;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return line;
+ }
+
+ @Override
+ public String toString() {
+ return line.toString();
+ }
+
+ }
+
+ public static final class JPoint3D implements IJObject {
+
+ private AMutablePoint3D value;
+
+ public JPoint3D(double x, double y, double z) {
+ value = new AMutablePoint3D(x, y, z);
+ }
+
+ public void setValue(double x, double y, double z) {
+ value.setValue(x, y, z);
+ }
+
+ public double getXValue() {
+ return ((AMutablePoint3D) value).getX();
+ }
+
+ public double getYValue() {
+ return ((AMutablePoint3D) value).getY();
+ }
+
+ public double getZValue() {
+ return ((AMutablePoint3D) value).getZ();
+ }
+
+ public IAObject getValue() {
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return value.toString();
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.POINT3D;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ return value;
+ }
+ }
+
+ public static final class JOrderedList implements IJObject {
+
+ private AOrderedListType listType;
+ private List<IJObject> jObjects;
+
+ public JOrderedList(IJObject jObject) {
+ this.listType = new AOrderedListType(jObject.getIAObject().getType(), null);
+ this.jObjects = new ArrayList<IJObject>();
+ }
+
+ public void add(IJObject jObject) {
+ jObjects.add(jObject);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.ORDEREDLIST;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ AMutableOrderedList v = new AMutableOrderedList(listType);
+ for (IJObject jObj : jObjects) {
+ v.add(jObj.getIAObject());
+ }
+ return v;
+ }
+
+ public AOrderedListType getListType() {
+ return listType;
+ }
+
+ public void addAll(Collection<IJObject> jObjectCollection) {
+ jObjects.addAll(jObjectCollection);
+ }
+
+ public void clear() {
+ jObjects.clear();
+ }
+
+ public IJObject getElement(int index) {
+ return jObjects.get(index);
+ }
+
+ public int size() {
+ return jObjects.size();
+ }
+
+ }
+
+ public static final class JUnorderedList implements IJObject {
+
+ private AUnorderedListType listType;
+ private List<IJObject> jObjects;
+
+ public JUnorderedList(IJObject jObject) {
+ this.listType = new AUnorderedListType(jObject.getIAObject().getType(), null);
+ this.jObjects = new ArrayList<IJObject>();
+ }
+
+ public void add(IJObject jObject) {
+ jObjects.add(jObject);
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.UNORDEREDLIST;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ AMutableUnorderedList v = new AMutableUnorderedList(listType);
+ for (IJObject jObj : jObjects) {
+ v.add(jObj.getIAObject());
+ }
+ return v;
+ }
+
+ public AUnorderedListType getListType() {
+ return listType;
+ }
+
+ public boolean isEmpty() {
+ return jObjects.isEmpty();
+ }
+
+ public void addAll(Collection<IJObject> jObjectCollection) {
+ jObjects.addAll(jObjectCollection);
+ }
+
+ public void clear() {
+ jObjects.clear();
+ }
+
+ public IJObject getElement(int index) {
+ return jObjects.get(index);
+ }
+
+ public int size() {
+ return jObjects.size();
+ }
+
+ }
+
+ public static final class JRecord implements IJObject {
+
+ private AMutableRecord value;
+ private ARecordType recordType;
+ private List<IJObject> fields;
+ private List<String> fieldNames;
+ private List<IAType> fieldTypes;
+ private int numFieldsAdded = 0;
+ private List<Boolean> openField;
+
+ public JRecord(ARecordType recordType) {
+ this.recordType = recordType;
+ this.fields = new ArrayList<IJObject>();
+ initFieldInfo();
+ }
+
+ public JRecord(ARecordType recordType, IJObject[] fields) {
+ this.recordType = recordType;
+ this.fields = new ArrayList<IJObject>();
+ for (IJObject jObject : fields) {
+ this.fields.add(jObject);
+ }
+ initFieldInfo();
+ }
+
+ public JRecord(String[] fieldNames, IJObject[] fields) throws AsterixException {
+ this.recordType = getARecordType(fieldNames, fields);
+ this.fields = new ArrayList<IJObject>();
+ for (IJObject jObject : fields) {
+ this.fields.add(jObject);
+ }
+ initFieldInfo();
+ }
+
+ private ARecordType getARecordType(String[] fieldNames, IJObject[] fields) throws AsterixException {
+ IAType[] fieldTypes = new IAType[fields.length];
+ int index = 0;
+ for (IJObject jObj : fields) {
+ fieldTypes[index++] = jObj.getIAObject().getType();
+ }
+ ARecordType recordType = new ARecordType(null, fieldNames, fieldTypes, false);
+ return recordType;
+ }
+
+ private void initFieldInfo() {
+ this.openField = new ArrayList<Boolean>();
+ fieldNames = new ArrayList<String>();
+ for (String name : recordType.getFieldNames()) {
+ fieldNames.add(name);
+ openField.add(false);
+ }
+ fieldTypes = new ArrayList<IAType>();
+ for (IAType type : recordType.getFieldTypes()) {
+ fieldTypes.add(type);
+ }
+
+ }
+
+ private IAObject[] getIAObjectArray(List<IJObject> fields) {
+ IAObject[] retValue = new IAObject[fields.size()];
+ int index = 0;
+ for (IJObject jObject : fields) {
+ retValue[index++] = getIAObject(jObject);
+ }
+ return retValue;
+ }
+
+ private IAObject getIAObject(IJObject jObject) {
+ IAObject retVal = null;
+ switch (jObject.getTypeTag()) {
+ case RECORD:
+ ARecordType recType = ((JRecord) jObject).getRecordType();
+ IAObject[] fields = new IAObject[((JRecord) jObject).getFields().size()];
+ int index = 0;
+ for (IJObject field : ((JRecord) jObject).getFields()) {
+ fields[index++] = getIAObject(field);
+ }
+ retVal = new AMutableRecord(recType, fields);
+ break; // without this break the record fell through and was overwritten below
+ default:
+ retVal = jObject.getIAObject();
+ break;
+ }
+ return retVal;
+ }
+
+ public void addField(String fieldName, IJObject fieldValue) {
+ int pos = getFieldPosByName(fieldName);
+ if (pos >= 0) {
+ throw new IllegalArgumentException("field already defined");
+ }
+ numFieldsAdded++;
+ fields.add(fieldValue);
+ fieldNames.add(fieldName);
+ fieldTypes.add(fieldValue.getIAObject().getType());
+ openField.add(true);
+ }
+
+ public IJObject getValueByName(String fieldName) throws AsterixException, IOException {
+ int fieldPos = getFieldPosByName(fieldName);
+ if (fieldPos < 0) {
+ throw new AsterixException("unknown field: " + fieldName);
+ }
+ return fields.get(fieldPos);
+ }
+
+ public void setValueAtPos(int pos, IJObject jtype) {
+ fields.set(pos, jtype);
+ }
+
+ public void setValue(AMutableRecord mutableRecord) {
+ this.value = mutableRecord;
+ this.recordType = mutableRecord.getType();
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return recordType.getTypeTag();
+ }
+
+ public void setField(String fieldName, IJObject fieldValue) {
+ int pos = getFieldPosByName(fieldName);
+ fields.set(pos, fieldValue);
+ if (value != null) {
+ value.setValueAtPos(pos, fieldValue.getIAObject());
+ }
+ }
+
+ private int getFieldPosByName(String fieldName) {
+ int index = 0;
+ for (String name : fieldNames) {
+ if (name.equals(fieldName)) {
+ return index;
+ }
+ index++;
+ }
+ return -1;
+ }
+
+ public ARecordType getRecordType() {
+ return recordType;
+ }
+
+ public List<IJObject> getFields() {
+ return fields;
+ }
+
+ @Override
+ public IAObject getIAObject() {
+ if (value == null || numFieldsAdded > 0) {
+ value = new AMutableRecord(recordType, getIAObjectArray(fields));
+ }
+ return value;
+ }
+
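+ // Drops any open fields appended via addField() during an evaluation so the
+ // pooled record reverts to its schema-declared shape before reuse.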
+ public void close() {
+ if (numFieldsAdded > 0) {
+ int totalFields = fieldNames.size();
+ for (int i = 0; i < numFieldsAdded; i++) {
+ fieldNames.remove(totalFields - 1 - i);
+ fieldTypes.remove(totalFields - 1 - i);
+ fields.remove(totalFields - 1 - i);
+ openField.remove(totalFields - 1 - i);
+ }
+ numFieldsAdded = 0;
+ }
+ }
+
+ public List<Boolean> getOpenField() {
+ return openField;
+ }
+
+ public List<String> getFieldNames() {
+ return fieldNames;
+ }
+
+ public List<IAType> getFieldTypes() {
+ return fieldTypes;
+ }
+
+ }
+
+ public static class ByteArrayAccessibleInputStream extends ByteArrayInputStream {
+
+ public ByteArrayAccessibleInputStream(byte[] buf, int offset, int length) {
+ super(buf, offset, length);
+ }
+
+ public void setContent(byte[] buf, int offset, int length) {
+ this.buf = buf;
+ this.pos = offset;
+ this.count = Math.min(offset + length, buf.length);
+ this.mark = offset;
+ }
+
+ public byte[] getArray() {
+ return buf;
+ }
+
+ public int getPosition() {
+ return pos;
+ }
+
+ public int getCount() {
+ return count;
+ }
+
+ }
+
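+ // DataInputStream variant that exposes the backing byte array and the current
+ // position, which JObjectUtil needs for offset arithmetic while parsing records.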
+ public static class ByteArrayAccessibleDataInputStream extends DataInputStream {
+
+ public ByteArrayAccessibleDataInputStream(ByteArrayAccessibleInputStream in) {
+ super(in);
+ }
+
+ public ByteArrayAccessibleInputStream getInputStream() {
+ return (ByteArrayAccessibleInputStream) in;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JTypeTag.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JTypeTag.java
new file mode 100644
index 0000000..c851197
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JTypeTag.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.java;
+
+public enum JTypeTag {
+
+ INT,
+ STRING,
+ LONG,
+ DOUBLE,
+ FLOAT,
+ LIST,
+ OBJECT
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java
index d6de58e5..113a112 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java
@@ -35,8 +35,7 @@
if (nodeControllers == null || nodeControllers.isEmpty()) {
throw new AsterixException(" No node controllers found at the address: " + value);
}
- String chosenNCId = nodeControllers.toArray(new String[]{})[random
- .nextInt(nodeControllers.size())];
+ String chosenNCId = nodeControllers.toArray(new String[] {})[random.nextInt(nodeControllers.size())];
return chosenNCId;
} catch (AsterixException ae) {
throw ae;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java
index c9572cd..b590ab9 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java
@@ -17,8 +17,7 @@
import edu.uci.ics.asterix.common.exceptions.AsterixException;
/**
- * A policy for resolving a name to a node controller id.
- *
+ * A policy for resolving a name to a node controller id.
*/
public interface INodeResolver {
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java
index 655fe3e..9335f9e 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java
@@ -17,7 +17,7 @@
/**
* Factory for creating an instance of INodeResolver
*
- * @see INodeResolver
+ * @see INodeResolver
*/
public interface INodeResolverFactory {
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
deleted file mode 100644
index 87e8478..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.managed.adapter;
-
-import java.util.Map;
-
-/**
- * Interface implemented by an adapter that can be controlled or managed by external
- * commands (stop,alter)
- */
-public interface IManagedFeedAdapter {
-
- /**
- * Discontinue the ingestion of data and end the feed.
- *
- * @throws Exception
- */
- public void stop();
-
- /**
- * Modify the adapter configuration parameters. This method is called
- * when the configuration parameters need to be modified while the adapter
- * is ingesting data in an active feed.
- *
- * @param properties
- * A HashMap containing the set of configuration parameters
- * that need to be altered.
- */
- public void alter(Map<String, String> properties);
-
-}
diff --git a/asterix-external-data/src/main/resources/schema/library.xsd b/asterix-external-data/src/main/resources/schema/library.xsd
new file mode 100644
index 0000000..00f71f5
--- /dev/null
+++ b/asterix-external-data/src/main/resources/schema/library.xsd
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:lib="library" targetNamespace="library" elementFormDefault="qualified">
+
+ <!-- definition of simple types -->
+ <xs:element name="language" type="xs:string" />
+ <xs:element name="name" type="xs:string" />
+ <xs:element name="arguments" type="xs:string" />
+ <xs:element name="return_type" type="xs:string" />
+ <xs:element name="function_type" type="xs:string" />
+ <xs:element name="definition" type="xs:string" />
+ <xs:element name="factory_class" type="xs:string" />
+
+ <!-- definition of complex elements -->
+ <xs:element name="libraryFunction">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="lib:name" />
+ <xs:element ref="lib:function_type" />
+ <xs:element ref="lib:arguments" />
+ <xs:element ref="lib:return_type" />
+ <xs:element ref="lib:definition" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="libraryFunctions">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="lib:libraryFunction" maxOccurs="unbounded" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
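+ <!-- an adapter is declared by name and instantiated through its factory class -->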
+ <xs:element name="libraryAdapter">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="lib:name" />
+ <xs:element ref="lib:factory_class" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="libraryAdapters">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="lib:libraryAdapter" maxOccurs="unbounded" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="externalLibrary">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="lib:language" />
+ <xs:element ref="lib:libraryFunctions" minOccurs="0" />
+ <xs:element ref="lib:libraryAdapters" minOccurs="0" />
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFactory.java
new file mode 100644
index 0000000..f095321
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalFunction;
+import edu.uci.ics.asterix.external.library.IFunctionFactory;
+
+public class AllTypesFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalFunction getExternalFunction() {
+ return new AllTypesFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFunction.java
new file mode 100644
index 0000000..c5063db
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/AllTypesFunction.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionHelper;
+import edu.uci.ics.asterix.external.library.java.JObjects.JBoolean;
+import edu.uci.ics.asterix.external.library.java.JObjects.JCircle;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDate;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDateTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDouble;
+import edu.uci.ics.asterix.external.library.java.JObjects.JDuration;
+import edu.uci.ics.asterix.external.library.java.JObjects.JFloat;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInt;
+import edu.uci.ics.asterix.external.library.java.JObjects.JLine;
+import edu.uci.ics.asterix.external.library.java.JObjects.JOrderedList;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPoint3D;
+import edu.uci.ics.asterix.external.library.java.JObjects.JPolygon;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JObjects.JTime;
+import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+
+public class AllTypesFunction implements IExternalScalarFunction {
+
+ private JOrderedList newFieldList;
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) {
+ newFieldList = new JOrderedList(functionHelper.getObject(JTypeTag.INT));
+ }
+
+ @Override
+ public void deinitialize() {
+ }
+
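+ // Copies every supported field type from the input record into the result
+ // record, then appends open fields to exercise addField() on the way out.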
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ newFieldList.clear();
+ JRecord inputRecord = (JRecord) functionHelper.getArgument(0);
+ JInt id = (JInt) inputRecord.getValueByName("id");
+ JString name = (JString) inputRecord.getValueByName("name");
+ JFloat age = (JFloat) inputRecord.getValueByName("age");
+ JDouble salary = (JDouble) inputRecord.getValueByName("salary");
+ JBoolean married = (JBoolean) inputRecord.getValueByName("married");
+ JUnorderedList interest = (JUnorderedList) inputRecord
+ .getValueByName("interests");
+ JOrderedList children = (JOrderedList) inputRecord
+ .getValueByName("children");
+ JRecord address = (JRecord) inputRecord.getValueByName("address");
+ JDate dob = (JDate) inputRecord.getValueByName("dob");
+ JTime time = (JTime) inputRecord.getValueByName("time");
+ JDateTime dateTime = (JDateTime) inputRecord.getValueByName("datetime");
+ JDuration duration = (JDuration) inputRecord.getValueByName("duration");
+ JPoint location2d = (JPoint) inputRecord.getValueByName("location2d");
+ JPoint3D location3d = (JPoint3D) inputRecord
+ .getValueByName("location3d");
+ JLine line = (JLine) inputRecord.getValueByName("line");
+ JPolygon polygon = (JPolygon) inputRecord.getValueByName("polygon");
+ JCircle circle = (JCircle) inputRecord.getValueByName("circle");
+
+ JRecord result = (JRecord) functionHelper.getResultObject();
+ result.setField("id", id);
+ result.setField("name", name);
+ result.setField("age", age);
+ result.setField("salary", salary);
+ result.setField("married", married);
+ result.setField("interests", interest);
+ result.setField("children", children);
+ JInt zipCode = (JInt) functionHelper.getObject(JTypeTag.INT);
+ zipCode.setValue(92841);
+ address.addField("Zipcode", zipCode);
+ result.setField("address", address);
+ result.setField("dob", dob);
+ result.setField("time", time);
+ result.setField("datetime", dateTime);
+ result.setField("duration", duration);
+ result.setField("location2d", location2d);
+ result.setField("location3d", location3d);
+ result.setField("line", line);
+ result.setField("polygon", polygon);
+ result.setField("circle", circle);
+
+ JString newFieldString = (JString) functionHelper
+ .getObject(JTypeTag.STRING);
+ newFieldString.setValue("processed");
+ result.addField("status", newFieldString);
+
+ /*
+ * JString element = (JString)
+ * functionHelper.getObject(JTypeTag.STRING); element.setValue("raman");
+ * newFieldList.add(element); result.addField("mylist", newFieldList);
+ */
+
+ JString newFieldString2 = (JString) functionHelper
+ .getObject(JTypeTag.STRING);
+ newFieldString2.setValue("this is working");
+ result.addField("working", newFieldString2);
+ functionHelper.setResult(result);
+ }
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFactory.java
new file mode 100644
index 0000000..872b542
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionFactory;
+
+public class CapitalFinderFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalScalarFunction getExternalFunction() {
+ return new CapitalFinderFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFunction.java
new file mode 100644
index 0000000..8a11757
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/CapitalFinderFunction.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.io.InputStream;
+import java.util.Properties;
+
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+
+public class CapitalFinderFunction implements IExternalScalarFunction {
+
+ private static Properties capitalList;
+ private static final String NOT_FOUND = "NOT_FOUND";
+ private JString capital;
+
+ @Override
+ public void deinitialize() {
+ System.out.println("De-Initialized");
+ }
+
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ JString country = ((JString) functionHelper.getArgument(0));
+ JRecord record = (JRecord) functionHelper.getResultObject();
+ String capitalCity = capitalList.getProperty(country.getValue(), NOT_FOUND);
+ capital.setValue(capitalCity);
+
+ record.setField("country", country);
+ record.setField("capital", capital);
+ functionHelper.setResult(record);
+ }
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) throws Exception {
+ // load the static lookup table once and close the resource stream
+ if (capitalList == null) {
+ InputStream in = CapitalFinderFunction.class.getClassLoader().getResourceAsStream(
+ "data/countriesCapitals.txt");
+ capitalList = new Properties();
+ capitalList.load(in);
+ in.close();
+ }
+ capital = (JString) functionHelper.getObject(JTypeTag.STRING);
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFactory.java
new file mode 100644
index 0000000..d15d661
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFactory.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+public class EchoDelayFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalScalarFunction getExternalFunction() {
+ return new EchoDelayFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFunction.java
new file mode 100644
index 0000000..ce63eec
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/EchoDelayFunction.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.util.Random;
+
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+
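+/**
+ * Sample external scalar function that returns its input record unchanged
+ * after sleeping for a random interval, simulating a slow UDF.
+ */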
+public class EchoDelayFunction implements IExternalScalarFunction {
+
+ private Random rand = new Random();
+ private long sleepIntervalMin;
+ private long sleepIntervalMax;
+ private int range;
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) {
+ sleepIntervalMin = 50;
+ sleepIntervalMax = 100;
+ range = (int) (sleepIntervalMax - sleepIntervalMin);
+ }
+
+ @Override
+ public void deinitialize() {
+ }
+
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ JRecord inputRecord = (JRecord) functionHelper.getArgument(0);
+ long sleepInterval = sleepIntervalMin + rand.nextInt(range);
+ Thread.sleep(sleepInterval);
+ functionHelper.setResult(inputRecord);
+ }
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFactory.java
new file mode 100644
index 0000000..d868f20
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionFactory;
+
+public class ParseTweetFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalScalarFunction getExternalFunction() {
+ return new ParseTweetFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java
new file mode 100644
index 0000000..1046518
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+
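+/**
+ * Sample external scalar function that copies the input tweet record and adds
+ * a "topics" field holding every "#"-prefixed token found in the text field.
+ */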
+public class ParseTweetFunction implements IExternalScalarFunction {
+
+ private JUnorderedList list = null;
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) {
+ list = new JUnorderedList(functionHelper.getObject(JTypeTag.STRING));
+ }
+
+ @Override
+ public void deinitialize() {
+ }
+
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ list.clear();
+ JRecord inputRecord = (JRecord) functionHelper.getArgument(0);
+ JString id = (JString) inputRecord.getValueByName("id");
+ JString text = (JString) inputRecord.getValueByName("text");
+
+ String[] tokens = text.getValue().split(" ");
+ for (String tk : tokens) {
+ if (tk.startsWith("#")) {
+ JString newField = (JString) functionHelper.getObject(JTypeTag.STRING);
+ newField.setValue(tk);
+ list.add(newField);
+ }
+ }
+ JRecord result = (JRecord) functionHelper.getResultObject();
+ result.setField("id", id);
+ result.setField("username", inputRecord.getValueByName("username"));
+ result.setField("location", inputRecord.getValueByName("location"));
+ result.setField("text", text);
+ result.setField("timestamp", inputRecord.getValueByName("timestamp"));
+ result.setField("topics", list);
+ functionHelper.setResult(result);
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFactory.java
new file mode 100644
index 0000000..eabc6b5
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionFactory;
+
+public class SumFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalScalarFunction getExternalFunction() {
+ return new SumFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFunction.java
new file mode 100644
index 0000000..96eba97
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/SumFunction.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionHelper;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInt;
+
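+/**
+ * Sample external scalar function that returns the sum of its two int arguments.
+ */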
+public class SumFunction implements IExternalScalarFunction {
+
+ private JInt result;
+
+ @Override
+ public void deinitialize() {
+ // nothing to do here
+ }
+
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ int arg0 = ((JInt) functionHelper.getArgument(0)).getValue();
+ int arg1 = ((JInt) functionHelper.getArgument(1)).getValue();
+ result.setValue(arg0 + arg1);
+ functionHelper.setResult(result);
+ }
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) {
+ result = (JInt) functionHelper.getResultObject();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFactory.java
new file mode 100644
index 0000000..2c5f607
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import edu.uci.ics.asterix.external.library.IExternalFunction;
+import edu.uci.ics.asterix.external.library.IFunctionFactory;
+
+public class UpperCaseFactory implements IFunctionFactory {
+
+ @Override
+ public IExternalFunction getExternalFunction() {
+ return new UpperCaseFunction();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFunction.java
new file mode 100644
index 0000000..e3d1c9c
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/UpperCaseFunction.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library;
+
+import java.util.Random;
+
+import edu.uci.ics.asterix.external.library.IExternalScalarFunction;
+import edu.uci.ics.asterix.external.library.IFunctionHelper;
+import edu.uci.ics.asterix.external.library.java.JTypeTag;
+import edu.uci.ics.asterix.external.library.java.JObjects.JInt;
+import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
+import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+
+/**
+ * Accepts an input record of type Open{ id: int32, text: string }.
+ *
+ * Converts the text field to upper case and appends an additional field,
+ * "substring", whose value is a random suffix of the text field.
+ *
+ * Return type: Open{ id: int32, text: string }
+ */
+public class UpperCaseFunction implements IExternalScalarFunction {
+
+ private Random random;
+
+ @Override
+ public void initialize(IFunctionHelper functionHelper) {
+ random = new Random();
+ }
+
+ @Override
+ public void deinitialize() {
+ }
+
+ @Override
+ public void evaluate(IFunctionHelper functionHelper) throws Exception {
+ JRecord inputRecord = (JRecord) functionHelper.getArgument(0);
+ JInt id = (JInt) inputRecord.getValueByName("id");
+ id.setValue(id.getValue() * -1); // negate the id, maintaining the uniqueness
+ // constraint when the output is re-inserted into the source dataset
+ JString text = (JString) inputRecord.getValueByName("text");
+ text.setValue(text.getValue().toUpperCase());
+ JRecord result = (JRecord) functionHelper.getResultObject();
+ result.setField("id", id);
+ result.setField("text", text);
+ JString newField = (JString) functionHelper.getObject(JTypeTag.STRING);
+ String upperText = text.getValue();
+ newField.setValue(upperText.isEmpty() ? "" : upperText.substring(random.nextInt(upperText.length())));
+ result.addField("substring", newField);
+ functionHelper.setResult(result);
+ }
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptor.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptor.java
new file mode 100644
index 0000000..cb0142a
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptor.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.adaptor;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import edu.uci.ics.asterix.external.dataset.adapter.StreamBasedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
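+/**
+ * Test feed adapter that pushes a configurable number of synthetic tweet
+ * records through a piped stream into the tuple parser supplied by the factory.
+ */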
+public class TestTypedAdaptor extends StreamBasedAdapter implements IFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ private final PipedOutputStream pos;
+
+ private final PipedInputStream pis;
+
+ private final Map<String, String> configuration;
+
+ private DummyGenerator generator;
+
+ public TestTypedAdaptor(ITupleParserFactory parserFactory, ARecordType sourceDatatype, IHyracksTaskContext ctx,
+ Map<String, String> configuration) throws IOException {
+ super(parserFactory, sourceDatatype, ctx);
+ pos = new PipedOutputStream();
+ pis = new PipedInputStream(pos);
+ this.configuration = configuration;
+ }
+
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ return pis;
+ }
+
+ @Override
+ public void start(int partition, IFrameWriter frameWriter) throws Exception {
+ generator = new DummyGenerator(configuration, pos);
+ ExecutorService executor = Executors.newSingleThreadExecutor();
+ executor.execute(generator);
+ executor.shutdown(); // no further tasks; lets the generator thread exit once done
+ super.start(partition, frameWriter);
+ }
+
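+ /**
+ * Writes nOutputRecords synthetic records to the piped output stream, one per
+ * line, then closes the stream so the adapter sees end-of-input.
+ */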
+ private static class DummyGenerator implements Runnable {
+
+ private final int nOutputRecords;
+ private final OutputStream os;
+ private final byte[] EOL = "\n".getBytes();
+ private volatile boolean continueIngestion; // written by stop(), read in run()
+
+ public DummyGenerator(Map<String, String> configuration, OutputStream os) {
+ nOutputRecords = Integer.parseInt(configuration.get(TestTypedAdaptorFactory.KEY_NUM_OUTPUT_RECORDS));
+ this.os = os;
+ this.continueIngestion = true;
+ }
+
+ @Override
+ public void run() {
+ DummyRecord dummyRecord = new DummyRecord();
+ try {
+ int i = 0;
+ while (continueIngestion && i < nOutputRecords) {
+ dummyRecord.reset(i + 1, "" + (i + 1));
+ os.write(dummyRecord.toString().getBytes());
+ os.write(EOL);
+ i++;
+ }
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ } finally {
+ try {
+ os.close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ public void stop() {
+ continueIngestion = false;
+ }
+ }
+
+ private static class DummyRecord {
+
+ private int tweetid = 0;
+ private String text = null;
+
+ public void reset(int tweetid, String text) {
+ this.tweetid = tweetid;
+ this.text = text;
+ }
+
+ @Override
+ public String toString() {
+ return "{" + "\"tweetid\":" + "int64(" + "\"" + tweetid + "\"" + ")" + "," + "\"message-text\":" + "\""
+ + text + "\"" + "}";
+ }
+
+ }
+
+ @Override
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PUSH;
+ }
+
+ @Override
+ public void stop() throws Exception {
+ generator.stop();
+ }
+
+}
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptorFactory.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptorFactory.java
new file mode 100644
index 0000000..39f7ab2
--- /dev/null
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/adaptor/TestTypedAdaptorFactory.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.library.adaptor;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
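+/**
+ * Factory for {@link TestTypedAdaptor}: declares the adapter name, its fixed
+ * output record type { tweetid: int64, message-text: string }, and a single
+ * partition constraint.
+ */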
+public class TestTypedAdaptorFactory implements ITypedAdapterFactory {
+
+ public static final String NAME = "test_typed_adaptor";
+
+ private static ARecordType adapterOutputType = initOutputType();
+
+ public static final String KEY_NUM_OUTPUT_RECORDS = "num_output_records";
+
+ private Map<String, String> configuration;
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ private static ARecordType initOutputType() {
+ String[] fieldNames = new String[] { "tweetid", "message-text" };
+ IAType[] fieldTypes = new IAType[] { BuiltinType.AINT64, BuiltinType.ASTRING };
+ ARecordType outputType = null;
+ try {
+ outputType = new ARecordType("TestTypedAdaptorOutputType", fieldNames, fieldTypes, false);
+ } catch (AsterixException exception) {
+ throw new IllegalStateException("Unable to create output type for adaptor " + NAME);
+ }
+ return outputType;
+ }
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return new AlgebricksCountPartitionConstraint(1);
+ }
+
+ @Override
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ ITupleParserFactory tupleParserFactory = new AdmSchemafullRecordParserFactory(adapterOutputType);
+ return new TestTypedAdaptor(tupleParserFactory, adapterOutputType, ctx, configuration);
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return adapterOutputType;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ this.configuration = configuration;
+ }
+
+}
diff --git a/asterix-external-data/src/test/resources/data/countriesCapitals.txt b/asterix-external-data/src/test/resources/data/countriesCapitals.txt
new file mode 100644
index 0000000..def17ed
--- /dev/null
+++ b/asterix-external-data/src/test/resources/data/countriesCapitals.txt
@@ -0,0 +1,189 @@
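+# java.util.Properties format: Country=Capital.
+# Spaces inside a key must be escaped with a backslash, e.g. United\ Arab\ Emirates.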
+United\ Arab\ Emirates=Abu Dhabi
+Nigeria=Abuja
+Ghana=Accra
+Ethiopia=Addis Ababa
+Algeria=Algiers
+Niue=Alofi
+Jordan=Amman
+Netherlands=Amsterdam
+Andorra=Andorra la Vella
+Turkey=Ankara
+Madagascar=Antananarivo
+Samoa=Apia
+Turkmenistan=Ashgabat
+Eritrea=Asmara
+Kazakhstan=Astana
+Paraguay=Asunción
+Greece=Athens
+Iraq=Baghdad
+Azerbaijan=Baku
+Mali=Bamako
+Brunei=Bandar Seri Begawan
+Thailand=Bangkok
+Gambia=Banjul
+China=Beijing
+Lebanon=Beirut
+Serbia=Belgrade
+Belize=Belmopan
+Germany=Berlin
+Switzerland=Bern
+Kyrgyzstan=Bishkek
+Guinea-Bissau=Bissau
+Colombia=Bogotá
+Brazil=Brasília
+Slovakia=Bratislava
+Barbados=Bridgetown
+Belgium=Brussels
+Romania=Bucharest
+Hungary=Budapest
+Argentina=Buenos Aires
+Burundi=Bujumbura
+Egypt=Cairo
+Australia=Canberra
+Venezuela=Caracas
+Wales=Cardiff
+Moldova=Chisinau
+Guinea=Conakry
+Denmark=Copenhagen
+Senegal=Dakar
+Syria=Damascus
+Bangladesh=Dhaka
+Djibouti=Djibouti
+Qatar=Doha
+Ireland=Dublin
+Tajikistan=Dushanbe
+Scotland=Edinburgh
+Botswana=Gaborone
+Guyana=Georgetown
+Gibraltar=Gibraltar
+Guatemala=Guatemala City
+Guam=Hagåtña
+Bermuda=Hamilton
+Vietnam=Hanoi
+Zimbabwe=Harare
+Cuba=Havana
+Finland=Helsinki
+Pakistan=Islamabad
+Indonesia=Jakarta
+Afghanistan=Kabul
+Uganda=Kampala
+Nepal=Kathmandu
+Sudan=Khartoum
+Ukraine=Kiev
+Rwanda=Kigali
+Jamaica=Kingston
+Malaysia=Kuala Lumpur
+Kuwait=Kuwait City
+Gabon=Libreville
+Malawi=Lilongwe
+Peru=Lima
+Portugal=Lisbon
+Slovenia=Ljubljana
+Togo=Lome
+England=London
+Angola=Luanda
+Zambia=Lusaka
+Luxembourg=Luxembourg
+Spain=Madrid
+Marshall\ Islands=Majuro
+Equatorial\ Guinea=Malabo
+Maldives=Malé
+Nicaragua=Managua
+Bahrain=Manama
+Philippines=Manila
+Mozambique=Maputo
+Saint\ Martin=Marigot
+Lesotho=Maseru
+Wallis\ and\ Futuna=Mata-Utu
+Palau=Melekeok
+Mexico=Mexico City
+Belarus=Minsk
+Somalia=Mogadishu
+Monaco=Monaco
+Liberia=Monrovia
+Uruguay=Montevideo
+Comoros=Moroni
+Russia=Moscow
+Oman=Muscat
+Kenya=Nairobi
+Bahamas=Nassau
+Myanmar=Naypyidaw
+Chad=NDjamena
+India=New Delhi
+Niger=Niamey
+Cyprus=Nicosia
+Mauritania=Nouakchott
+Greenland=Nuuk
+Aruba=Oranjestad
+Norway=Oslo
+Canada=Ottawa
+Panama=Panama City
+Suriname=Paramaribo
+France=Paris
+Cambodia=Phnom Penh
+Montserrat=Brades Estate (de facto)
+Montenegro=Podgorica
+Mauritius=Port Louis
+Vanuatu=Port Vila
+Haiti=Port-au-Prince
+Benin=Cotonou (de facto)
+Czech\ Republic=Prague
+Cape\ Verde=Praia
+South\ Africa=Cape Town
+North\ Korea=Pyongyang
+Ecuador=Quito
+Morocco=Rabat
+Iceland=Reykjavík
+Latvia=Riga
+Saudi\ Arabia=Riyadh
+Italy=Rome
+Dominica=Roseau
+Costa\ Rica=San José
+Puerto\ Rico=San Juan
+San\ Marino=San Marino
+El\ Salvador=San Salvador
+Yemen=Sanaa
+Chile=Santiago
+Dominican\ Republic=Santo Domingo
+Bosnia\ and\ Herzegovina=Sarajevo
+South\ Korea=Seoul
+Singapore=Singapore
+Macedonia=Skopje
+Bulgaria=Sofia
+Sri\ Lanka=Colombo
+Grenada=St. George's
+Jersey=St. Helier
+Guernsey=St. Peter Port
+Sweden=Stockholm
+Bolivia=La Paz
+Abkhazia=Sukhumi
+Fiji=Suva
+Taiwan=Taipei
+Estonia=Tallinn
+Kiribati=Tarawa
+Uzbekistan=Tashkent
+Georgia=Tbilisi
+Honduras=Tegucigalpa
+Iran=Tehran
+Bhutan=Thimphu
+Albania=Tirana
+Transnistria=Tiraspol
+Japan=Tokyo
+Libya=Tripoli
+Tunisia=Tunis
+Mongolia=Ulan Bator
+Liechtenstein=Vaduz
+Malta=Valletta
+Anguilla=The Valley
+Vatican\ City=Vatican City
+Seychelles=Victoria
+Austria=Vienna
+Laos=Vientiane
+Lithuania=Vilnius
+Poland=Warsaw
+United\ States=Washington D.C.
+New\ Zealand=Wellington
+Namibia=Windhoek
+Nauru=Yaren (de facto)
+Armenia=Yerevan
+Croatia=Zagreb
diff --git a/asterix-external-data/src/test/resources/text_functions.xml b/asterix-external-data/src/test/resources/text_functions.xml
new file mode 100644
index 0000000..a0b7bf9
--- /dev/null
+++ b/asterix-external-data/src/test/resources/text_functions.xml
@@ -0,0 +1,59 @@
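+<!-- Descriptor for the test UDF library: declares the implementation language,
+ the scalar functions it exports (name, argument/return types, factory class),
+ and the pluggable adapters it contributes. -->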
+<externalLibrary xmlns="library">
+ <language>JAVA</language>
+ <libraryFunctions>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>parseTweet</name>
+ <arguments>TweetInputType</arguments>
+ <return_type>TweetOutputType</return_type>
+ <definition>edu.uci.ics.asterix.external.library.ParseTweetFactory
+ </definition>
+ </libraryFunction>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>mysum</name>
+ <arguments>AINT32,AINT32</arguments>
+ <return_type>AINT32</return_type>
+ <definition>edu.uci.ics.asterix.external.library.SumFactory
+ </definition>
+ </libraryFunction>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>getCapital</name>
+ <arguments>ASTRING</arguments>
+ <return_type>CountryCapitalType</return_type>
+ <definition>edu.uci.ics.asterix.external.library.CapitalFinderFactory
+ </definition>
+ </libraryFunction>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>toUpper</name>
+ <arguments>TextType</arguments>
+ <return_type>TextType</return_type>
+ <definition>edu.uci.ics.asterix.external.library.UpperCaseFactory
+ </definition>
+ </libraryFunction>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>allTypes</name>
+ <arguments>AllType</arguments>
+ <return_type>AllType</return_type>
+ <definition>edu.uci.ics.asterix.external.library.AllTypesFactory
+ </definition>
+ </libraryFunction>
+ <libraryFunction>
+ <function_type>SCALAR</function_type>
+ <name>echoDelay</name>
+ <arguments>TweetMessageType</arguments>
+ <return_type>TweetMessageType</return_type>
+ <definition>edu.uci.ics.asterix.external.library.EchoDelayFactory
+ </definition>
+ </libraryFunction>
+ </libraryFunctions>
+ <libraryAdapters>
+ <libraryAdapter>
+ <name>test_typed_adaptor</name>
+ <factory_class>edu.uci.ics.asterix.external.library.adaptor.TestTypedAdaptorFactory</factory_class>
+ </libraryAdapter>
+ </libraryAdapters>
+</externalLibrary>
diff --git a/asterix-installer/pom.xml b/asterix-installer/pom.xml
index f75e15a..0c332ed 100644
--- a/asterix-installer/pom.xml
+++ b/asterix-installer/pom.xml
@@ -1,27 +1,23 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!-- ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License"); ! you may
+ not use this file except in compliance with the License. ! you may obtain
+ a copy of the License from ! ! http://www.apache.org/licenses/LICENSE-2.0
+ ! ! Unless required by applicable law or agreed to in writing, software !
+ distributed under the License is distributed on an "AS IS" BASIS, ! WITHOUT
+ WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ! See the
+ License for the specific language governing permissions and ! limitations
+ under the License. ! -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>asterix</artifactId>
- <groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
- </parent>
- <artifactId>asterix-installer</artifactId>
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>asterix</artifactId>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <version>0.8.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>asterix-installer</artifactId>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <test.excludes>**/DmlRecoveryIT.java</test.excludes>
</properties>
<build>
@@ -31,11 +27,10 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
</configuration>
</plugin>
-
<plugin>
<groupId>org.jvnet.jaxb2.maven2</groupId>
<artifactId>maven-jaxb2-plugin</artifactId>
@@ -76,10 +71,6 @@
<goal>generate</goal>
</goals>
<configuration>
- <args>
- <arg>-Xsetters</arg>
- <arg>-Xvalue-constructor</arg>
- </args>
<schemaDirectory>src/main/resources/schema</schemaDirectory>
<schemaIncludes>
<include>cluster.xsd</include>
@@ -116,6 +107,9 @@
<configuration>
<runOrder>alphabetical</runOrder>
<forkMode>pertest</forkMode>
+ <excludes>
+ <exclude>${test.excludes}</exclude>
+ </excludes>
</configuration>
<executions>
<execution>
@@ -170,37 +164,29 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-events</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <type>jar</type>
- <scope>compile</scope>
+ <version>0.8.4-SNAPSHOT</version>
+ <type>test-jar</type>
+ <scope>test</scope>
</dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-server</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>zip</type>
<classifier>binary-assembly</classifier>
</dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-test-framework</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <scope>test</scope>
- </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-test-framework</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
-
</project>
diff --git a/asterix-installer/src/main/assembly/binary-assembly.xml b/asterix-installer/src/main/assembly/binary-assembly.xml
index d42c5ae..930f686 100644
--- a/asterix-installer/src/main/assembly/binary-assembly.xml
+++ b/asterix-installer/src/main/assembly/binary-assembly.xml
@@ -13,112 +13,108 @@
! limitations under the License.
!-->
<assembly>
- <id>binary-assembly</id>
- <formats>
- <format>zip</format>
- <format>dir</format>
- </formats>
- <includeBaseDirectory>false</includeBaseDirectory>
- <fileSets>
- <fileSet>
- <directory>src/main/resources/conf</directory>
- <outputDirectory>conf</outputDirectory>
- </fileSet>
- <fileSet>
- <directory>src/main/resources/clusters</directory>
- <outputDirectory>clusters</outputDirectory>
- </fileSet>
- <fileSet>
- <directory>src/main/resources/zookeeper</directory>
- <fileMode>0755</fileMode>
- <outputDirectory>.installer/zookeeper/bin</outputDirectory>
- </fileSet>
- <fileSet>
- <directory>src/main/resources/scripts</directory>
- <fileMode>0755</fileMode>
- <includes>
- <include>managix</include>
- </includes>
- <outputDirectory>bin</outputDirectory>
- </fileSet>
- <fileSet>
- <directory>src/main/resources/scripts</directory>
- <fileMode>0755</fileMode>
- <excludes>
- <exclude>managix</exclude>
- </excludes>
- <outputDirectory>.installer/scripts</outputDirectory>
- </fileSet>
- <fileSet>
- <directory>src/main/resources/hadoop-0.20.2</directory>
- <outputDirectory>.installer/hadoop-0.20.2</outputDirectory>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <directory>target</directory>
- <outputDirectory>lib</outputDirectory>
- <includes>
- <include>*.jar</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>../asterix-events/src/main/resources/events</directory>
- <outputDirectory>.installer/eventrix/events</outputDirectory>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <directory>../asterix-events/src/main/resources/scripts</directory>
- <outputDirectory>.installer/eventrix/scripts</outputDirectory>
- </fileSet>
- </fileSets>
- <dependencySets>
- <dependencySet>
- <includes>
- <include>log4j:log4j</include>
- <include>edu.uci.ics.asterix:asterix-events</include>
- <include>edu.uci.ics.asterix:asterix-common</include>
- <include>org.apache.zookeeper:zookeeper</include>
- <include>args4j:args4j</include>
- <include>log4j:log4j</include>
- <include>commons-io:commons-io</include>
- <include>org.slf4j:slf4j-api</include>
- <include>org.slf4j:slf4j-log4j12</include>
- </includes>
- <unpack>false</unpack>
- <outputDirectory>lib</outputDirectory>
- </dependencySet>
- <dependencySet>
- <includes>
- <include>org.apache.hadoop:hadoop-core</include>
- <include>commons-cli:commons-cli</include>
- <include>commons-logging:commons-logging</include>
- </includes>
- <unpack>false</unpack>
- <outputDirectory>.installer/hadoop-0.20.2/lib</outputDirectory>
- </dependencySet>
- <dependencySet>
- <includes>
- <include>org.apache.zookeeper:zookeeper</include>
- <include>log4j:log4j</include>
- <include>org.slf4j:slf4j-api</include>
- </includes>
- <unpack>false</unpack>
- <outputDirectory>.installer/zookeeper/lib</outputDirectory>
- </dependencySet>
- <dependencySet>
- <outputDirectory>asterix</outputDirectory>
- <includes>
- <include>asterix-server*</include>
- </includes>
- <useTransitiveDependencies>false</useTransitiveDependencies>
- </dependencySet>
- <dependencySet>
- <outputDirectory>.installer/eventrix</outputDirectory>
- <includes>
- <include>asterix-events*</include>
- </includes>
- <unpack>false</unpack>
- <useTransitiveDependencies>false</useTransitiveDependencies>
- </dependencySet>
- </dependencySets>
-</assembly>
+ <id>binary-assembly</id>
+ <formats>
+ <format>zip</format>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>src/main/resources/conf</directory>
+ <outputDirectory>conf</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>src/main/resources/clusters</directory>
+ <outputDirectory>clusters</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>src/main/resources/zookeeper</directory>
+ <fileMode>0755</fileMode>
+ <outputDirectory>.installer/zookeeper/bin</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>src/main/resources/scripts</directory>
+ <fileMode>0755</fileMode>
+ <includes>
+ <include>managix</include>
+ </includes>
+ <outputDirectory>bin</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>src/main/resources/scripts</directory>
+ <fileMode>0755</fileMode>
+ <excludes>
+ <exclude>managix</exclude>
+ </excludes>
+ <outputDirectory>.installer/scripts</outputDirectory>
+ </fileSet>
+ <fileSet>
+ <directory>src/main/resources/hadoop-0.20.2</directory>
+ <outputDirectory>.installer/hadoop-0.20.2</outputDirectory>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>target</directory>
+ <outputDirectory>lib</outputDirectory>
+ <includes>
+ <include>*.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>../asterix-events/src/main/resources/events</directory>
+ <outputDirectory>.installer/events</outputDirectory>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ </fileSets>
+ <dependencySets>
+ <dependencySet>
+ <includes>
+ <include>log4j:log4j</include>
+ <include>edu.uci.ics.asterix:asterix-events</include>
+ <include>edu.uci.ics.asterix:asterix-common</include>
+ <include>org.apache.zookeeper:zookeeper</include>
+ <include>args4j:args4j</include>
+ <include>log4j:log4j</include>
+ <include>commons-io:commons-io</include>
+ <include>org.slf4j:slf4j-api</include>
+ <include>org.slf4j:slf4j-log4j12</include>
+ </includes>
+ <unpack>false</unpack>
+ <outputDirectory>lib</outputDirectory>
+ </dependencySet>
+ <dependencySet>
+ <includes>
+ <include>org.apache.hadoop:hadoop-core</include>
+ <include>commons-cli:commons-cli</include>
+ <include>commons-logging:commons-logging</include>
+ </includes>
+ <unpack>false</unpack>
+ <outputDirectory>.installer/hadoop-0.20.2/lib</outputDirectory>
+ </dependencySet>
+ <dependencySet>
+ <includes>
+ <include>org.apache.zookeeper:zookeeper</include>
+ <include>log4j:log4j</include>
+ <include>org.slf4j:slf4j-api</include>
+ </includes>
+ <unpack>false</unpack>
+ <outputDirectory>.installer/zookeeper/lib</outputDirectory>
+ </dependencySet>
+ <dependencySet>
+ <outputDirectory>asterix</outputDirectory>
+ <includes>
+ <include>asterix-server*</include>
+ </includes>
+ <useTransitiveDependencies>false</useTransitiveDependencies>
+ </dependencySet>
+ <dependencySet>
+ <outputDirectory>.installer/events</outputDirectory>
+ <includes>
+ <include>asterix-events*</include>
+ </includes>
+ <unpack>false</unpack>
+ <useTransitiveDependencies>false</useTransitiveDependencies>
+ </dependencySet>
+ </dependencySets>
+ </assembly>
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
index 8e4b2bc..44a6428 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/AlterCommand.java
@@ -19,12 +19,13 @@
import org.kohsuke.args4j.Option;
import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.service.ILookupService;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class AlterCommand extends AbstractCommand {
@@ -32,10 +33,10 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String instanceName = ((AlterConfig) config).name;
- InstallerUtil.validateAsterixInstanceExists(instanceName, State.INACTIVE);
+ AsterixEventServiceUtil.validateAsterixInstanceExists(instanceName, State.INACTIVE);
ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
- AsterixInstance instance = lookupService.getAsterixInstance(instanceName);
- InstallerUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
+ AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(instanceName);
+ AsterixEventServiceUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
AsterixConfiguration asterixConfiguration = InstallerUtil
.getAsterixConfiguration(((AlterConfig) config).confPath);
instance.setAsterixConfiguration(asterixConfiguration);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
index 91cbc86..3f30619 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
@@ -19,15 +19,16 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.BackupInfo;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.BackupInfo;
import edu.uci.ics.asterix.installer.schema.conf.Backup;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class BackupCommand extends AbstractCommand {
@@ -37,12 +38,12 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((BackupConfig) config).name;
- AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE);
List<BackupInfo> backupInfo = instance.getBackupInfo();
- PatternCreator pc = new PatternCreator();
- Backup backupConf = InstallerDriver.getConfiguration().getBackup();
- Patterns patterns = pc.getBackUpAsterixPattern(instance, backupConf);
- InstallerUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+ Backup backupConf = AsterixEventService.getConfiguration().getBackup();
+ Patterns patterns = PatternCreator.INSTANCE.getBackUpAsterixPattern(instance, backupConf);
+ AsterixEventService.getAsterixEventServiceClient(instance.getCluster()).submit(patterns);
int backupId = backupInfo.size();
BackupInfo binfo = new BackupInfo(backupId, new Date(), backupConf);
backupInfo.add(binfo);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CommandHandler.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CommandHandler.java
index 230a945..45cc9f7 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CommandHandler.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CommandHandler.java
@@ -52,6 +52,12 @@
case CONFIGURE:
cmd = new ConfigureCommand();
break;
+ case INSTALL:
+ cmd = new InstallCommand();
+ break;
+ case UNINSTALL:
+ cmd = new UninstallCommand();
+ break;
case LOG:
cmd = new LogCommand();
break;
@@ -61,6 +67,17 @@
case HELP:
cmd = new HelpCommand();
break;
+ case STOPNODE:
+ cmd = new StopNodeCommand();
+ break;
+ case STARTNODE:
+ cmd = new StartNodeCommand();
+ break;
+ case VERSION:
+ cmd = new VersionCommand();
+ break;
+ default:
+ break;
}
cmd.execute(args);
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
index 2803fee..5b024ec 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
@@ -19,17 +19,19 @@
import org.kohsuke.args4j.Option;
import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
import edu.uci.ics.asterix.event.management.EventUtil;
-import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class CreateCommand extends AbstractCommand {
@@ -46,29 +48,31 @@
throw new Exception("Cannot create an Asterix instance.");
}
asterixInstanceName = ((CreateConfig) config).name;
- InstallerUtil.validateAsterixInstanceNotExists(asterixInstanceName);
+ AsterixEventServiceUtil.validateAsterixInstanceNotExists(asterixInstanceName);
CreateConfig createConfig = (CreateConfig) config;
cluster = EventUtil.getCluster(createConfig.clusterPath);
+ cluster.setInstanceName(asterixInstanceName);
asterixConfiguration = InstallerUtil.getAsterixConfiguration(createConfig.asterixConfPath);
- AsterixInstance asterixInstance = InstallerUtil.createAsterixInstance(asterixInstanceName, cluster,
+ AsterixInstance asterixInstance = AsterixEventServiceUtil.createAsterixInstance(asterixInstanceName, cluster,
asterixConfiguration);
- InstallerUtil.evaluateConflictWithOtherInstances(asterixInstance);
- InstallerUtil.createAsterixZip(asterixInstance);
- InstallerUtil.createClusterProperties(cluster, asterixConfiguration);
- EventrixClient eventrixClient = InstallerUtil.getEventrixClient(cluster);
- PatternCreator pc = new PatternCreator();
+ AsterixEventServiceUtil.evaluateConflictWithOtherInstances(asterixInstance);
+ AsterixEventServiceUtil.createAsterixZip(asterixInstance);
+ AsterixEventServiceUtil.createClusterProperties(cluster, asterixConfiguration);
+ AsterixEventServiceClient eventrixClient = AsterixEventService.getAsterixEventServiceClient(cluster, true,
+ false);
- Patterns asterixBinarytrasnferPattern = pc.getAsterixBinaryTransferPattern(asterixInstanceName, cluster);
+ Patterns asterixBinarytrasnferPattern = PatternCreator.INSTANCE.getAsterixBinaryTransferPattern(
+ asterixInstanceName, cluster);
eventrixClient.submit(asterixBinarytrasnferPattern);
- Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, cluster);
+ Patterns patterns = PatternCreator.INSTANCE.getStartAsterixPattern(asterixInstanceName, cluster);
eventrixClient.submit(patterns);
AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(asterixInstance);
VerificationUtil.updateInstanceWithRuntimeDescription(asterixInstance, runtimeState, true);
ServiceProvider.INSTANCE.getLookupService().writeAsterixInstance(asterixInstance);
- InstallerUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator + InstallerDriver.ASTERIX_DIR
- + File.separator + asterixInstanceName);
+ AsterixEventServiceUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator
+ + InstallerDriver.ASTERIX_DIR + File.separator + asterixInstanceName);
LOGGER.info(asterixInstance.getDescription(false));
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
index 00a3de4..3938ea5 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DeleteCommand.java
@@ -16,13 +16,14 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class DeleteCommand extends AbstractCommand {
@@ -30,13 +31,13 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((DeleteConfig) config).name;
- AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
- PatternCreator pc = new PatternCreator();
- Patterns patterns = pc.createDeleteInstancePattern(instance);
- InstallerUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE);
+ Patterns patterns = PatternCreator.INSTANCE.createDeleteInstancePattern(instance);
+ AsterixEventService.getAsterixEventServiceClient(instance.getCluster()).submit(patterns);
- patterns = pc.createRemoveAsterixWorkingDirPattern(instance);
- InstallerUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+ patterns = PatternCreator.INSTANCE.createRemoveAsterixWorkingDirPattern(instance);
+ AsterixEventService.getAsterixEventServiceClient(instance.getCluster()).submit(patterns);
ServiceProvider.INSTANCE.getLookupService().removeAsterixInstance(asterixInstanceName);
LOGGER.info("Deleted Asterix instance: " + asterixInstanceName);
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
index 58851cb..9010061 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/DescribeCommand.java
@@ -18,14 +18,14 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
import edu.uci.ics.asterix.installer.error.InstallerException;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class DescribeCommand extends AbstractCommand {
@@ -35,7 +35,7 @@
String asterixInstanceName = ((DescribeConfig) config).name;
boolean adminView = ((DescribeConfig) config).admin;
if (asterixInstanceName != null) {
- InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE, State.ACTIVE,
+ AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE, State.ACTIVE,
State.UNUSABLE);
AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(
asterixInstanceName);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
index a2f00b3..68f8532 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/HelpCommand.java
@@ -55,12 +55,26 @@
case VALIDATE:
helpMessage = new ValidateCommand().getUsageDescription();
break;
+ case INSTALL:
+ helpMessage = new InstallCommand().getUsageDescription();
+ break;
+ case UNINSTALL:
+ helpMessage = new UninstallCommand().getUsageDescription();
+ break;
case ALTER:
helpMessage = new AlterCommand().getUsageDescription();
break;
case LOG:
helpMessage = new LogCommand().getUsageDescription();
break;
+ case STOPNODE:
+ helpMessage = new StopNodeCommand().getUsageDescription();
+ break;
+ case STARTNODE:
+ helpMessage = new StartNodeCommand().getUsageDescription();
+ break;
+ case VERSION:
+ helpMessage = new VersionCommand().getUsageDescription();
+ break;
default:
helpMessage = "Unknown command " + command;
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ICommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ICommand.java
index 9e67bf5..288c882 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ICommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ICommand.java
@@ -27,9 +27,14 @@
ALTER,
VALIDATE,
CONFIGURE,
+ INSTALL,
+ UNINSTALL,
LOG,
SHUTDOWN,
- HELP
+ HELP,
+ STOPNODE,
+ STARTNODE,
+ VERSION
}
public void execute(String args[]) throws Exception;
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/InstallCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/InstallCommand.java
new file mode 100644
index 0000000..59a69bf
--- /dev/null
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/InstallCommand.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.command;
+
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.util.PatternCreator;
+import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+
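+/**
+ * Managix "install" command: ships a packaged UDF library to the nodes of an
+ * INACTIVE Asterix instance and registers it under the given dataverse.
+ */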
+public class InstallCommand extends AbstractCommand {
+
+ @Override
+ protected void execCommand() throws Exception {
+ InstallerDriver.initConfig(true);
+ InstallConfig installConfig = ((InstallConfig) config);
+ String instanceName = installConfig.name;
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(instanceName, State.INACTIVE);
+ PatternCreator pc = PatternCreator.INSTANCE;
+ Patterns patterns = pc.getLibraryInstallPattern(instance, installConfig.dataverseName,
+ installConfig.libraryName, installConfig.libraryPath);
+ AsterixEventService.getAsterixEventServiceClient(instance.getCluster()).submit(patterns);
+ LOGGER.info("Installed library " + installConfig.libraryName);
+ }
+
+ @Override
+ protected CommandConfig getCommandConfig() {
+ return new InstallConfig();
+ }
+
+ @Override
+ protected String getUsageDescription() {
+ return "Installs a library to an asterix instance." + "\n" + "Arguments/Options\n"
+ + "-n Name of Asterix Instance\n"
+ + "-d Name of the dataverse under which the library will be installed\n" + "-l Name of the library\n"
+ + "-p Path to library zip bundle";
+
+ }
+
+}
+
+class InstallConfig extends CommandConfig {
+
+ @Option(name = "-n", required = true, usage = "Name of Asterix Instance")
+ public String name;
+
+ @Option(name = "-d", required = true, usage = "Name of the dataverse under which the library will be installed")
+ public String dataverseName;
+
+ @Option(name = "-l", required = true, usage = "Name of the library")
+ public String libraryName;
+
+ @Option(name = "-p", required = true, usage = "Path to library zip bundle")
+ public String libraryPath;
+
+}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/LogCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/LogCommand.java
index 11b4aa7..bf30a48 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/LogCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/LogCommand.java
@@ -21,14 +21,15 @@
import org.apache.commons.io.FileUtils;
import org.kohsuke.args4j.Option;
-import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
import edu.uci.ics.asterix.installer.error.InstallerException;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
public class LogCommand extends AbstractCommand {
@@ -36,12 +37,15 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((LogConfig) config).name;
- AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE,
- State.UNUSABLE, State.ACTIVE);
- PatternCreator pc = new PatternCreator();
- EventrixClient client = InstallerUtil.getEventrixClient(instance.getCluster());
- String outputDir = ((LogConfig) config).outputDir == null ? InstallerDriver.getManagixHome() + File.separator + "logdump"
- : ((LogConfig) config).outputDir;
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE, State.UNUSABLE, State.ACTIVE);
+ PatternCreator pc = PatternCreator.INSTANCE;
+
+ AsterixEventServiceClient eventrixClient = AsterixEventService.getAsterixEventServiceClient(
+ instance.getCluster(), true, false);
+
+ String outputDir = ((LogConfig) config).outputDir == null ? InstallerDriver.getManagixHome() + File.separator
+ + "logdump" : ((LogConfig) config).outputDir;
File f = new File(outputDir);
String outputDirPath = f.getAbsolutePath();
if (!f.exists()) {
@@ -50,12 +54,13 @@
throw new InstallerException("Unable to create output directory:" + outputDirPath);
}
}
- Patterns transferLogPattern = pc.getGenerateLogPattern(asterixInstanceName, instance.getCluster(), outputDirPath);
- client.submit(transferLogPattern);
+ Patterns transferLogPattern = pc.getGenerateLogPattern(asterixInstanceName, instance.getCluster(),
+ outputDirPath);
+ eventrixClient.submit(transferLogPattern);
File outputDirFile = new File(outputDirPath);
final String destFileName = "log_" + new Date().toString().replace(' ', '_') + ".zip";
File destFile = new File(outputDirFile, destFileName);
- InstallerUtil.zipDir(outputDirFile, destFile);
+ AsterixEventServiceUtil.zipDir(outputDirFile, destFile);
String[] filesToDelete = outputDirFile.list(new FilenameFilter() {
@Override
@@ -65,7 +70,7 @@
});
for (String fileS : filesToDelete) {
- f = new File(outputDirFile, fileS);
+ f = new File(outputDirFile, fileS);
if (f.isDirectory()) {
FileUtils.deleteDirectory(f);
} else {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
index 6627ac2..5b76099 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/RestoreCommand.java
@@ -18,13 +18,14 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.BackupInfo;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.BackupInfo;
public class RestoreCommand extends AbstractCommand {
@@ -32,7 +33,8 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((RestoreConfig) config).name;
- AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE);
int backupId = ((RestoreConfig) config).backupId;
List<BackupInfo> backupInfoList = instance.getBackupInfo();
if (backupInfoList.size() <= backupId || backupId < 0) {
@@ -40,9 +42,8 @@
}
BackupInfo backupInfo = backupInfoList.get(backupId);
- PatternCreator pc = new PatternCreator();
- Patterns patterns = pc.getRestoreAsterixPattern(instance, backupInfo);
- InstallerUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+ Patterns patterns = PatternCreator.INSTANCE.getRestoreAsterixPattern(instance, backupInfo);
+ AsterixEventService.getAsterixEventServiceClient(instance.getCluster()).submit(patterns);
LOGGER.info("Asterix instance: " + asterixInstanceName + " has been restored from backup");
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ShutdownCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ShutdownCommand.java
index 0c44bd8..228cb1c 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ShutdownCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ShutdownCommand.java
@@ -14,9 +14,10 @@
*/
package edu.uci.ics.asterix.installer.command;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.service.ILookupService;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class ShutdownCommand extends AbstractCommand {
@@ -24,7 +25,7 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(false);
ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
- lookupService.stopService(InstallerDriver.getConfiguration());
+ lookupService.stopService(AsterixEventService.getConfiguration());
}
@Override
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
index 3fc7e96..63ec6fc 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
@@ -18,16 +18,17 @@
import org.kohsuke.args4j.Option;
-import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class StartCommand extends AbstractCommand {
@@ -35,18 +36,18 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((StartConfig) config).name;
- AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
- InstallerUtil.createAsterixZip(instance);
- PatternCreator pc = new PatternCreator();
- EventrixClient client = InstallerUtil.getEventrixClient(instance.getCluster());
- Patterns asterixBinaryTransferPattern = pc.getAsterixBinaryTransferPattern(asterixInstanceName,
- instance.getCluster());
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE);
+ AsterixEventServiceUtil.createAsterixZip(instance);
+ AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(instance.getCluster());
+ Patterns asterixBinaryTransferPattern = PatternCreator.INSTANCE.getAsterixBinaryTransferPattern(
+ asterixInstanceName, instance.getCluster());
client.submit(asterixBinaryTransferPattern);
- InstallerUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
- Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, instance.getCluster());
+ AsterixEventServiceUtil.createClusterProperties(instance.getCluster(), instance.getAsterixConfiguration());
+ Patterns patterns = PatternCreator.INSTANCE.getStartAsterixPattern(asterixInstanceName, instance.getCluster());
client.submit(patterns);
- InstallerUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator + InstallerDriver.ASTERIX_DIR
- + File.separator + asterixInstanceName);
+ AsterixEventServiceUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator
+ + InstallerDriver.ASTERIX_DIR + File.separator + asterixInstanceName);
AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(instance);
VerificationUtil.updateInstanceWithRuntimeDescription(instance, runtimeState, true);
LOGGER.info(instance.getDescription(false));
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartNodeCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartNodeCommand.java
new file mode 100644
index 0000000..7d0a6ee
--- /dev/null
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartNodeCommand.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.command;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.model.ProcessInfo;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
+import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+import edu.uci.ics.asterix.installer.error.InstallerException;
+
+public class StartNodeCommand extends AbstractCommand {
+
+ @Override
+ protected void execCommand() throws Exception {
+ InstallerDriver.initConfig(true);
+ String asterixInstanceName = ((StartNodeConfig) config).name;
+ AsterixInstance instance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.INACTIVE, State.ACTIVE, State.UNUSABLE);
+
+ Cluster cluster = instance.getCluster();
+ List<Pattern> pl = new ArrayList<Pattern>();
+ AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(instance);
+ String[] nodesToBeAdded = ((StartNodeConfig) config).nodes.split(",");
+ List<String> aliveNodes = new ArrayList<String>();
+ for (ProcessInfo p : runtimeState.getProcesses()) {
+ aliveNodes.add(p.getNodeId());
+ }
+ List<Node> clusterNodes = cluster.getNode();
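+ // A node may only be started if it is not already alive; for each matching
+ // cluster node, create an NC start pattern using its iodevices (or the cluster-wide default).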
+ for (String n : nodesToBeAdded) {
+ if (aliveNodes.contains(n)) {
+ throw new InstallerException("Node: " + n + " is already alive");
+ }
+ for (Node node : clusterNodes) {
+ if (n.equals(node.getId())) {
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ Pattern createNC = PatternCreator.INSTANCE.createNCStartPattern(cluster.getMasterNode()
+ .getClusterIp(), node.getId(), asterixInstanceName + "_" + node.getId(), iodevices);
+ pl.add(createNC);
+ break;
+ }
+ }
+ }
+ Patterns patterns = new Patterns(pl);
+ AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(cluster);
+ client.submit(patterns);
+ runtimeState = VerificationUtil.getAsterixRuntimeState(instance);
+ VerificationUtil.updateInstanceWithRuntimeDescription(instance, runtimeState, true);
+ LOGGER.info(instance.getDescription(false));
+ ServiceProvider.INSTANCE.getLookupService().updateAsterixInstance(instance);
+ }
+
+ @Override
+ protected CommandConfig getCommandConfig() {
+ return new StartNodeConfig();
+ }
+
+ @Override
+ protected String getUsageDescription() {
+ return "\nStarts a set of nodes for an ASTERIX instance." + "\n\nAvailable arguments/options"
+ + "\n-n name of the ASTERIX instance. " + "\n-nodes"
+ + "Comma separated list of nodes that need to be started";
+ }
+}
+
+class StartNodeConfig extends CommandConfig {
+
+ @Option(name = "-n", required = true, usage = "Name of Asterix Instance")
+ public String name;
+
+ @Option(name = "-nodes", required = true, usage = "Comma separated list of nodes that need to be started")
+ public String nodes;
+
+}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
index c00fa86..5aa3123 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopCommand.java
@@ -20,16 +20,17 @@
import org.kohsuke.args4j.Option;
-import edu.uci.ics.asterix.event.management.EventrixClient;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
import edu.uci.ics.asterix.event.schema.cluster.Node;
import edu.uci.ics.asterix.event.schema.pattern.Pattern;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.events.PatternCreator;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class StopCommand extends AbstractCommand {
@@ -37,18 +38,20 @@
protected void execCommand() throws Exception {
InstallerDriver.initConfig(true);
String asterixInstanceName = ((StopConfig) config).name;
- AsterixInstance asterixInstance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName,
+ AsterixInstance asterixInstance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
State.ACTIVE, State.UNUSABLE);
- PatternCreator pc = new PatternCreator();
- EventrixClient client = InstallerUtil.getEventrixClient(asterixInstance.getCluster());
+ AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(asterixInstance
+ .getCluster());
List<Pattern> ncKillPatterns = new ArrayList<Pattern>();
for (Node node : asterixInstance.getCluster().getNode()) {
- ncKillPatterns.add(pc.createNCStopPattern(node.getId(), asterixInstanceName + "_" + node.getId()));
+ ncKillPatterns.add(PatternCreator.INSTANCE.createNCStopPattern(node.getId(), asterixInstanceName + "_"
+ + node.getId()));
}
List<Pattern> ccKillPatterns = new ArrayList<Pattern>();
- ccKillPatterns.add(pc.createCCStopPattern(asterixInstance.getCluster().getMasterNode().getId()));
+ ccKillPatterns.add(PatternCreator.INSTANCE.createCCStopPattern(asterixInstance.getCluster().getMasterNode()
+ .getId()));
try {
client.submit(new Patterns(ncKillPatterns));
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopNodeCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopNodeCommand.java
new file mode 100644
index 0000000..c7cc6de
--- /dev/null
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StopNodeCommand.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.command;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.model.ProcessInfo;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
+import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+import edu.uci.ics.asterix.installer.error.InstallerException;
+
+public class StopNodeCommand extends AbstractCommand {
+
+ @Override
+ protected void execCommand() throws Exception {
+ InstallerDriver.initConfig(true);
+ String asterixInstanceName = ((StopNodeConfig) config).name;
+ AsterixInstance asterixInstance = AsterixEventServiceUtil.validateAsterixInstanceExists(asterixInstanceName,
+ State.ACTIVE, State.UNUSABLE);
+
+ AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(asterixInstance
+ .getCluster());
+
+ String[] nodesToStop = ((StopNodeConfig) config).nodeList.split(",");
+ AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(asterixInstance);
+ List<String> aliveNodes = new ArrayList<String>();
+ for (ProcessInfo p : runtimeState.getProcesses()) {
+ aliveNodes.add(p.getNodeId());
+ }
+
+ List<String> validNodeIds = new ArrayList<String>();
+ for (Node node : asterixInstance.getCluster().getNode()) {
+ validNodeIds.add(node.getId());
+ }
+ List<Pattern> ncKillPatterns = new ArrayList<Pattern>();
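+ // Each requested node must appear in the cluster definition and be currently
+ // alive before an NC stop pattern is created for it.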
+ for (String nodeId : nodesToStop) {
+ if (!validNodeIds.contains(nodeId)) {
+ throw new InstallerException("Invalid nodeId: " + nodeId);
+ }
+ if (!aliveNodes.contains(nodeId)) {
+ throw new InstallerException("Node: " + nodeId + " is not alive");
+ }
+ ncKillPatterns.add(PatternCreator.INSTANCE.createNCStopPattern(nodeId, asterixInstanceName + "_" + nodeId));
+ }
+
+ try {
+ client.submit(new Patterns(ncKillPatterns));
+ } catch (Exception e) {
+ // processes are already dead
+ LOGGER.debug("Attempt to kill non-existing processess");
+ }
+
+ asterixInstance.setStateChangeTimestamp(new Date());
+ ServiceProvider.INSTANCE.getLookupService().updateAsterixInstance(asterixInstance);
+ LOGGER.info("Stopped nodes " + ((StopNodeConfig) config).nodeList + " serving Asterix instance: "
+ + asterixInstanceName);
+ }
+
+ @Override
+ protected CommandConfig getCommandConfig() {
+ return new StopNodeConfig();
+ }
+
+ @Override
+ protected String getUsageDescription() {
+ return "\nStops a specified set of ASTERIX nodes." + "\n\nAvailable arguments/options"
+ + "\n-n name of the ASTERIX instance. "
+ + "\n-nodes Comma separated list of nodes that need to be stopped. ";
+
+ }
+}
+
+class StopNodeConfig extends CommandConfig {
+
+ @Option(name = "-n", required = true, usage = "Name of Asterix Instance")
+ public String name;
+
+ @Option(name = "-nodes", required = true, usage = "Comma separated list of nodes that need to be stopped")
+ public String nodeList;
+
+}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/UninstallCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/UninstallCommand.java
new file mode 100644
index 0000000..39872e7
--- /dev/null
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/UninstallCommand.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.command;
+
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
+import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+
+public class UninstallCommand extends AbstractCommand {
+
+ @Override
+ protected void execCommand() throws Exception {
+ InstallerDriver.initConfig(true);
+ UninstallConfig uninstallConfig = ((UninstallConfig) config);
+ String instanceName = uninstallConfig.name;
+ AsterixEventServiceUtil.validateAsterixInstanceExists(instanceName, State.INACTIVE);
+ ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
+ AsterixInstance instance = lookupService.getAsterixInstance(instanceName);
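+ // Build the library-uninstall event patterns and submit them for execution on the instance's cluster.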
+ PatternCreator pc = PatternCreator.INSTANCE;
+ Patterns patterns = pc.getLibraryUninstallPattern(instance, uninstallConfig.dataverseName,
+ uninstallConfig.libraryName);
+ AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(instance.getCluster());
+ client.submit(patterns);
+ LOGGER.info("Uninstalled library " + uninstallConfig.libraryName);
+ }
+
+ @Override
+ protected CommandConfig getCommandConfig() {
+ return new UninstallConfig();
+ }
+
+ @Override
+ protected String getUsageDescription() {
+ return "Uninstalls a library from an asterix instance." + "\n" + "Arguments/Options\n"
+ + "-n Name of Asterix Instance\n"
+ + "-d Name of the dataverse under which the library will be installed\n" + "-l Name of the library\n"
+ + "-l Name of the library";
+ }
+
+}
+
+class UninstallConfig extends CommandConfig {
+
+ @Option(name = "-n", required = true, usage = "Name of Asterix Instance")
+ public String name;
+
+ @Option(name = "-d", required = true, usage = "Name of the dataverse under which the library will be installed")
+ public String dataverseName;
+
+ @Option(name = "-l", required = true, usage = "Name of the library")
+ public String libraryName;
+
+}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
index 5b63166..5035028 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
@@ -29,8 +29,8 @@
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.MasterNode;
import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
import edu.uci.ics.asterix.installer.schema.conf.Configuration;
import edu.uci.ics.asterix.installer.schema.conf.Zookeeper;
@@ -74,17 +74,10 @@
public boolean validateEnvironment() throws Exception {
boolean valid = true;
- String managixHome = System.getenv(InstallerDriver.ENV_MANAGIX_HOME);
- if (managixHome == null) {
+ File home = new File(InstallerDriver.getManagixHome());
+ if (!home.exists()) {
valid = false;
- LOGGER.fatal(InstallerDriver.ENV_MANAGIX_HOME + " not set " + ERROR);
- } else {
- File home = new File(managixHome);
- if (!home.exists()) {
- valid = false;
- LOGGER.fatal(InstallerDriver.ENV_MANAGIX_HOME + ": " + home.getAbsolutePath() + " does not exist!"
- + ERROR);
- }
+ LOGGER.fatal(InstallerDriver.ENV_MANAGIX_HOME + ": " + home.getAbsolutePath() + " does not exist!" + ERROR);
}
return valid;
@@ -109,7 +102,7 @@
MasterNode masterNode = cluster.getMasterNode();
Node master = new Node(masterNode.getId(), masterNode.getClusterIp(), masterNode.getJavaHome(),
- masterNode.getLogDir(), null, null, null);
+ masterNode.getLogDir(), null, null, null, null);
ipAddresses.add(masterNode.getClusterIp());
valid = valid & validateNodeConfiguration(master, cluster);
@@ -143,7 +136,7 @@
+ File.separator + "scripts" + File.separator + "validate_ssh.sh";
List<String> args = ipAddresses;
args.add(0, username);
- String output = InstallerUtil.executeLocalScript(script, args);
+ String output = AsterixEventServiceUtil.executeLocalScript(script, args);
ipAddresses.remove(0);
for (String line : output.split("\n")) {
ipAddresses.remove(line);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/VersionCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/VersionCommand.java
new file mode 100644
index 0000000..041839d
--- /dev/null
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/VersionCommand.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.command;
+
+import java.io.File;
+
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+
+public class VersionCommand extends AbstractCommand {
+
+ @Override
+ protected void execCommand() throws Exception {
+ InstallerDriver.initConfig(false);
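+ // Derive the version from the name of the bundled server zip, e.g.
+ // asterix-server-0.8.4-SNAPSHOT-binary-assembly.zip yields 0.8.4-SNAPSHOT.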
+ String asterixZipName = AsterixEventService.getAsterixZip().substring(
+ AsterixEventService.getAsterixZip().lastIndexOf(File.separator) + 1);
+ String asterixVersion = asterixZipName.substring("asterix-server-".length(),
+ asterixZipName.indexOf("-binary-assembly"));
+ LOGGER.info("Asterix/Managix version " + asterixVersion);
+ }
+
+ @Override
+ protected CommandConfig getCommandConfig() {
+ return new VersionConfig();
+ }
+
+ @Override
+ protected String getUsageDescription() {
+ return "Provides version of Managix/Asterix";
+ }
+
+}
+
+class VersionConfig extends CommandConfig {
+
+}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
index 521adc6..8461518 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
@@ -15,52 +15,37 @@
package edu.uci.ics.asterix.installer.driver;
import java.io.File;
-import java.io.FileFilter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
-import org.apache.log4j.Level;
import org.apache.log4j.Logger;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.command.CommandHandler;
import edu.uci.ics.asterix.installer.schema.conf.Configuration;
-import edu.uci.ics.asterix.installer.service.ILookupService;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class InstallerDriver {
- public static final String MANAGIX_INTERNAL_DIR = ".installer";
- public static final String MANAGIX_EVENT_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix";
- public static final String MANAGIX_EVENT_SCRIPTS_DIR = MANAGIX_INTERNAL_DIR + File.separator + "eventrix"
- + File.separator + "scripts";
- public static final String DEFAULT_ASTERIX_CONFIGURATION_PATH = "conf" + File.separator + File.separator
- + "asterix-configuration.xml";
- public static final String ASTERIX_DIR = "asterix";
- public static final String EVENTS_DIR = "events";
-
private static final Logger LOGGER = Logger.getLogger(InstallerDriver.class.getName());
+
+ public static final String MANAGIX_INTERNAL_DIR = ".installer";
public static final String ENV_MANAGIX_HOME = "MANAGIX_HOME";
public static final String MANAGIX_CONF_XML = "conf" + File.separator + "managix-conf.xml";
+ public static final String ASTERIX_DIR = "asterix";
- private static Configuration conf;
private static String managixHome;
- private static String asterixZip;
-
- public static String getAsterixZip() {
- return asterixZip;
- }
-
- public static Configuration getConfiguration() {
- return conf;
- }
public static void initConfig(boolean ensureLookupServiceIsRunning) throws Exception {
File configFile = new File(managixHome + File.separator + MANAGIX_CONF_XML);
JAXBContext configCtx = JAXBContext.newInstance(Configuration.class);
Unmarshaller unmarshaller = configCtx.createUnmarshaller();
- conf = (Configuration) unmarshaller.unmarshal(configFile);
- asterixZip = initBinary("asterix-server");
+ Configuration conf = (Configuration) unmarshaller.unmarshal(configFile);
+ String asterixDir = managixHome + File.separator + ASTERIX_DIR;
+ String eventHome = managixHome + File.separator + MANAGIX_INTERNAL_DIR;
+ AsterixEventService.initialize(conf, asterixDir, eventHome);
ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
if (ensureLookupServiceIsRunning && !lookupService.isRunning(conf)) {
@@ -68,28 +53,6 @@
}
}
- private static String initBinary(final String fileNamePattern) {
- String asterixDir = InstallerDriver.getAsterixDir();
- File file = new File(asterixDir);
- File[] zipFiles = file.listFiles(new FileFilter() {
- public boolean accept(File arg0) {
- return arg0.getAbsolutePath().contains(fileNamePattern) && arg0.isFile();
- }
- });
- if (zipFiles.length == 0) {
- String msg = " Binary not found at " + asterixDir;
- LOGGER.log(Level.FATAL, msg);
- throw new IllegalStateException(msg);
- }
- if (zipFiles.length > 1) {
- String msg = " Multiple binaries found at " + asterixDir;
- LOGGER.log(Level.FATAL, msg);
- throw new IllegalStateException(msg);
- }
-
- return zipFiles[0].getAbsolutePath();
- }
-
public static String getManagixHome() {
return managixHome;
}
@@ -98,10 +61,6 @@
InstallerDriver.managixHome = managixHome;
}
- public static String getAsterixDir() {
- return managixHome + File.separator + ASTERIX_DIR;
- }
-
public static void main(String args[]) {
try {
if (args.length != 0) {
@@ -134,12 +93,18 @@
buffer.append("alter " + ":" + " Alter the instance's configuration settings" + "\n");
buffer.append("describe " + ":" + " Describes an existing asterix instance" + "\n");
buffer.append("validate " + ":" + " Validates the installer/cluster configuration" + "\n");
- buffer.append("configure" + ":" + " Configure the Asterix installer" + "\n");
+ buffer.append("configure" + ":" + " Auto-generate configuration for local psedu-distributed Asterix instance"
+ + "\n");
+ buffer.append("install " + ":" + " Installs a library to an asterix instance" + "\n");
+ buffer.append("uninstall" + ":" + " Uninstalls a library from an asterix instance" + "\n");
buffer.append("log " + ":"
+ " Produce a tar archive contianing log files from the master and worker nodes" + "\n");
buffer.append("shutdown " + ":" + " Shutdown the installer service" + "\n");
buffer.append("help " + ":" + " Provides usage description of a command" + "\n");
+ buffer.append("version " + ":" + " Provides version of Asterix/Managix" + "\n");
+
buffer.append("\nTo get more information about a command, use managix help -cmd <command>");
LOGGER.info(buffer.toString());
}
+
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
index 2e04bbf..73645d2 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerUtil.java
@@ -14,58 +14,23 @@
*/
package edu.uci.ics.asterix.installer.driver;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
import java.io.File;
-import java.io.FileFilter;
-import java.io.FileInputStream;
import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.StringWriter;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Random;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-import java.util.jar.JarOutputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-import java.util.zip.ZipOutputStream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
-import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
-import org.apache.commons.io.IOUtils;
-
import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
-import edu.uci.ics.asterix.common.configuration.Coredump;
-import edu.uci.ics.asterix.common.configuration.Store;
-import edu.uci.ics.asterix.common.configuration.TransactionLogDir;
-import edu.uci.ics.asterix.event.driver.EventDriver;
-import edu.uci.ics.asterix.event.management.EventUtil;
-import edu.uci.ics.asterix.event.management.EventrixClient;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Env;
import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.event.schema.cluster.Property;
-import edu.uci.ics.asterix.installer.error.InstallerException;
-import edu.uci.ics.asterix.installer.error.OutputHandler;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
public class InstallerUtil {
+ private static final String DEFAULT_ASTERIX_CONFIGURATION_PATH = "conf" + File.separator
+ + "asterix-configuration.xml";
+
public static final String TXN_LOG_DIR = "txnLogs";
public static final String TXN_LOG_DIR_KEY_SUFFIX = "txnLogDir";
public static final String ASTERIX_CONFIGURATION_FILE = "asterix-configuration.xml";
@@ -75,133 +40,6 @@
public static final int HTTP_PORT_DEFAULT = 8888;
public static final int WEB_INTERFACE_PORT_DEFAULT = 19001;
- public static AsterixInstance createAsterixInstance(String asterixInstanceName, Cluster cluster,
- AsterixConfiguration asterixConfiguration) throws FileNotFoundException, IOException {
- Node metadataNode = getMetadataNode(cluster);
- String asterixZipName = InstallerDriver.getAsterixZip().substring(
- InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
- String asterixVersion = asterixZipName.substring("asterix-server-".length(),
- asterixZipName.indexOf("-binary-assembly"));
- AsterixInstance instance = new AsterixInstance(asterixInstanceName, cluster, asterixConfiguration,
- metadataNode.getId(), asterixVersion);
- return instance;
- }
-
- public static void createAsterixZip(AsterixInstance asterixInstance) throws IOException, InterruptedException,
- JAXBException, InstallerException {
-
- String modifiedZipPath = injectAsterixPropertyFile(InstallerDriver.getAsterixZip(), asterixInstance);
- injectAsterixLogPropertyFile(modifiedZipPath, asterixInstance);
- }
-
- public static void createClusterProperties(Cluster cluster, AsterixConfiguration asterixConfiguration) {
- List<Property> clusterProperties = null;
- if (cluster.getEnv() != null && cluster.getEnv().getProperty() != null) {
- clusterProperties = cluster.getEnv().getProperty();
- clusterProperties.clear();
- } else {
- clusterProperties = new ArrayList<Property>();
- }
- for (edu.uci.ics.asterix.common.configuration.Property property : asterixConfiguration.getProperty()) {
- if (property.getName().equalsIgnoreCase(EventUtil.CC_JAVA_OPTS)) {
- clusterProperties.add(new Property(EventUtil.CC_JAVA_OPTS, property.getValue()));
- } else if (property.getName().equalsIgnoreCase(EventUtil.NC_JAVA_OPTS)) {
- clusterProperties.add(new Property(EventUtil.NC_JAVA_OPTS, property.getValue()));
- }
- }
- clusterProperties.add(new Property("ASTERIX_HOME", cluster.getWorkingDir().getDir() + File.separator
- + "asterix"));
- clusterProperties.add(new Property("LOG_DIR", cluster.getLogDir()));
- clusterProperties.add(new Property("JAVA_HOME", cluster.getJavaHome()));
- clusterProperties.add(new Property("WORKING_DIR", cluster.getWorkingDir().getDir()));
- clusterProperties.add(new Property("CLIENT_NET_IP", cluster.getMasterNode().getClientIp()));
- clusterProperties.add(new Property("CLUSTER_NET_IP", cluster.getMasterNode().getClusterIp()));
-
- int clusterNetPort = cluster.getMasterNode().getClusterPort() != null ? cluster.getMasterNode()
- .getClusterPort().intValue() : CLUSTER_NET_PORT_DEFAULT;
- int clientNetPort = cluster.getMasterNode().getClientPort() != null ? cluster.getMasterNode().getClientPort()
- .intValue() : CLIENT_NET_PORT_DEFAULT;
- int httpPort = cluster.getMasterNode().getHttpPort() != null ? cluster.getMasterNode().getHttpPort().intValue()
- : HTTP_PORT_DEFAULT;
-
- clusterProperties.add(new Property("CLIENT_NET_PORT", "" + clientNetPort));
- clusterProperties.add(new Property("CLUSTER_NET_PORT", "" + clusterNetPort));
- clusterProperties.add(new Property("HTTP_PORT", "" + httpPort));
-
- cluster.setEnv(new Env(clusterProperties));
- }
-
- private static String injectAsterixPropertyFile(String origZipFile, AsterixInstance asterixInstance)
- throws IOException, JAXBException {
- writeAsterixConfigurationFile(asterixInstance);
- String asterixInstanceDir = InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName();
- unzip(origZipFile, asterixInstanceDir);
- File sourceJar = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
- + asterixInstance.getAsterixVersion() + ".jar");
- File replacementFile = new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE);
- replaceInJar(sourceJar, ASTERIX_CONFIGURATION_FILE, replacementFile);
- new File(asterixInstanceDir + File.separator + ASTERIX_CONFIGURATION_FILE).delete();
- String asterixZipName = InstallerDriver.getAsterixZip().substring(
- InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
- zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
- return asterixInstanceDir + File.separator + asterixZipName;
- }
-
- private static String injectAsterixLogPropertyFile(String origZipFile, AsterixInstance asterixInstance)
- throws IOException, InstallerException {
- String asterixInstanceDir = InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName();
- unzip(origZipFile, asterixInstanceDir);
- File sourceJar1 = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
- + asterixInstance.getAsterixVersion() + ".jar");
- Properties txnLogProperties = new Properties();
- URLClassLoader urlClassLoader = new URLClassLoader(new URL[] { sourceJar1.toURI().toURL() });
- InputStream in = urlClassLoader.getResourceAsStream(TXN_LOG_CONFIGURATION_FILE);
- if (in != null) {
- txnLogProperties.load(in);
- }
-
- writeAsterixLogConfigurationFile(asterixInstance, txnLogProperties);
-
- File sourceJar2 = new File(asterixInstanceDir + File.separator + "lib" + File.separator + "asterix-app-"
- + asterixInstance.getAsterixVersion() + ".jar");
- File replacementFile = new File(asterixInstanceDir + File.separator + "log.properties");
- replaceInJar(sourceJar2, TXN_LOG_CONFIGURATION_FILE, replacementFile);
-
- new File(asterixInstanceDir + File.separator + "log.properties").delete();
- String asterixZipName = InstallerDriver.getAsterixZip().substring(
- InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
- zipDir(new File(asterixInstanceDir), new File(asterixInstanceDir + File.separator + asterixZipName));
- return asterixInstanceDir + File.separator + asterixZipName;
- }
-
- public static void addLibraryToAsterixZip(AsterixInstance asterixInstance, String dataverseName,
- String libraryName, String libraryPath) throws IOException {
- File instanceDir = new File(InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName());
- if (!instanceDir.exists()) {
- instanceDir.mkdirs();
- }
- String asterixZipName = InstallerDriver.getAsterixZip().substring(
- InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
-
- String sourceZip = instanceDir.getAbsolutePath() + File.separator + asterixZipName;
- unzip(sourceZip, instanceDir.getAbsolutePath());
- File libraryPathInZip = new File(instanceDir.getAbsolutePath() + File.separator + "external" + File.separator
- + "library" + dataverseName + File.separator + "to-add" + File.separator + libraryName);
- libraryPathInZip.mkdirs();
- Runtime.getRuntime().exec("cp" + " " + libraryPath + " " + libraryPathInZip.getAbsolutePath());
- Runtime.getRuntime().exec("rm " + sourceZip);
- String destZip = InstallerDriver.getAsterixDir() + File.separator + asterixInstance.getName() + File.separator
- + asterixZipName;
- zipDir(instanceDir, new File(destZip));
- Runtime.getRuntime().exec("mv" + " " + destZip + " " + sourceZip);
- }
-
- private static Node getMetadataNode(Cluster cluster) {
- Random random = new Random();
- int nNodes = cluster.getNode().size();
- return cluster.getNode().get(random.nextInt(nNodes));
- }
-
public static String getNodeDirectories(String asterixInstanceName, Node node, Cluster cluster) {
String storeDataSubDir = asterixInstanceName + File.separator + "data" + File.separator;
String[] storeDirs = null;
@@ -224,83 +62,10 @@
return nodeDataStore.toString();
}
- private static void writeAsterixConfigurationFile(AsterixInstance asterixInstance) throws IOException,
- JAXBException {
- String asterixInstanceName = asterixInstance.getName();
- Cluster cluster = asterixInstance.getCluster();
- String metadataNodeId = asterixInstance.getMetadataNodeId();
-
- AsterixConfiguration configuration = asterixInstance.getAsterixConfiguration();
- configuration.setMetadataNode(asterixInstanceName + "_" + metadataNodeId);
-
- String storeDir = null;
- List<Store> stores = new ArrayList<Store>();
- for (Node node : cluster.getNode()) {
- storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
- stores.add(new Store(asterixInstanceName + "_" + node.getId(), storeDir));
- }
- configuration.setStore(stores);
-
- List<Coredump> coredump = new ArrayList<Coredump>();
- String coredumpDir = null;
- List<TransactionLogDir> txnLogDirs = new ArrayList<TransactionLogDir>();
- String txnLogDir = null;
- for (Node node : cluster.getNode()) {
- coredumpDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
- coredump.add(new Coredump(asterixInstanceName + "_" + node.getId(), coredumpDir + File.separator
- + asterixInstanceName + "_" + node.getId()));
-
- txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
- txnLogDirs.add(new TransactionLogDir(asterixInstanceName + "_" + node.getId(), txnLogDir));
- }
- configuration.setCoredump(coredump);
- configuration.setTransactionLogDir(txnLogDirs);
-
- File asterixConfDir = new File(InstallerDriver.getAsterixDir() + File.separator + asterixInstanceName);
- asterixConfDir.mkdirs();
-
- JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
- Marshaller marshaller = ctx.createMarshaller();
- marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
- marshaller.marshal(configuration, new FileOutputStream(asterixConfDir + File.separator
- + ASTERIX_CONFIGURATION_FILE));
- }
-
- private static void writeAsterixLogConfigurationFile(AsterixInstance asterixInstance, Properties logProperties)
- throws IOException, InstallerException {
- String asterixInstanceName = asterixInstance.getName();
- Cluster cluster = asterixInstance.getCluster();
- StringBuffer conf = new StringBuffer();
- for (Map.Entry<Object, Object> p : logProperties.entrySet()) {
- conf.append(p.getKey() + "=" + p.getValue() + "\n");
- }
-
- for (Node node : cluster.getNode()) {
- String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
- if (txnLogDir == null) {
- throw new InstallerException("Transaction log directory (txn_log_dir) not configured for node: "
- + node.getId());
- }
- conf.append(asterixInstanceName + "_" + node.getId() + "." + TXN_LOG_DIR_KEY_SUFFIX + "=" + txnLogDir
- + "\n");
- }
- List<edu.uci.ics.asterix.common.configuration.Property> properties = asterixInstance.getAsterixConfiguration()
- .getProperty();
- for (edu.uci.ics.asterix.common.configuration.Property p : properties) {
- if (p.getName().trim().toLowerCase().contains("log")) {
- conf.append(p.getValue() + "=" + p.getValue());
- }
- }
- dumpToFile(InstallerDriver.getAsterixDir() + File.separator + asterixInstanceName + File.separator
- + "log.properties", conf.toString());
-
- }
-
public static AsterixConfiguration getAsterixConfiguration(String asterixConf) throws FileNotFoundException,
IOException, JAXBException {
if (asterixConf == null) {
- asterixConf = InstallerDriver.getManagixHome() + File.separator
- + InstallerDriver.DEFAULT_ASTERIX_CONFIGURATION_PATH;
+ asterixConf = InstallerDriver.getManagixHome() + File.separator + DEFAULT_ASTERIX_CONFIGURATION_PATH;
}
File file = new File(asterixConf);
JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
@@ -308,200 +73,4 @@
AsterixConfiguration asterixConfiguration = (AsterixConfiguration) unmarshaller.unmarshal(file);
return asterixConfiguration;
}
-
- public static void unzip(String sourceFile, String destDir) throws IOException {
- BufferedOutputStream dest = null;
- FileInputStream fis = new FileInputStream(sourceFile);
- ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis));
- ZipEntry entry = null;
-
- int BUFFER_SIZE = 4096;
- while ((entry = zis.getNextEntry()) != null) {
- String dst = destDir + File.separator + entry.getName();
- if (entry.isDirectory()) {
- createDir(destDir, entry);
- continue;
- }
- int count;
- byte data[] = new byte[BUFFER_SIZE];
-
- // write the file to the disk
- FileOutputStream fos = new FileOutputStream(dst);
- dest = new BufferedOutputStream(fos, BUFFER_SIZE);
- while ((count = zis.read(data, 0, BUFFER_SIZE)) != -1) {
- dest.write(data, 0, count);
- }
- // close the output streams
- dest.flush();
- dest.close();
- }
-
- zis.close();
- }
-
- public static void zipDir(File sourceDir, File destFile) throws IOException {
- FileOutputStream fos = new FileOutputStream(destFile);
- ZipOutputStream zos = new ZipOutputStream(fos);
- zipDir(sourceDir, destFile, zos);
- zos.close();
- }
-
- private static void zipDir(File sourceDir, final File destFile, ZipOutputStream zos) throws IOException {
- File[] dirList = sourceDir.listFiles(new FileFilter() {
- public boolean accept(File f) {
- return !f.getName().endsWith(destFile.getName());
- }
- });
- for (int i = 0; i < dirList.length; i++) {
- File f = dirList[i];
- if (f.isDirectory()) {
- zipDir(f, destFile, zos);
- } else {
- int bytesIn = 0;
- byte[] readBuffer = new byte[2156];
- FileInputStream fis = new FileInputStream(f);
- ZipEntry entry = new ZipEntry(sourceDir.getName() + File.separator + f.getName());
- zos.putNextEntry(entry);
- while ((bytesIn = fis.read(readBuffer)) != -1) {
- zos.write(readBuffer, 0, bytesIn);
- }
- fis.close();
- }
- }
- }
-
- private static void replaceInJar(File sourceJar, String origFile, File replacementFile) throws IOException {
- File destJar = new File(sourceJar.getAbsolutePath() + ".modified");
- InputStream jarIs = null;
- FileInputStream fis = new FileInputStream(replacementFile);
- JarFile sourceJarFile = new JarFile(sourceJar);
- Enumeration<JarEntry> entries = sourceJarFile.entries();
- JarOutputStream jos = new JarOutputStream(new FileOutputStream(destJar));
- byte[] buffer = new byte[2048];
- int read;
- while (entries.hasMoreElements()) {
- JarEntry entry = (JarEntry) entries.nextElement();
- String name = entry.getName();
- if (name.equals(origFile)) {
- continue;
- }
- jarIs = sourceJarFile.getInputStream(entry);
- jos.putNextEntry(entry);
- while ((read = jarIs.read(buffer)) != -1) {
- jos.write(buffer, 0, read);
- }
- }
- JarEntry entry = new JarEntry(origFile);
- jos.putNextEntry(entry);
- while ((read = fis.read(buffer)) != -1) {
- jos.write(buffer, 0, read);
- }
- fis.close();
- jos.close();
- jarIs.close();
- sourceJar.delete();
- destJar.renameTo(sourceJar);
- sourceJar.setExecutable(true);
- }
-
- public static void dumpToFile(String dest, String content) throws IOException {
- FileWriter writer = new FileWriter(dest);
- writer.write(content);
- writer.close();
- }
-
- private static void createDir(String destDirectory, ZipEntry entry) {
- String name = entry.getName();
- int index = name.lastIndexOf(File.separator);
- String dirSequence = name.substring(0, index);
- File newDirs = new File(destDirectory + File.separator + dirSequence);
- newDirs.mkdirs();
- }
-
- public static AsterixInstance validateAsterixInstanceExists(String name, State... permissibleStates)
- throws Exception {
- AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(name);
- if (instance == null) {
- throw new InstallerException("Asterix instance by name " + name + " does not exist.");
- }
- boolean valid = false;
- for (State state : permissibleStates) {
- if (state.equals(instance.getState())) {
- valid = true;
- break;
- }
- }
- if (!valid) {
- throw new InstallerException("Asterix instance by the name " + name + " is in " + instance.getState()
- + " state ");
- }
- return instance;
- }
-
- public static void validateAsterixInstanceNotExists(String name) throws Exception {
- AsterixInstance instance = ServiceProvider.INSTANCE.getLookupService().getAsterixInstance(name);
- if (instance != null) {
- throw new InstallerException("Asterix instance by name " + name + " already exists.");
- }
- }
-
- public static void evaluateConflictWithOtherInstances(AsterixInstance instance) throws Exception {
- List<AsterixInstance> existingInstances = ServiceProvider.INSTANCE.getLookupService().getAsterixInstances();
- List<String> usedIps = new ArrayList<String>();
- String masterIp = instance.getCluster().getMasterNode().getClusterIp();
- for (Node node : instance.getCluster().getNode()) {
- usedIps.add(node.getClusterIp());
- }
- usedIps.add(instance.getCluster().getMasterNode().getClusterIp());
- boolean conflictFound = false;
- AsterixInstance conflictingInstance = null;
- for (AsterixInstance existing : existingInstances) {
- if (!existing.getState().equals(State.INACTIVE)) {
- conflictFound = existing.getCluster().getMasterNode().getClusterIp().equals(masterIp);
- if (conflictFound) {
- conflictingInstance = existing;
- break;
- }
- for (Node n : existing.getCluster().getNode()) {
- if (usedIps.contains(n.getClusterIp())) {
- conflictFound = true;
- conflictingInstance = existing;
- break;
- }
- }
- }
- }
- if (conflictFound) {
- throw new Exception("Cluster definition conflicts with an existing instance of Asterix: "
- + conflictingInstance.getName());
- }
- }
-
- public static void deleteDirectory(String path) throws IOException {
- Runtime.getRuntime().exec("rm -rf " + path);
- }
-
- public static String executeLocalScript(String path, List<String> args) throws Exception {
- List<String> pargs = new ArrayList<String>();
- pargs.add("/bin/bash");
- pargs.add(path);
- if (args != null) {
- pargs.addAll(args);
- }
- ProcessBuilder pb = new ProcessBuilder(pargs);
- pb.environment().putAll(EventDriver.getEnvironment());
- pb.environment().put("IP_LOCATION", EventDriver.CLIENT_NODE.getClusterIp());
- Process p = pb.start();
- BufferedInputStream bis = new BufferedInputStream(p.getInputStream());
- StringWriter writer = new StringWriter();
- IOUtils.copy(bis, writer, "UTF-8");
- return writer.toString();
- }
-
- public static EventrixClient getEventrixClient(Cluster cluster) throws Exception {
- return new EventrixClient(
- InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_EVENT_DIR, cluster, false,
- OutputHandler.INSTANCE);
- }
-
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/OutputHandler.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/OutputHandler.java
index 313fa50..0a86196 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/OutputHandler.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/OutputHandler.java
@@ -20,8 +20,8 @@
import edu.uci.ics.asterix.event.management.IOutputHandler;
import edu.uci.ics.asterix.event.management.OutputAnalysis;
+import edu.uci.ics.asterix.event.model.EventList.EventType;
import edu.uci.ics.asterix.event.schema.pattern.Event;
-import edu.uci.ics.asterix.installer.model.EventList.EventType;
public class OutputHandler implements IOutputHandler {
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java
deleted file mode 100644
index 5be3f89..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/error/VerificationUtil.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.error;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.installer.model.ProcessInfo;
-
-public class VerificationUtil {
-
- private static final String VERIFY_SCRIPT_PATH = InstallerDriver.getManagixHome() + File.separator
- + InstallerDriver.MANAGIX_INTERNAL_DIR + File.separator + "scripts" + File.separator + "verify.sh";
-
- public static AsterixRuntimeState getAsterixRuntimeState(
- AsterixInstance instance) throws Exception {
-
- Cluster cluster = instance.getCluster();
- List<String> args = new ArrayList<String>();
- args.add(instance.getName());
- args.add(instance.getCluster().getMasterNode().getClusterIp());
- for (Node node : cluster.getNode()) {
- args.add(node.getClusterIp());
- args.add(instance.getName() + "_" + node.getId());
- }
- Thread.sleep(2000); // give the remote processes a moment to come up before probing
- String output = InstallerUtil.executeLocalScript(VERIFY_SCRIPT_PATH,
- args);
- boolean ccRunning = true;
- List<String> failedNCs = new ArrayList<String>();
- String[] infoFields;
- ProcessInfo pInfo;
- List<ProcessInfo> processes = new ArrayList<ProcessInfo>();
-
- for (String line : output.split("\n")) {
- String nodeid = null;
- infoFields = line.split(":");
- try {
- int pid = Integer.parseInt(infoFields[3]);
- if (infoFields[0].equals("NC")) {
- nodeid = infoFields[2].split("_")[1];
- } else {
- nodeid = instance.getCluster().getMasterNode().getId();
- }
- pInfo = new ProcessInfo(infoFields[0], infoFields[1], nodeid,
- pid);
- processes.add(pInfo);
- } catch (Exception e) {
- if (infoFields[0].equalsIgnoreCase("CC")) {
- ccRunning = false;
- } else {
- failedNCs.add(infoFields[1]);
- }
- }
- }
- return new AsterixRuntimeState(processes, failedNCs, ccRunning);
- }
-
- public static void updateInstanceWithRuntimeDescription(
- AsterixInstance instance, AsterixRuntimeState state,
- boolean expectedRunning) {
- StringBuffer summary = new StringBuffer();
- if (expectedRunning) {
- if (!state.isCcRunning()) {
- summary.append("Cluster Controller not running at "
- + instance.getCluster().getMasterNode().getId() + "\n");
- instance.setState(State.UNUSABLE);
- }
- if (state.getFailedNCs() != null && !state.getFailedNCs().isEmpty()) {
- summary.append("Node Controller not running at the following nodes"
- + "\n");
- for (String failedNC : state.getFailedNCs()) {
- summary.append(failedNC + "\n");
- }
- instance.setState(State.UNUSABLE);
- }
- if (!(instance.getState().equals(State.UNUSABLE))) {
- instance.setState(State.ACTIVE);
- }
- } else {
- if (state.getProcesses() != null && state.getProcesses().size() > 0) {
- summary.append("Following process still running " + "\n");
- for (ProcessInfo pInfo : state.getProcesses()) {
- summary.append(pInfo + "\n");
- }
- instance.setState(State.UNUSABLE);
- } else {
- instance.setState(State.INACTIVE);
- }
- }
- state.setSummary(summary.toString());
- instance.setAsterixRuntimeStates(state);
- }
-
- public static void verifyBackupRestoreConfiguration(String hdfsUrl,
- String hadoopVersion, String hdfsBackupDir) throws Exception {
- StringBuffer errorCheck = new StringBuffer();
- if (hdfsUrl == null || hdfsUrl.length() == 0) {
- errorCheck.append("\n HDFS Url not configured");
- }
- if (hadoopVersion == null || hadoopVersion.length() == 0) {
- errorCheck.append("\n HDFS version not configured");
- }
- if (hdfsBackupDir == null || hdfsBackupDir.length() == 0) {
- errorCheck.append("\n HDFS backup directory not configured");
- }
- if (errorCheck.length() > 0) {
- throw new Exception("Incomplete hdfs configuration in "
- + InstallerDriver.getManagixHome() + File.separator
- + InstallerDriver.MANAGIX_CONF_XML + errorCheck);
- }
- }
-}
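getAsterixRuntimeState above treats each line of verify.sh output as colon-separated fields: process type, host, process name, pid. A hedged sketch of just that parsing step, with the line shape inferred from the field indices in the deleted code (not a documented format):

    import java.util.ArrayList;
    import java.util.List;

    final class ProcessLineParser {
        // Assumed line shape: TYPE:HOST:NAME:PID, e.g. "NC:10.0.0.2:myinstance_nc1:4242".
        static List<String[]> parse(String output) {
            List<String[]> rows = new ArrayList<>();
            for (String line : output.split("\n")) {
                String[] fields = line.split(":");
                if (fields.length >= 4) {
                    rows.add(fields); // fields[3] is the pid; a parse failure marked a dead process above
                }
            }
            return rows;
        }
    }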
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
deleted file mode 100644
index fe61462..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
+++ /dev/null
@@ -1,515 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.events;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import edu.uci.ics.asterix.event.driver.EventDriver;
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.event.schema.pattern.Delay;
-import edu.uci.ics.asterix.event.schema.pattern.Event;
-import edu.uci.ics.asterix.event.schema.pattern.Nodeid;
-import edu.uci.ics.asterix.event.schema.pattern.Pattern;
-import edu.uci.ics.asterix.event.schema.pattern.Patterns;
-import edu.uci.ics.asterix.event.schema.pattern.Value;
-import edu.uci.ics.asterix.installer.command.BackupCommand;
-import edu.uci.ics.asterix.installer.command.StopCommand;
-import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.BackupInfo;
-import edu.uci.ics.asterix.installer.model.BackupInfo.BackupType;
-import edu.uci.ics.asterix.installer.schema.conf.Backup;
-import edu.uci.ics.asterix.installer.service.ILookupService;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
-
-public class PatternCreator {
-
- private ILookupService lookupService = ServiceProvider.INSTANCE.getLookupService();
-
- private void addInitialDelay(Pattern p, int delay, String unit) {
- Delay d = new Delay(new Value(null, "" + delay), unit);
- p.setDelay(d);
- }
-
- public Patterns getAsterixBinaryTransferPattern(String asterixInstanceName, Cluster cluster) throws Exception {
- String ccLocationIp = cluster.getMasterNode().getClusterIp();
- String destDir = cluster.getWorkingDir().getDir() + File.separator + "asterix";
- List<Pattern> ps = new ArrayList<Pattern>();
-
- Pattern copyHyracks = createCopyHyracksPattern(asterixInstanceName, cluster, ccLocationIp, destDir);
- ps.add(copyHyracks);
-
- boolean copyHyracksToNC = !cluster.getWorkingDir().isNFS();
-
- for (Node node : cluster.getNode()) {
- if (copyHyracksToNC) {
- Pattern copyHyracksForNC = createCopyHyracksPattern(asterixInstanceName, cluster, node.getClusterIp(),
- destDir);
- ps.add(copyHyracksForNC);
- }
- }
- ps.addAll(createHadoopLibraryTransferPattern(cluster).getPattern());
- Patterns patterns = new Patterns(ps);
- return patterns;
- }
-
- public Patterns getStartAsterixPattern(String asterixInstanceName, Cluster cluster) throws Exception {
- String ccLocationId = cluster.getMasterNode().getId();
- List<Pattern> ps = new ArrayList<Pattern>();
-
- Pattern createCC = createCCStartPattern(ccLocationId);
- addInitialDelay(createCC, 3, "sec");
- ps.add(createCC);
-
- for (Node node : cluster.getNode()) {
- String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- Pattern createNC = createNCStartPattern(cluster.getMasterNode().getClusterIp(), node.getId(),
- asterixInstanceName + "_" + node.getId(), iodevices);
- addInitialDelay(createNC, 5, "sec");
- ps.add(createNC);
- }
-
- Patterns patterns = new Patterns(ps);
- return patterns;
- }
-
- public Patterns getStopCommandPattern(StopCommand stopCommand) throws Exception {
- List<Pattern> ps = new ArrayList<Pattern>();
- AsterixInstance asterixInstance = lookupService.getAsterixInstance(stopCommand.getAsterixInstanceName());
- Cluster cluster = asterixInstance.getCluster();
-
- String ccLocation = cluster.getMasterNode().getId();
- Pattern createCC = createCCStopPattern(ccLocation);
- addInitialDelay(createCC, 5, "sec");
- ps.add(createCC);
-
- String asterixInstanceName = stopCommand.getAsterixInstanceName();
- int nodeControllerIndex = 1;
- for (Node node : cluster.getNode()) {
- Pattern createNC = createNCStopPattern(node.getId(), asterixInstanceName + "_" + nodeControllerIndex);
- ps.add(createNC);
- nodeControllerIndex++;
- }
-
- Patterns patterns = new Patterns(ps);
- return patterns;
- }
-
- public Patterns getBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
- BackupType backupType = BackupInfo.getBackupType(backupConf);
- Patterns patterns = null;
- switch (backupType) {
- case HDFS:
- patterns = getHDFSBackUpAsterixPattern(instance, backupConf);
- break;
- case LOCAL:
- patterns = getLocalBackUpAsterixPattern(instance, backupConf);
- break;
- }
- return patterns;
- }
-
- public Patterns getRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
- BackupType backupType = backupInfo.getBackupType();
- Patterns patterns = null;
- switch (backupType) {
- case HDFS:
- patterns = getHDFSRestoreAsterixPattern(instance, backupInfo);
- break;
- case LOCAL:
- patterns = getLocalRestoreAsterixPattern(instance, backupInfo);
- break;
- }
- return patterns;
- }
-
- private Patterns getHDFSBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
- Cluster cluster = instance.getCluster();
- String hdfsUrl = backupConf.getHdfs().getUrl();
- String hadoopVersion = backupConf.getHdfs().getVersion();
- String hdfsBackupDir = backupConf.getBackupDir();
- VerificationUtil.verifyBackupRestoreConfiguration(hdfsUrl, hadoopVersion, hdfsBackupDir);
- String workingDir = cluster.getWorkingDir().getDir();
- String backupId = "" + instance.getBackupInfo().size();
- String store;
- String pargs;
- String iodevices;
- List<Pattern> patternList = new ArrayList<Pattern>();
- for (Node node : cluster.getNode()) {
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
- store = node.getStore() == null ? cluster.getStore() : node.getStore();
- pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
- + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + InstallerUtil.TXN_LOG_DIR + " " + backupId + " "
- + hdfsBackupDir + " " + "hdfs" + " " + node.getId() + " " + hdfsUrl + " " + hadoopVersion;
- Event event = new Event("backup", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- return new Patterns(patternList);
- }
-
- private Patterns getLocalBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
- Cluster cluster = instance.getCluster();
- String backupDir = backupConf.getBackupDir();
- String workingDir = cluster.getWorkingDir().getDir();
- String backupId = "" + instance.getBackupInfo().size();
- String iodevices;
- String txnLogDir;
- String store;
- String pargs;
- List<Pattern> patternList = new ArrayList<Pattern>();
- for (Node node : cluster.getNode()) {
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
- txnLogDir = node.getTxnLogDir() == null ? instance.getCluster().getTxnLogDir() : node.getTxnLogDir();
- store = node.getStore() == null ? cluster.getStore() : node.getStore();
- pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
- + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + txnLogDir + " " + backupId + " " + backupDir
- + " " + "local" + " " + node.getId();
- Event event = new Event("backup", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- return new Patterns(patternList);
- }
-
- public Patterns getHDFSRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
- Cluster cluster = instance.getCluster();
- String clusterStore = instance.getCluster().getStore();
- String hdfsUrl = backupInfo.getBackupConf().getHdfs().getUrl();
- String hadoopVersion = backupInfo.getBackupConf().getHdfs().getVersion();
- String hdfsBackupDir = backupInfo.getBackupConf().getBackupDir();
- VerificationUtil.verifyBackupRestoreConfiguration(hdfsUrl, hadoopVersion, hdfsBackupDir);
- String workingDir = cluster.getWorkingDir().getDir();
- int backupId = backupInfo.getId();
- String nodeStore;
- String pargs;
- List<Pattern> patternList = new ArrayList<Pattern>();
- for (Node node : cluster.getNode()) {
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- nodeStore = node.getStore() == null ? clusterStore : node.getStore();
- pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + nodeStore + " "
- + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + InstallerUtil.TXN_LOG_DIR + " " + backupId + " "
- + hdfsBackupDir + " " + "hdfs" + " " + node.getId() + " " + hdfsUrl + " " + hadoopVersion;
- Event event = new Event("restore", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- return new Patterns(patternList);
- }
-
- public Patterns getLocalRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
- Cluster cluster = instance.getCluster();
- String clusterStore = instance.getCluster().getStore();
- String backupDir = backupInfo.getBackupConf().getBackupDir();
- String workingDir = cluster.getWorkingDir().getDir();
- int backupId = backupInfo.getId();
- String nodeStore;
- String pargs;
- List<Pattern> patternList = new ArrayList<Pattern>();
- for (Node node : cluster.getNode()) {
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- nodeStore = node.getStore() == null ? clusterStore : node.getStore();
- pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + nodeStore + " "
- + BackupCommand.ASTERIX_ROOT_METADATA_DIR + " " + InstallerUtil.TXN_LOG_DIR + " " + backupId + " "
- + backupDir + " " + "local" + " " + node.getId();
- Event event = new Event("restore", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- return new Patterns(patternList);
- }
-
- public Patterns createHadoopLibraryTransferPattern(Cluster cluster) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- String workingDir = cluster.getWorkingDir().getDir();
- String hadoopVersion = InstallerDriver.getConfiguration().getBackup().getHdfs().getVersion();
- File hadoopDir = new File(InstallerDriver.getManagixHome() + File.separator
- + InstallerDriver.MANAGIX_INTERNAL_DIR + File.separator + "hadoop-" + hadoopVersion);
- if (!hadoopDir.exists()) {
- throw new IllegalStateException("Hadoop version :" + hadoopVersion + " not supported");
- }
-
- Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
- String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
- String pargs = username + " " + hadoopDir.getAbsolutePath() + " " + cluster.getMasterNode().getClusterIp()
- + " " + workingDir;
- Event event = new Event("directory_transfer", nodeid, pargs);
- Pattern p = new Pattern(null, 1, null, event);
- addInitialDelay(p, 2, "sec");
- patternList.add(p);
-
- boolean copyToNC = !cluster.getWorkingDir().isNFS();
- if (copyToNC) {
- for (Node node : cluster.getNode()) {
- nodeid = new Nodeid(new Value(null, node.getId()));
- pargs = cluster.getUsername() + " " + hadoopDir.getAbsolutePath() + " " + node.getClusterIp() + " "
- + workingDir;
- event = new Event("directory_transfer", nodeid, pargs);
- p = new Pattern(null, 1, null, event);
- addInitialDelay(p, 2, "sec");
- patternList.add(p);
- }
- }
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- public Patterns createDeleteInstancePattern(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- patternList.addAll(createRemoveAsterixStoragePattern(instance).getPattern());
- if (instance.getBackupInfo() != null && instance.getBackupInfo().size() > 0) {
- List<BackupInfo> backups = instance.getBackupInfo();
- Set<String> removedBackupDirsHDFS = new HashSet<String>();
- Set<String> removedBackupDirsLocal = new HashSet<String>();
-
- String backupDir;
- for (BackupInfo binfo : backups) {
- backupDir = binfo.getBackupConf().getBackupDir();
- switch (binfo.getBackupType()) {
- case HDFS:
- if (removedBackupDirsHDFS.contains(backupDir)) {
- continue;
- }
- patternList.addAll(createRemoveHDFSBackupPattern(instance, backupDir).getPattern());
- removedBackupDirsHDFS.add(backupDir);
- break;
-
- case LOCAL:
- if (removedBackupDirsLocal.contains(backupDir)) {
- continue;
- }
- patternList.addAll(createRemoveLocalBackupPattern(instance, backupDir).getPattern());
- removedBackupDirsLocal.add(backupDir);
- break;
- }
-
- }
- }
- patternList.addAll(createRemoveAsterixLogDirPattern(instance).getPattern());
- patternList.addAll(createRemoveAsterixRootMetadata(instance).getPattern());
- patternList.addAll(createRemoveAsterixTxnLogs(instance).getPattern());
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveAsterixTxnLogs(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- Nodeid nodeid = null;
- Event event = null;
- for (Node node : cluster.getNode()) {
- String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
- nodeid = new Nodeid(new Value(null, node.getId()));
- event = new Event("file_delete", nodeid, txnLogDir);
- patternList.add(new Pattern(null, 1, null, event));
- }
-
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveHDFSBackupPattern(AsterixInstance instance, String hdfsBackupDir) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- String hdfsUrl = InstallerDriver.getConfiguration().getBackup().getHdfs().getUrl();
- String hadoopVersion = InstallerDriver.getConfiguration().getBackup().getHdfs().getVersion();
- String workingDir = cluster.getWorkingDir().getDir();
- Node launchingNode = cluster.getNode().get(0);
- Nodeid nodeid = new Nodeid(new Value(null, launchingNode.getId()));
- String pathToDelete = hdfsBackupDir + File.separator + instance.getName();
- String pargs = workingDir + " " + hadoopVersion + " " + hdfsUrl + " " + pathToDelete;
- Event event = new Event("hdfs_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveLocalBackupPattern(AsterixInstance instance, String localBackupDir) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
-
- String pathToDelete = localBackupDir + File.separator + instance.getName();
- String pargs = pathToDelete;
- List<String> removedBackupDirs = new ArrayList<String>();
- for (Node node : cluster.getNode()) {
- if (removedBackupDirs.contains(node.getClusterIp())) {
- continue;
- }
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- Event event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- removedBackupDirs.add(node.getClusterIp());
- }
-
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- public Patterns createRemoveAsterixWorkingDirPattern(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- String workingDir = cluster.getWorkingDir().getDir();
- String pargs = workingDir;
- Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
- Event event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
-
- if (!cluster.getWorkingDir().isNFS()) {
- for (Node node : cluster.getNode()) {
- nodeid = new Nodeid(new Value(null, node.getId()));
- event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- }
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveAsterixRootMetadata(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- Nodeid nodeid = null;
- String pargs = null;
- Event event = null;
- for (Node node : cluster.getNode()) {
- String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- String primaryIODevice = iodevices.split(",")[0].trim();
- pargs = primaryIODevice + File.separator + BackupCommand.ASTERIX_ROOT_METADATA_DIR;
- nodeid = new Nodeid(new Value(null, node.getId()));
- event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
-
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveAsterixLogDirPattern(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- String pargs = instance.getCluster().getLogDir();
- Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
- Event event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
-
- for (Node node : cluster.getNode()) {
- nodeid = new Nodeid(new Value(null, node.getId()));
- event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
-
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Patterns createRemoveAsterixStoragePattern(AsterixInstance instance) throws Exception {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Cluster cluster = instance.getCluster();
- String pargs = null;
-
- for (Node node : cluster.getNode()) {
- Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
- String[] nodeIODevices;
- String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- nodeIODevices = iodevices.trim().split(",");
- for (String nodeIODevice : nodeIODevices) {
- String nodeStore = node.getStore() == null ? cluster.getStore() : node.getStore();
- pargs = nodeIODevice.trim() + File.separator + nodeStore;
- Event event = new Event("file_delete", nodeid, pargs);
- patternList.add(new Pattern(null, 1, null, event));
- }
- }
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
- private Pattern createCopyHyracksPattern(String instanceName, Cluster cluster, String destinationIp, String destDir) {
- Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
- String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
- String asterixZipName = InstallerDriver.getAsterixZip().substring(
- InstallerDriver.getAsterixZip().lastIndexOf(File.separator) + 1);
- String fileToTransfer = new File(InstallerDriver.getAsterixDir() + File.separator + instanceName
- + File.separator + asterixZipName).getAbsolutePath();
- String pargs = username + " " + fileToTransfer + " " + destinationIp + " " + destDir + " " + "unpack";
- Event event = new Event("file_transfer", nodeid, pargs);
- return new Pattern(null, 1, null, event);
- }
-
- private Pattern createCCStartPattern(String hostId) {
- Nodeid nodeid = new Nodeid(new Value(null, hostId));
- Event event = new Event("cc_start", nodeid, "");
- return new Pattern(null, 1, null, event);
- }
-
- public Pattern createCCStopPattern(String hostId) {
- Nodeid nodeid = new Nodeid(new Value(null, hostId));
- Event event = new Event("cc_failure", nodeid, null);
- return new Pattern(null, 1, null, event);
- }
-
- public Pattern createNCStartPattern(String ccHost, String hostId, String nodeControllerId, String iodevices) {
- Nodeid nodeid = new Nodeid(new Value(null, hostId));
- String pargs = ccHost + " " + nodeControllerId + " " + iodevices;
- Event event = new Event("node_join", nodeid, pargs);
- return new Pattern(null, 1, null, event);
- }
-
- public Pattern createNCStopPattern(String hostId, String nodeControllerId) {
- Nodeid nodeid = new Nodeid(new Value(null, hostId));
- Event event = new Event("node_failure", nodeid, nodeControllerId);
- return new Pattern(null, 1, null, event);
- }
-
- public Patterns getGenerateLogPattern(String asterixInstanceName, Cluster cluster, String outputDir) {
- List<Pattern> patternList = new ArrayList<Pattern>();
- Map<String, String> nodeLogs = new HashMap<String, String>();
-
- String username = cluster.getUsername() == null ? System.getProperty("user.name") : cluster.getUsername();
- String srcHost = cluster.getMasterNode().getClientIp();
- Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
- String srcDir = cluster.getMasterNode().getLogDir() == null ? cluster.getLogDir() : cluster.getMasterNode()
- .getLogDir();
- String destDir = outputDir + File.separator + "cc";
- String pargs = username + " " + srcHost + " " + srcDir + " " + destDir;
- Event event = new Event("directory_copy", nodeid, pargs);
- Pattern p = new Pattern(null, 1, null, event);
- patternList.add(p);
- nodeLogs.put(cluster.getMasterNode().getClusterIp(), srcDir);
- for (Node node : cluster.getNode()) {
- srcHost = node.getClusterIp();
- srcDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
- if (nodeLogs.get(node.getClusterIp()) != null && nodeLogs.get(node.getClusterIp()).equals(srcDir)) {
- continue;
- }
- destDir = outputDir + File.separator + node.getId();
- pargs = username + " " + srcHost + " " + srcDir + " " + destDir;
- event = new Event("directory_copy", nodeid, pargs);
- p = new Pattern(null, 1, null, event);
- patternList.add(p);
- }
- Patterns patterns = new Patterns(patternList);
- return patterns;
- }
-
-}
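Every factory in the deleted PatternCreator follows one recipe: build an Event from an event name, a target node id, and a space-separated argument string, wrap it in a Pattern, and collect the patterns into a Patterns batch. A schematic sketch of that recipe using plain records in place of the generated event-schema classes (assumes Java 16+; the real types live in edu.uci.ics.asterix.event.schema.pattern):

    import java.util.ArrayList;
    import java.util.List;

    final class PatternSketch {
        record Event(String type, String nodeId, String args) {}
        record Pattern(Event event) {}

        // Emit one "file_delete" event per node, mirroring the per-node loops above.
        static List<Pattern> fileDeleteOnNodes(List<String> nodeIds, String path) {
            List<Pattern> ps = new ArrayList<>();
            for (String id : nodeIds) {
                ps.add(new Pattern(new Event("file_delete", id, path)));
            }
            return ps;
        }
    }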
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
deleted file mode 100644
index c9a4743..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.model;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
-import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
-import edu.uci.ics.asterix.common.configuration.Property;
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-
-public class AsterixInstance implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private static final int WEB_INTERFACE_PORT_DEFAULT = 19001;
-
- public enum State {
- ACTIVE,
- INACTIVE,
- UNUSABLE
- }
-
- private final Cluster cluster;
- private final String name;
- private final Date createdTimestamp;
- private Date stateChangeTimestamp;
- private Date modifiedTimestamp;
- private AsterixConfiguration asterixConfiguration;
- private State state;
- private final String metadataNodeId;
- private final String asterixVersion;
- private final List<BackupInfo> backupInfo;
- private AsterixRuntimeState runtimeState;
- private State previousState;
-
- public AsterixInstance(String name, Cluster cluster, AsterixConfiguration asterixConfiguration,
- String metadataNodeId, String asterixVersion) {
- this.name = name;
- this.cluster = cluster;
- this.asterixConfiguration = asterixConfiguration;
- this.metadataNodeId = metadataNodeId;
- this.state = State.ACTIVE;
- this.previousState = State.UNUSABLE;
- this.asterixVersion = asterixVersion;
- this.createdTimestamp = new Date();
- this.backupInfo = new ArrayList<BackupInfo>();
-
- }
-
- public Date getModifiedTimestamp() {
- return modifiedTimestamp;
- }
-
- public State getState() {
- return state;
- }
-
- public void setState(State state) {
- this.previousState = this.state;
- this.state = state;
- }
-
- public Cluster getCluster() {
- return cluster;
- }
-
- public String getName() {
- return name;
- }
-
- public Date getCreatedTimestamp() {
- return createdTimestamp;
- }
-
- public Date getStateChangeTimestamp() {
- return stateChangeTimestamp;
- }
-
- public void setStateChangeTimestamp(Date stateChangeTimestamp) {
- this.stateChangeTimestamp = stateChangeTimestamp;
- }
-
- public void setModifiedTimestamp(Date modifiedTimestamp) {
- this.modifiedTimestamp = modifiedTimestamp;
- }
-
- public String getMetadataNodeId() {
- return metadataNodeId;
- }
-
- public String getAsterixVersion() {
- return asterixVersion;
- }
-
- public String getDescription(boolean detailed) {
- StringBuffer buffer = new StringBuffer();
- buffer.append("Name:" + name + "\n");
- buffer.append("Created:" + createdTimestamp + "\n");
-
- buffer.append("Web-Url:" + getWebInterfaceUrl() + "\n");
- buffer.append("State:" + state);
- if (!state.equals(State.UNUSABLE) && stateChangeTimestamp != null) {
- buffer.append(" (" + stateChangeTimestamp + ")" + "\n");
- } else {
- buffer.append("\n");
- }
- if (modifiedTimestamp != null) {
- buffer.append("Last modified timestamp:" + modifiedTimestamp + "\n");
- }
-
- if (runtimeState.getSummary() != null && runtimeState.getSummary().length() > 0) {
- buffer.append("\nWARNING!:" + runtimeState.getSummary() + "\n");
- }
- if (detailed) {
- addDetailedInformation(buffer);
- }
- return buffer.toString();
- }
-
- public List<BackupInfo> getBackupInfo() {
- return backupInfo;
- }
-
- public String getWebInterfaceUrl() {
- int webPort = WEB_INTERFACE_PORT_DEFAULT;
- for (Property p : asterixConfiguration.getProperty()) {
- if (p.getName().equalsIgnoreCase("web.port")) {
- webPort = Integer.parseInt(p.getValue());
- }
- }
- return "http://" + cluster.getMasterNode().getClientIp() + ":" + webPort;
- }
-
- public AsterixRuntimeState getAsterixRuntimeState() {
- return runtimeState;
- }
-
- public void setAsterixRuntimeStates(AsterixRuntimeState runtimeState) {
- this.runtimeState = runtimeState;
- }
-
- private void addDetailedInformation(StringBuffer buffer) {
- buffer.append("Master node:" + cluster.getMasterNode().getId() + ":" + cluster.getMasterNode().getClusterIp()
- + "\n");
- for (Node node : cluster.getNode()) {
- buffer.append(node.getId() + ":" + node.getClusterIp() + "\n");
- }
-
- if (backupInfo != null && backupInfo.size() > 0) {
- for (BackupInfo info : backupInfo) {
- buffer.append(info + "\n");
- }
- }
- buffer.append("\n");
- buffer.append("Asterix version:" + asterixVersion + "\n");
- buffer.append("Metadata Node:" + metadataNodeId + "\n");
- buffer.append("Processes" + "\n");
- for (ProcessInfo pInfo : runtimeState.getProcesses()) {
- buffer.append(pInfo + "\n");
- }
-
- buffer.append("\n");
- buffer.append("Asterix Configuration\n");
- int lenMax = 0;
- for (Property property : asterixConfiguration.getProperty()) {
- int nextLen = property.getName().length();
- if (nextLen > lenMax) {
- lenMax = nextLen;
- }
- }
- for (Property property : asterixConfiguration.getProperty()) {
- buffer.append(property.getName() + getIndentation(property.getName(), lenMax) + ":" + property.getValue()
- + "\n");
- }
-
- }
-
- private String getIndentation(String name, int lenMax) {
- int len = name.length();
- StringBuffer buf = new StringBuffer();
- for (int i = 0; i < lenMax - len; i++) {
- buf.append(" ");
- }
- return buf.toString();
- }
-
- public State getPreviousState() {
- return previousState;
- }
-
- public AsterixConfiguration getAsterixConfiguration() {
- return asterixConfiguration;
- }
-
- public void setAsterixConfiguration(AsterixConfiguration asterixConfiguration) {
- this.asterixConfiguration = asterixConfiguration;
- }
-}
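getIndentation above pads each property name with spaces so the configuration dump lines up in columns. The same alignment is available directly from String.format with a left-justified field width; a small equivalent illustration:

    public final class Align {
        public static void main(String[] args) {
            int width = "storage.buffercache.pagesize".length();
            // %-Ns left-justifies the name in an N-character field, like getIndentation above.
            System.out.printf("%-" + width + "s:%s%n", "web.port", "19001");
            System.out.printf("%-" + width + "s:%s%n", "storage.buffercache.pagesize", "131072");
        }
    }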
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixRuntimeState.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixRuntimeState.java
deleted file mode 100644
index e1a5167..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixRuntimeState.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.model;
-
-import java.io.Serializable;
-import java.util.List;
-
-public class AsterixRuntimeState implements Serializable {
-
- private final List<ProcessInfo> processes;
- private final List<String> failedNCs;
- private final boolean ccRunning;
- private String summary;
-
- public AsterixRuntimeState(List<ProcessInfo> processes, List<String> failedNCs, boolean ccRunning) {
- this.processes = processes;
- this.failedNCs = failedNCs;
- this.ccRunning = ccRunning;
- }
-
- public List<ProcessInfo> getProcesses() {
- return processes;
- }
-
- public List<String> getFailedNCs() {
- return failedNCs;
- }
-
- public boolean isCcRunning() {
- return ccRunning;
- }
-
- public void setSummary(String summary) {
- this.summary = summary;
- }
-
- public String getSummary() {
- return summary;
- }
-
-}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/BackupInfo.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/BackupInfo.java
deleted file mode 100644
index c88fddb..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/BackupInfo.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.model;
-
-import java.io.Serializable;
-import java.util.Date;
-
-import edu.uci.ics.asterix.installer.schema.conf.Backup;
-import edu.uci.ics.asterix.installer.schema.conf.Hdfs;
-
-public class BackupInfo implements Serializable {
-
- public static enum BackupType {
- LOCAL,
- HDFS
- };
-
- private final int id;
- private final Date date;
- private final Backup backupConf;
-
- public BackupInfo(int id, Date date, Backup backupConf) {
- this.id = id;
- this.date = date;
- this.backupConf = backupConf;
- }
-
- public int getId() {
- return id;
- }
-
- public Date getDate() {
- return date;
- }
-
- public Backup getBackupConf() {
- return backupConf;
- }
-
- @Override
- public String toString() {
- return id + " " + date + " " + "(" + getBackupType() + ")" + " " + "[ " + this.getBackupConf().getBackupDir()
- + " ]";
-
- }
-
- public BackupType getBackupType() {
- return getBackupType(this.getBackupConf());
- }
-
- public static BackupType getBackupType(Backup backupConf) {
- Hdfs hdfs = backupConf.getHdfs();
- return (hdfs != null && hdfs.getUrl() != null && hdfs.getUrl().length() > 0) ? BackupType.HDFS
- : BackupType.LOCAL;
- }
-}
\ No newline at end of file
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java
deleted file mode 100644
index 91592e7..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/EventList.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.model;
-
-public class EventList {
-
- public enum EventType {
- NODE_JOIN,
- NODE_FAILURE,
- CC_START,
- CC_FAILURE,
- BACKUP,
- RESTORE,
- FILE_DELETE,
- HDFS_DELETE,
- FILE_TRANSFER,
- FILE_CREATE,
- DIRECTORY_TRANSFER,
- DIRECTORY_COPY,
- NODE_INFO
- }
-}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/ProcessInfo.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/ProcessInfo.java
deleted file mode 100644
index 56dfc8a..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/ProcessInfo.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.model;
-
-import java.io.Serializable;
-
-public class ProcessInfo implements Serializable {
-
- private static final long serialVersionUID = 304186774065853730L;
- private final String processName;
- private final String host;
- private final String nodeId;
- private final int processId;
-
- public ProcessInfo(String processName, String host, String nodeId, int processId) {
- this.processName = processName;
- this.host = host;
- this.nodeId = nodeId;
- this.processId = processId;
- }
-
- public String getProcessName() {
- return processName;
- }
-
- public String getHost() {
- return host;
- }
-
- public int getProcessId() {
- return processId;
- }
-
- public String getNodeId() {
- return nodeId;
- }
-
- public String toString() {
- return processName + " at " + nodeId + " [ " + processId + " ] ";
- }
-
-}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ILookupService.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ILookupService.java
deleted file mode 100644
index 59fa198..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ILookupService.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.service;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.schema.conf.Configuration;
-
-public interface ILookupService {
-
- public void writeAsterixInstance(AsterixInstance asterixInstance) throws Exception;
-
- public AsterixInstance getAsterixInstance(String name) throws Exception;
-
- public boolean isRunning(Configuration conf) throws Exception;
-
- public void startService(Configuration conf) throws Exception;
-
- public void stopService(Configuration conf) throws Exception;
-
- public boolean exists(String name) throws Exception;
-
- public void removeAsterixInstance(String name) throws Exception;
-
- public List<AsterixInstance> getAsterixInstances() throws Exception;
-
- public void updateAsterixInstance(AsterixInstance updatedInstance) throws Exception;
-}
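ILookupService is the persistence seam for instance metadata: every command reads and writes AsterixInstance records through it. A hedged usage sketch of the create-or-update flow against the interface shown above (persist is an illustrative helper, not part of the interface; exception handling elided):

    // Sketch: write a new instance record, or replace the stored one if it already exists.
    static void persist(ILookupService lookup, AsterixInstance instance) throws Exception {
        if (!lookup.exists(instance.getName())) {
            lookup.writeAsterixInstance(instance);
        } else {
            lookup.updateAsterixInstance(instance);
        }
    }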
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ServiceProvider.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ServiceProvider.java
deleted file mode 100644
index 98d72f4..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ServiceProvider.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.service;
-
-public class ServiceProvider {
-
- public static ServiceProvider INSTANCE = new ServiceProvider();
- private static ILookupService lookupService = new ZooKeeperService();
-
- private ServiceProvider() {
-
- }
-
- public ILookupService getLookupService() {
- return lookupService;
- }
-
-}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
deleted file mode 100644
index d3304d2..0000000
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.installer.service;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.Watcher;
-import org.apache.zookeeper.ZooDefs.Ids;
-import org.apache.zookeeper.ZooKeeper;
-import org.apache.zookeeper.data.Stat;
-
-import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.driver.InstallerUtil;
-import edu.uci.ics.asterix.installer.error.InstallerException;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.schema.conf.Configuration;
-
-public class ZooKeeperService implements ILookupService {
-
- private static final Logger LOGGER = Logger.getLogger(ZooKeeperService.class.getName());
-
- private static final int ZOOKEEPER_LEADER_CONN_PORT = 2222;
- private static final int ZOOKEEPER_LEADER_ELEC_PORT = 2223;
- private static final int ZOOKEEPER_SESSION_TIME_OUT = 40 * 1000; //milliseconds
- private static final String ZOOKEEPER_HOME = InstallerDriver.getManagixHome() + File.separator
- + InstallerDriver.MANAGIX_INTERNAL_DIR + File.separator + "zookeeper";
- private static final String ZOO_KEEPER_CONFIG = ZOOKEEPER_HOME + File.separator + "zk.cfg";
-
- private boolean isRunning = false;
- private ZooKeeper zk;
- private String zkConnectionString;
- private static final String ASTERIX_INSTANCE_BASE_PATH = "/Asterix";
- private static final int DEFAULT_NODE_VERSION = -1;
- private LinkedBlockingQueue<String> msgQ = new LinkedBlockingQueue<String>();
- private ZooKeeperWatcher watcher = new ZooKeeperWatcher(msgQ);
-
- public boolean isRunning(Configuration conf) throws Exception {
- List<String> servers = conf.getZookeeper().getServers().getServer();
- int clientPort = conf.getZookeeper().getClientPort().intValue();
- StringBuffer connectionString = new StringBuffer();
- for (String serverAddress : servers) {
- connectionString.append(serverAddress);
- connectionString.append(":");
- connectionString.append(clientPort);
- connectionString.append(",");
- }
- if (connectionString.length() > 0) {
- connectionString.deleteCharAt(connectionString.length() - 1);
- }
- zkConnectionString = connectionString.toString();
-
- zk = new ZooKeeper(zkConnectionString, ZOOKEEPER_SESSION_TIME_OUT, watcher);
- try {
- zk.exists("/dummy", watcher);
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug("ZooKeeper running at " + connectionString);
- }
- createRootIfNotExist();
- isRunning = true;
- } catch (KeeperException ke) {
- isRunning = false;
- }
- return isRunning;
- }
-
- public void startService(Configuration conf) throws Exception {
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug("Starting ZooKeeper at " + zkConnectionString);
- }
- ZookeeperUtil.writeConfiguration(ZOO_KEEPER_CONFIG, conf, ZOOKEEPER_LEADER_CONN_PORT,
- ZOOKEEPER_LEADER_ELEC_PORT);
- String initScript = ZOOKEEPER_HOME + File.separator + "bin" + File.separator + "zk.init";
- StringBuffer cmdBuffer = new StringBuffer();
- cmdBuffer.append(initScript + " ");
- cmdBuffer.append(conf.getZookeeper().getHomeDir() + " ");
- cmdBuffer.append(conf.getZookeeper().getServers().getJavaHome() + " ");
- List<String> zkServers = conf.getZookeeper().getServers().getServer();
- for (String zkServer : zkServers) {
- cmdBuffer.append(zkServer + " ");
- }
- Runtime.getRuntime().exec(cmdBuffer.toString());
- zk = new ZooKeeper(zkConnectionString, ZOOKEEPER_SESSION_TIME_OUT, watcher);
- String head = msgQ.poll(10, TimeUnit.SECONDS);
- if (head == null) {
- StringBuilder msg = new StringBuilder(
- "Unable to start Zookeeper Service. This could be because of the following reasons.\n");
- msg.append("1) Managix is incorrectly configured. Please run " + InstallerDriver.getManagixHome()
- + "/bin/managix validate" + " to run a validation test and correct the errors reported.");
- msg.append("\n2) If validation in (1) is successful, ensure that java_home parameter is set correctly in Managix configuration ("
- + InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_CONF_XML + ")");
- throw new Exception(msg.toString());
- }
- msgQ.take();
- createRootIfNotExist();
- }
-
- public void stopService(Configuration conf) throws Exception {
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug("Stopping ZooKeeper running at " + zkConnectionString);
- }
- String stopScript = ZOOKEEPER_HOME + File.separator + "bin" + File.separator + "stop_zk";
- StringBuffer cmdBuffer = new StringBuffer();
- cmdBuffer.append(stopScript + " ");
- cmdBuffer.append(conf.getZookeeper().getHomeDir() + " ");
- List<String> zkServers = conf.getZookeeper().getServers().getServer();
- for (String zkServer : zkServers) {
- cmdBuffer.append(zkServer + " ");
- }
- Runtime.getRuntime().exec(cmdBuffer.toString());
- if (LOGGER.isDebugEnabled()) {
- LOGGER.debug("Stopped ZooKeeper service at " + zkConnectionString);
- }
- }
-
- public void writeAsterixInstance(AsterixInstance asterixInstance) throws Exception {
- String instanceBasePath = ASTERIX_INSTANCE_BASE_PATH + File.separator + asterixInstance.getName();
- ByteArrayOutputStream b = new ByteArrayOutputStream();
- ObjectOutputStream o = new ObjectOutputStream(b);
- o.writeObject(asterixInstance);
- zk.create(instanceBasePath, b.toByteArray(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
- }
-
- private void createRootIfNotExist() throws Exception {
- try {
- Stat stat = zk.exists(ASTERIX_INSTANCE_BASE_PATH, false);
- if (stat == null) {
- zk.create(ASTERIX_INSTANCE_BASE_PATH, "root".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
- }
- } catch (Exception e) {
- createRootIfNotExist();
- }
- }
-
- public AsterixInstance getAsterixInstance(String name) throws Exception {
- String path = ASTERIX_INSTANCE_BASE_PATH + File.separator + name;
- Stat stat = zk.exists(ASTERIX_INSTANCE_BASE_PATH + File.separator + name, false);
- if (stat == null) {
- return null;
- }
- byte[] asterixInstanceBytes = zk.getData(path, false, new Stat());
- return readAsterixInstanceObject(asterixInstanceBytes);
- }
-
- public boolean exists(String asterixInstanceName) throws Exception {
- return zk.exists(ASTERIX_INSTANCE_BASE_PATH + File.separator + asterixInstanceName, false) != null;
- }
-
- public void removeAsterixInstance(String name) throws Exception {
- if (!exists(name)) {
- throw new InstallerException("Asterix instance by name " + name + " does not exists.");
- }
- zk.delete(ASTERIX_INSTANCE_BASE_PATH + File.separator + name, DEFAULT_NODE_VERSION);
- }
-
- public List<AsterixInstance> getAsterixInstances() throws Exception {
- List<String> instanceNames = zk.getChildren(ASTERIX_INSTANCE_BASE_PATH, false);
- List<AsterixInstance> asterixInstances = new ArrayList<AsterixInstance>();
- String path;
- for (String instanceName : instanceNames) {
- path = ASTERIX_INSTANCE_BASE_PATH + File.separator + instanceName;
- byte[] asterixInstanceBytes = zk.getData(path, false, new Stat());
- asterixInstances.add(readAsterixInstanceObject(asterixInstanceBytes));
- }
- return asterixInstances;
- }
-
- private AsterixInstance readAsterixInstanceObject(byte[] asterixInstanceBytes) throws IOException,
- ClassNotFoundException {
- ByteArrayInputStream b = new ByteArrayInputStream(asterixInstanceBytes);
- ObjectInputStream ois = new ObjectInputStream(b);
- return (AsterixInstance) ois.readObject();
- }
-
- public void updateAsterixInstance(AsterixInstance updatedInstance) throws Exception {
- removeAsterixInstance(updatedInstance.getName());
- writeAsterixInstance(updatedInstance);
- }
-
-}
-
-class ZooKeeperWatcher implements Watcher {
-
- private boolean isRunning = true;
- private LinkedBlockingQueue<String> msgQ;
-
- public ZooKeeperWatcher(LinkedBlockingQueue<String> msgQ) {
- this.msgQ = msgQ;
- }
-
- public void process(WatchedEvent wEvent) {
- switch (wEvent.getState()) {
- case SyncConnected:
- msgQ.add("connected");
- break;
- }
- }
-
- public boolean isRunning() {
- return isRunning;
- }
-
-}
-
-class ZookeeperUtil {
-
- public static void writeConfiguration(String zooKeeperConfigPath, Configuration conf, int leaderConnPort,
- int leaderElecPort) throws IOException {
-
- StringBuffer buffer = new StringBuffer();
- buffer.append("tickTime=1000" + "\n");
- buffer.append("dataDir=" + conf.getZookeeper().getHomeDir() + File.separator + "data" + "\n");
- buffer.append("clientPort=" + conf.getZookeeper().getClientPort().intValue() + "\n");
- buffer.append("initLimit=" + 2 + "\n");
- buffer.append("syncLimit=" + 2 + "\n");
-
- List<String> servers = conf.getZookeeper().getServers().getServer();
- int serverId = 1;
- for (String server : servers) {
- buffer.append("server" + "." + serverId + "=" + server + ":" + leaderConnPort + ":" + leaderElecPort + "\n");
- serverId++;
- }
- InstallerUtil.dumpToFile(zooKeeperConfigPath, buffer.toString());
- }
-
-}
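The deleted ZooKeeperService stores each AsterixInstance as Java-serialized bytes under a per-instance znode and reads it back with an ObjectInputStream. A minimal round-trip sketch of that codec, independent of ZooKeeper itself (class name illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import java.io.Serializable;

    final class ZnodeCodec {
        // Serialize any Serializable object into the byte[] a znode would hold.
        static byte[] toBytes(Serializable obj) throws IOException {
            ByteArrayOutputStream b = new ByteArrayOutputStream();
            try (ObjectOutputStream o = new ObjectOutputStream(b)) {
                o.writeObject(obj);
            }
            return b.toByteArray();
        }

        // Read it back, as readAsterixInstanceObject did.
        static Object fromBytes(byte[] bytes) throws IOException, ClassNotFoundException {
            try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
                return ois.readObject();
            }
        }
    }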
diff --git a/asterix-installer/src/main/resources/conf/asterix-configuration.xml b/asterix-installer/src/main/resources/conf/asterix-configuration.xml
index cbee8f8..6f96191 100644
--- a/asterix-installer/src/main/resources/conf/asterix-configuration.xml
+++ b/asterix-installer/src/main/resources/conf/asterix-configuration.xml
@@ -27,6 +27,14 @@
</description>
</property>
+ <property>
+ <name>max.wait.active.cluster</name>
+ <value>60</value>
+ <description>Maximum wait (in seconds) for a cluster to be ACTIVE (all nodes are available)
+ before a submitted query/statement can be executed. (Default = 60 seconds)
+ </description>
+ </property>
+
<property>
<name>storage.buffercache.pagesize</name>
<value>131072</value>
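The new max.wait.active.cluster property bounds how long a submitted statement waits for every node controller to report in before executing. A hedged sketch of consuming such a property, with the name and 60-second default taken from the XML above (the Map-based reader is illustrative, not the AsterixDB configuration API):

    import java.util.Map;

    final class ClusterWait {
        static final int DEFAULT_MAX_WAIT_SECONDS = 60; // default documented in the property description

        // Look up max.wait.active.cluster, falling back to the documented 60s default.
        static int maxWaitSeconds(Map<String, String> props) {
            String v = props.get("max.wait.active.cluster");
            return v == null ? DEFAULT_MAX_WAIT_SECONDS : Integer.parseInt(v);
        }
    }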
diff --git a/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop b/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop-config.sh b/asterix-installer/src/main/resources/hadoop-0.20.2/bin/hadoop-config.sh
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/scripts/managix b/asterix-installer/src/main/resources/scripts/managix
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/scripts/validate_ssh.sh b/asterix-installer/src/main/resources/scripts/validate_ssh.sh
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/scripts/verify.sh b/asterix-installer/src/main/resources/scripts/verify.sh
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/zookeeper/start_zk.sh b/asterix-installer/src/main/resources/zookeeper/start_zk.sh
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/zookeeper/stop_zk b/asterix-installer/src/main/resources/zookeeper/stop_zk
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/main/resources/zookeeper/zk.init b/asterix-installer/src/main/resources/zookeeper/zk.init
old mode 100755
new mode 100644
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
new file mode 100644
index 0000000..a1226c3
--- /dev/null
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixExternalLibraryIT.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.test;
+
+import java.io.File;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+public class AsterixExternalLibraryIT {
+
+ private static final String LIBRARY_NAME = "testlib";
+ private static final String LIBRARY_DATAVERSE = "externallibtest";
+ private static final String PATH_BASE = "src/test/resources/integrationts/library";
+ private static final String PATH_ACTUAL = "ittest/";
+ private static final String LIBRARY_PATH = "asterix-external-data" + File.separator + "target" + File.separator
+ + "testlib-zip-binary-assembly.zip";
+ private static final Logger LOGGER = Logger.getLogger(AsterixExternalLibraryIT.class.getName());
+ private static List<TestCaseContext> testCaseCollection;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ AsterixInstallerIntegrationUtil.init();
+ File asterixInstallerProjectDir = new File(System.getProperty("user.dir"));
+ String asterixExternalLibraryPath = asterixInstallerProjectDir.getParentFile().getAbsolutePath()
+ + File.separator + LIBRARY_PATH;
+ LOGGER.info("Installing library :" + LIBRARY_NAME + " located at " + asterixExternalLibraryPath
+ + " in dataverse " + LIBRARY_DATAVERSE);
+ AsterixInstallerIntegrationUtil.installLibrary(LIBRARY_NAME, LIBRARY_DATAVERSE, asterixExternalLibraryPath);
+ AsterixInstallerIntegrationUtil.transformIntoRequiredState(State.ACTIVE);
+ TestCaseContext.Builder b = new TestCaseContext.Builder();
+ testCaseCollection = b.build(new File(PATH_BASE));
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ AsterixInstallerIntegrationUtil.deinit();
+ }
+
+ @Test
+ public void test() throws Exception {
+ for (TestCaseContext testCaseCtx : testCaseCollection) {
+ TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ try {
+ setUp();
+ new AsterixExternalLibraryIT().test();
+ } catch (Exception e) {
+ e.printStackTrace();
+ LOGGER.info("TEST CASES FAILED");
+ } finally {
+ tearDown();
+ }
+ }
+
+}
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixFaultToleranceIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixFaultToleranceIT.java
new file mode 100644
index 0000000..c532de5
--- /dev/null
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixFaultToleranceIT.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.installer.test;
+
+import java.io.File;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+public class AsterixFaultToleranceIT {
+
+ private static final String PATH_BASE = "src/test/resources/integrationts/fault-tolerance";
+ private static final String PATH_ACTUAL = "ittest/";
+ private static final Logger LOGGER = Logger.getLogger(AsterixFaultToleranceIT.class.getName());
+ private static List<TestCaseContext> testCaseCollection;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ AsterixInstallerIntegrationUtil.init();
+ TestCaseContext.Builder b = new TestCaseContext.Builder();
+ testCaseCollection = b.build(new File(PATH_BASE));
+ File outdir = new File(PATH_ACTUAL);
+ outdir.mkdirs();
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ AsterixInstallerIntegrationUtil.deinit();
+ File outdir = new File(PATH_ACTUAL);
+ File[] files = outdir.listFiles();
+ if (files == null || files.length == 0) {
+ outdir.delete();
+ }
+ }
+
+ @Test
+ public void test() throws Exception {
+ for (TestCaseContext testCaseCtx : testCaseCollection) {
+ TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ try {
+ setUp();
+ new AsterixFaultToleranceIT().test();
+ } catch (Exception e) {
+ e.printStackTrace();
+ LOGGER.info("TEST CASE(S) FAILED");
+ } finally {
+ tearDown();
+ }
+ }
+
+}
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
index c7beb28..1312c5b 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixInstallerIntegrationUtil.java
@@ -28,15 +28,14 @@
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.command.CommandHandler;
-import edu.uci.ics.asterix.installer.command.ShutdownCommand;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
import edu.uci.ics.asterix.installer.schema.conf.Configuration;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
@@ -62,6 +61,7 @@
public static void init() throws Exception {
File asterixProjectDir = new File(System.getProperty("user.dir"));
+
File installerTargetDir = new File(asterixProjectDir, "target");
String managixHomeDirName = installerTargetDir.list(new FilenameFilter() {
@Override
@@ -87,6 +87,7 @@
cmdHandler.processCommand(command.split(" "));
startZookeeper();
+ Thread.sleep(2000);
InstallerDriver.initConfig(true);
createInstance();
hcc = new HyracksConnection(CC_IP_ADDRESS, DEFAULT_HYRACKS_CC_CLIENT_PORT);
@@ -104,16 +105,15 @@
String command = "shutdown";
cmdHandler.processCommand(command.split(" "));
- Thread.sleep(2000);
+ Thread.sleep(2000);
- // start zookeeper
+ // start zookeeper
initZookeeperTestConfiguration(zookeeperTestClientPort);
ProcessBuilder pb2 = new ProcessBuilder(script, "describe");
Map<String, String> env2 = pb2.environment();
env2.put("MANAGIX_HOME", managixHome);
pb2.start();
- Thread.sleep(2000);
}
public static void createInstance() throws Exception {
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
index ac4fb34..d2dd016 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/test/AsterixLifecycleIT.java
@@ -25,12 +25,12 @@
import org.junit.Test;
import org.junit.runners.Parameterized.Parameters;
+import edu.uci.ics.asterix.event.error.VerificationUtil;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.model.AsterixInstance.State;
+import edu.uci.ics.asterix.event.model.AsterixRuntimeState;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
import edu.uci.ics.asterix.installer.command.CommandHandler;
-import edu.uci.ics.asterix.installer.error.VerificationUtil;
-import edu.uci.ics.asterix.installer.model.AsterixInstance;
-import edu.uci.ics.asterix.installer.model.AsterixInstance.State;
-import edu.uci.ics.asterix.installer.model.AsterixRuntimeState;
-import edu.uci.ics.asterix.installer.service.ServiceProvider;
import edu.uci.ics.asterix.test.aql.TestsUtils;
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
@@ -121,7 +121,7 @@
@Test
public void test() throws Exception {
for (TestCaseContext testCaseCtx : testCaseCollection) {
- TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null);
+ TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx, null, false);
}
}
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
new file mode 100644
index 0000000..ee41ddb
--- /dev/null
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/DmlRecoveryIT.java
@@ -0,0 +1,110 @@
+package edu.uci.ics.asterix.installer.transaction;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+@RunWith(Parameterized.class)
+public class DmlRecoveryIT {
+
+ // Runs the runtime test suite's "dml" test cases against a managed instance to verify crash recovery.
+
+ private static final Logger LOGGER = Logger.getLogger(DmlRecoveryIT.class.getName());
+ private static final String PATH_ACTUAL = "rttest/";
+
+ private static final String TESTSUITE_PATH_BASE = "../asterix-app/src/test/resources/runtimets/";
+
+ private TestCaseContext tcCtx;
+ private static File asterixInstallerPath;
+ private static File asterixAppPath;
+ private static File asterixDBPath;
+ private static File installerTargetPath;
+ private static String managixHomeDirName;
+ private static String managixHomePath;
+ private static String scriptHomePath;
+ private static ProcessBuilder pb;
+ private static Map<String, String> env;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ File outdir = new File(PATH_ACTUAL);
+ outdir.mkdirs();
+
+ asterixInstallerPath = new File(System.getProperty("user.dir"));
+ asterixDBPath = new File(asterixInstallerPath.getParent());
+ asterixAppPath = new File(asterixDBPath.getAbsolutePath() + File.separator + "asterix-app");
+ installerTargetPath = new File(asterixInstallerPath, "target");
+ managixHomeDirName = installerTargetPath.list(new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return new File(dir, name).isDirectory() && name.startsWith("asterix-installer")
+ && name.endsWith("binary-assembly");
+ }
+ })[0];
+ managixHomePath = new File(installerTargetPath, managixHomeDirName).getAbsolutePath();
+ LOGGER.info("MANAGIX_HOME=" + managixHomePath);
+
+ pb = new ProcessBuilder();
+ env = pb.environment();
+ env.put("MANAGIX_HOME", managixHomePath);
+ scriptHomePath = asterixInstallerPath + File.separator + "src" + File.separator + "test" + File.separator
+ + "resources" + File.separator + "transactionts" + File.separator + "scripts";
+ env.put("SCRIPT_HOME", scriptHomePath);
+
+ TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator
+ + "configure_and_validate.sh");
+ TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator
+ + "stop_and_delete.sh");
+
+ TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator
+ + "create_and_start.sh");
+
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ File outdir = new File(PATH_ACTUAL);
+ FileUtils.deleteDirectory(outdir);
+ TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator
+ + "stop_and_delete.sh");
+ TestsUtils.executeScript(pb, scriptHomePath + File.separator + "dml_recovery" + File.separator + "shutdown.sh");
+
+ }
+
+ @Parameters
+ public static Collection<Object[]> tests() throws Exception {
+
+ Collection<Object[]> testArgs = new ArrayList<Object[]>();
+ TestCaseContext.Builder b = new TestCaseContext.Builder();
+ for (TestCaseContext ctx : b.build(new File(TESTSUITE_PATH_BASE))) {
+ if (ctx.getTestCase().getFilePath().equals("dml"))
+ testArgs.add(new Object[] { ctx });
+ }
+ return testArgs;
+ }
+
+ public DmlRecoveryIT(TestCaseContext tcCtx) {
+ this.tcCtx = tcCtx;
+ }
+
+ @Test
+ public void test() throws Exception {
+
+ TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb, true);
+
+ }
+}
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
index 89cbec8..44ca366 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
@@ -26,6 +26,7 @@
import org.apache.commons.io.FileUtils;
import org.junit.AfterClass;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -69,14 +70,7 @@
}
})[0];
managixHomePath = new File(installerTargetPath, managixHomeDirName).getAbsolutePath();
-
- String fileListPath = asterixInstallerPath.getAbsolutePath() + File.separator + "src" + File.separator + "test"
- + File.separator + "resources" + File.separator + "transactionts" + File.separator + "data"
- + File.separator + "file_list.txt";
- String srcBasePath = asterixAppPath.getAbsolutePath();
- String destBasePath = managixHomePath + File.separator + "clusters" + File.separator + "local" + File.separator
- + "working_dir";
- prepareDataFiles(fileListPath, srcBasePath, destBasePath);
+ LOGGER.info("MANAGIX_HOME=" + managixHomePath);
pb = new ProcessBuilder();
env = pb.environment();
@@ -84,31 +78,20 @@
scriptHomePath = asterixInstallerPath + File.separator + "src" + File.separator + "test" + File.separator
+ "resources" + File.separator + "transactionts" + File.separator + "scripts";
env.put("SCRIPT_HOME", scriptHomePath);
-
+
TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+ "configure_and_validate.sh");
TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+ "stop_and_delete.sh");
}
- private static void prepareDataFiles(String fileListPath, String srcBasePath, String destBasePath)
- throws IOException {
- String line;
- File srcPathFile = null;
- File destPathFile = null;
- BufferedReader br = new BufferedReader(new FileReader(fileListPath));
- while ((line = br.readLine()) != null) {
- srcPathFile = new File(srcBasePath + File.separator + line.trim());
- destPathFile = new File(destBasePath + File.separator + line.trim());
- destPathFile.getParentFile().mkdirs();
- FileUtils.copyFile(srcPathFile, destPathFile);
- }
- }
-
@AfterClass
public static void tearDown() throws Exception {
File outdir = new File(PATH_ACTUAL);
FileUtils.deleteDirectory(outdir);
+ File dataCopyDir = new File(managixHomePath + File.separator + ".." + File.separator + ".." + File.separator
+ + "data");
+ FileUtils.deleteDirectory(dataCopyDir);
TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
+ "stop_and_delete.sh");
TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
@@ -131,6 +114,7 @@
@Test
public void test() throws Exception {
- TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb);
+ TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb, false);
}
+
}
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.1.ddl.aql
new file mode 100644
index 0000000..1f0678e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.1.ddl.aql
@@ -0,0 +1,28 @@
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TwitterUserType as closed {
+ screen-name: string,
+ lang: string,
+ friends_count: int32,
+ statuses_count: int32,
+ name: string,
+ followers_count: int32
+}
+
+create type TweetMessageType as closed {
+ tweetid: int64,
+ user: TwitterUserType,
+ sender-location: point,
+ send-time: datetime,
+ referred-topics: {{ string }},
+ message-text: string
+}
+
+create dataset Tweets(TweetMessageType)
+primary key tweetid;
+
+create feed TwitterFirehose
+using twitter_firehose
+(("duration"="30"),("tps"="50"),("tput-duration"="5"),("mode"="controlled"));
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.2.update.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.2.update.aql
new file mode 100644
index 0000000..64dbf25
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.2.update.aql
@@ -0,0 +1,3 @@
+use dataverse feeds;
+
+connect feed TwitterFirehose to dataset Tweets;
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.3.sleep.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.3.sleep.aql
new file mode 100644
index 0000000..5caff40
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.3.sleep.aql
@@ -0,0 +1 @@
+10000
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.4.mgx.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.4.mgx.aql
new file mode 100644
index 0000000..2d8a23e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.4.mgx.aql
@@ -0,0 +1 @@
+stop -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.5.mgx.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.5.mgx.aql
new file mode 100644
index 0000000..4e99f33
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.5.mgx.aql
@@ -0,0 +1 @@
+start -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.6.sleep.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.6.sleep.aql
new file mode 100644
index 0000000..c5da56a
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.6.sleep.aql
@@ -0,0 +1 @@
+40000
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.7.query.aql b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.7.query.aql
new file mode 100644
index 0000000..d03b9fe
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/queries/feeds/IN1-cluster-restart/in1-cluster-restart.7.query.aql
@@ -0,0 +1,10 @@
+use dataverse feeds;
+
+let $numTuples:=count(for $x in dataset Tweets
+return $x)
+let $result:=if($numTuples > 225)
+then
+ 1
+else
+ 0
+return $result
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.1.adm b/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.1.adm
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.1.adm
@@ -0,0 +1 @@
+1
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.2.adm b/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.2.adm
new file mode 100644
index 0000000..d00491f
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/results/feeds/IN1-cluster-restart/IN1-cluster-restart.2.adm
@@ -0,0 +1 @@
+1
diff --git a/asterix-installer/src/test/resources/integrationts/fault-tolerance/testsuite.xml b/asterix-installer/src/test/resources/integrationts/fault-tolerance/testsuite.xml
new file mode 100644
index 0000000..0d9ed23
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/fault-tolerance/testsuite.xml
@@ -0,0 +1,10 @@
+<test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+ <test-group name="fault-tolerance">
+ <test-case FilePath="feeds">
+ <compilation-unit name="IN1-cluster-restart">
+ <output-dir compare="Text">IN1-cluster-restart</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+</test-suite>
+
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
new file mode 100644
index 0000000..f5fe458
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
@@ -0,0 +1,21 @@
+/*
+ * Description : Create a dataset and a feed that uses a typed adapter
+ packaged in an external library ("testlib#test_typed_adaptor").
+ The adapter emits a fixed number of records ("num_output_records")
+ conforming to the declared output type.
+ Begin ingestion and verify the ingested records.
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+create type TestTypedAdaptorOutputType as closed {
+ tweetid: int64,
+ message-text: string
+}
+
+create dataset TweetsTestAdaptor(TestTypedAdaptorOutputType)
+primary key tweetid;
+
+create feed TestTypedAdaptorFeed
+using "testlib#test_typed_adaptor" (("num_output_records"="5"),("type-name"="TestTypedAdaptorOutputType"));
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.2.update.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.2.update.aql
new file mode 100644
index 0000000..a26a148
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.2.update.aql
@@ -0,0 +1,14 @@
+/*
+ * Description : Create a dataset and a feed that uses a typed adapter
+ packaged in an external library ("testlib#test_typed_adaptor").
+ The adapter emits a fixed number of records ("num_output_records")
+ conforming to the declared output type.
+ Begin ingestion and verify the ingested records.
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+set wait-for-completion-feed "true";
+
+connect feed TestTypedAdaptorFeed to dataset TweetsTestAdaptor;
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.3.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.3.query.aql
new file mode 100644
index 0000000..733b5a0
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.3.query.aql
@@ -0,0 +1,14 @@
+/*
+ * Description : Create a dataset and a feed that uses a typed adapter
+ packaged in an external library ("testlib#test_typed_adaptor").
+ The adapter emits a fixed number of records ("num_output_records")
+ conforming to the declared output type.
+ Begin ingestion and verify the ingested records.
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+for $x in dataset TweetsTestAdaptor
+order by $x.tweetid
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
new file mode 100644
index 0000000..43ff18b
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
@@ -0,0 +1,35 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ The feed simulator simulates feed from a file in the local fs.
+ Associate with the feed an external user-defined function. The UDF
+ finds topics in each tweet. A topic is identified by a #.
+ Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+create type TweetInputType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create type TweetOutputType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string,
+ topics : {{string}}
+}
+
+create feed TweetFeed
+using file_feed
+(("type-name"="TweetInputType"),("fs"="localfs"),("path"="127.0.0.1://../../../../../../asterix-app/data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+apply function testlib#parseTweet;
+
+create dataset TweetsFeedIngest(TweetOutputType)
+primary key id;
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql
new file mode 100644
index 0000000..7414bba
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql
@@ -0,0 +1,14 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ The feed simulator simulates feed from a file in the local fs.
+ Associate with the feed an external user-defined function. The UDF
+ finds topics in each tweet. A topic is identified by a #.
+ Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+set wait-for-completion-feed "true";
+
+connect feed TweetFeed to dataset TweetsFeedIngest;
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.3.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.3.query.aql
new file mode 100644
index 0000000..7d838be
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.3.query.aql
@@ -0,0 +1,13 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ The feed simulator simulates feed from a file in the local fs.
+ Associate with the feed an external user-defined function. The UDF
+ finds topics in each tweet. A topic is identified by a #.
+ Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+for $x in dataset TweetsFeedIngest
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.1.ddl.aql
new file mode 100644
index 0000000..e140d9a
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.1.ddl.aql
@@ -0,0 +1,6 @@
+use dataverse externallibtest;
+
+create type CountryCapitalType if not exists as closed {
+country: string,
+capital: string
+};
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.2.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.2.query.aql
new file mode 100644
index 0000000..256a3cd
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/getCapital/getCapital.2.query.aql
@@ -0,0 +1,5 @@
+use dataverse externallibtest;
+
+let $input:=["England","Italy","China","United States","India","Jupiter"]
+for $country in $input
+return testlib#getCapital($country)
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.1.ddl.aql
new file mode 100644
index 0000000..11a5ddc
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.1.ddl.aql
@@ -0,0 +1,9 @@
+use dataverse externallibtest;
+
+create type TextType if not exists as closed {
+id: int32,
+text: string
+};
+
+create dataset Check(TextType)
+primary key id;
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.2.update.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.2.update.aql
new file mode 100644
index 0000000..8a14669
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.2.update.aql
@@ -0,0 +1,6 @@
+use dataverse externallibtest;
+
+insert into dataset Check (
+{"id": 1, "text":"university of california, irvine"}
+);
+
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.3.update.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.3.update.aql
new file mode 100644
index 0000000..36f3133
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.3.update.aql
@@ -0,0 +1,7 @@
+use dataverse externallibtest;
+
+insert into dataset Check (
+ for $x in dataset Check
+ let $y:=testlib#toUpper($x)
+ return $y
+);
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.4.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.4.query.aql
new file mode 100644
index 0000000..997c333
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/insert-from-select/insert-from-select.4.query.aql
@@ -0,0 +1,6 @@
+use dataverse externallibtest;
+
+for $x in dataset Check
+where $x.id < 0
+order by $x.id
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/mysum/mysum.1.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/mysum/mysum.1.query.aql
new file mode 100644
index 0000000..ce255b8
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/mysum/mysum.1.query.aql
@@ -0,0 +1,4 @@
+use dataverse externallibtest;
+
+let $x:=testlib#mysum(3,4)
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.1.ddl.aql
new file mode 100644
index 0000000..67635f5
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.1.ddl.aql
@@ -0,0 +1,7 @@
+use dataverse externallibtest;
+
+create type TextType if not exists as closed {
+id: int32,
+text: string
+};
+
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.2.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.2.query.aql
new file mode 100644
index 0000000..a742203
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-functions/toUpper/toUpper.2.query.aql
@@ -0,0 +1,5 @@
+use dataverse externallibtest;
+
+let $input:={"id": 1, "text":"university of california, irvine"}
+let $x:=testlib#toUpper($input)
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/dataverseDataset/dataverseDataset.1.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/dataverseDataset/dataverseDataset.1.query.aql
new file mode 100644
index 0000000..40316d8
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/dataverseDataset/dataverseDataset.1.query.aql
@@ -0,0 +1,3 @@
+for $x in dataset Metadata.Dataverse
+order by $x.DataverseName
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/functionDataset/functionDataset.1.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/functionDataset/functionDataset.1.query.aql
new file mode 100644
index 0000000..fc47972
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/functionDataset/functionDataset.1.query.aql
@@ -0,0 +1,3 @@
+for $x in dataset Metadata.Function
+order by $x.Name
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/libraryDataset/libraryDataset.1.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/libraryDataset/libraryDataset.1.query.aql
new file mode 100644
index 0000000..36a8a52
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-metadata/libraryDataset/libraryDataset.1.query.aql
@@ -0,0 +1,3 @@
+for $x in dataset Metadata.Library
+order by $x.Name
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-adapters/typed_adapter/typed_adapter.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-adapters/typed_adapter/typed_adapter.1.adm
new file mode 100644
index 0000000..2ad7b60
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-adapters/typed_adapter/typed_adapter.1.adm
@@ -0,0 +1,5 @@
+{ "tweetid": 1i64, "message-text": "1" }
+{ "tweetid": 2i64, "message-text": "2" }
+{ "tweetid": 3i64, "message-text": "3" }
+{ "tweetid": 4i64, "message-text": "4" }
+{ "tweetid": 5i64, "message-text": "5" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm
new file mode 100644
index 0000000..1291213
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012", "topics": {{ }} }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012", "topics": {{ }} }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012", "topics": {{ "#BadDecision" }} }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012", "topics": {{ }} }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012", "topics": {{ "#tcot", "#ccot", "#NewGuards", "#BreitbartArmy", "#patriotwttp://t.co/vJxzrQUE" }} }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012", "topics": {{ }} }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012", "topics": {{ }} }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012", "topics": {{ "#Obama", "#WW3" }} }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012", "topics": {{ "#Obama" }} }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012", "topics": {{ }} }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012", "topics": {{ "#tcot", "#antiobama" }} }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012", "topics": {{ "#Obama", "#wars" }} }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-functions/getCapital/getCapital.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/getCapital/getCapital.1.adm
new file mode 100644
index 0000000..16e9591
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/getCapital/getCapital.1.adm
@@ -0,0 +1,6 @@
+{ "country": "England", "capital": "London" }
+{ "country": "Italy", "capital": "Rome" }
+{ "country": "China", "capital": "Beijing" }
+{ "country": "United States", "capital": "Washington D.C." }
+{ "country": "India", "capital": "New Delhi" }
+{ "country": "Jupiter", "capital": "NOT_FOUND" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-functions/insert-from-select/insert-from-select.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/insert-from-select/insert-from-select.1.adm
new file mode 100644
index 0000000..a839cbc
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/insert-from-select/insert-from-select.1.adm
@@ -0,0 +1 @@
+{ "id": -1, "text": "UNIVERSITY OF CALIFORNIA, IRVINE" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-functions/mysum/mysum.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/mysum/mysum.1.adm
new file mode 100644
index 0000000..7f8f011
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/mysum/mysum.1.adm
@@ -0,0 +1 @@
+7
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-functions/toUpper/toUpper.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/toUpper/toUpper.1.adm
new file mode 100644
index 0000000..a839cbc
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-functions/toUpper/toUpper.1.adm
@@ -0,0 +1 @@
+{ "id": -1, "text": "UNIVERSITY OF CALIFORNIA, IRVINE" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/dataverseDataset/dataverseDataset.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/dataverseDataset/dataverseDataset.1.adm
new file mode 100644
index 0000000..330c347
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/dataverseDataset/dataverseDataset.1.adm
@@ -0,0 +1,2 @@
+{ "DataverseName": "Metadata", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Thu Apr 25 11:17:56 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "externallibtest", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Thu Apr 25 11:18:12 PDT 2013", "PendingOp": 0 }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm
new file mode 100644
index 0000000..4a8369b
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm
@@ -0,0 +1,12 @@
+{ "DataverseName": "externallibtest", "Name": "testlib#allTypes", "Arity": "1", "Params": [ "AllType" ], "ReturnType": "AllType", "Definition": "edu.uci.ics.asterix.external.library.AllTypesFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
+{ "DataverseName": "externallibtest", "Name": "testlib#echoDelay", "Arity": "1", "Params": [ "TweetMessageType" ], "ReturnType": "TweetMessageType", "Definition": "edu.uci.ics.asterix.external.library.EchoDelayFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
+{ "DataverseName": "externallibtest", "Name": "testlib#getCapital", "Arity": "1", "Params": [ "ASTRING" ], "ReturnType": "CountryCapitalType", "Definition": "edu.uci.ics.asterix.external.library.CapitalFinderFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
+{ "DataverseName": "externallibtest", "Name": "testlib#mysum", "Arity": "2", "Params": [ "AINT32", "AINT32" ], "ReturnType": "AINT32", "Definition": "edu.uci.ics.asterix.external.library.SumFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
+{ "DataverseName": "externallibtest", "Name": "testlib#parseTweet", "Arity": "1", "Params": [ "TweetInputType" ], "ReturnType": "TweetOutputType", "Definition": "edu.uci.ics.asterix.external.library.ParseTweetFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
+{ "DataverseName": "externallibtest", "Name": "testlib#toUpper", "Arity": "1", "Params": [ "TextType" ], "ReturnType": "TextType", "Definition": "edu.uci.ics.asterix.external.library.UpperCaseFactory
+ ", "Language": "JAVA", "Kind": "SCALAR" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/libraryDataset/libraryDataset.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/libraryDataset/libraryDataset.1.adm
new file mode 100644
index 0000000..573db0c
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/libraryDataset/libraryDataset.1.adm
@@ -0,0 +1 @@
+{ "DataverseName": "externallibtest", "Name": "testlib", "Timestamp": "Mon Apr 22 23:36:55 PDT 2013" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/testsuite.xml b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
new file mode 100644
index 0000000..be9ba0e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
@@ -0,0 +1,56 @@
+<test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+ <test-group name="library-functions">
+ <test-case FilePath="library-functions">
+ <compilation-unit name="mysum">
+ <output-dir compare="Text">mysum</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="library-functions">
+ <compilation-unit name="toUpper">
+ <output-dir compare="Text">toUpper</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="library-functions">
+ <compilation-unit name="insert-from-select">
+ <output-dir compare="Text">insert-from-select</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="library-functions">
+ <compilation-unit name="getCapital">
+ <output-dir compare="Text">getCapital</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="library-metadata">
+ <test-case FilePath="library-metadata">
+ <compilation-unit name="functionDataset">
+ <output-dir compare="Text">functionDataset</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="library-metadata">
+ <compilation-unit name="libraryDataset">
+ <output-dir compare="Text">libraryDataset</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="library-metadata">
+ <compilation-unit name="dataverseDataset">
+ <output-dir compare="Text">dataverseDataset</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="library-feeds">
+ <test-case FilePath="library-feeds">
+ <compilation-unit name="feed_ingest">
+ <output-dir compare="Text">feed_ingest</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="library-adapters">
+ <test-case FilePath="library-adapters">
+ <compilation-unit name="typed_adapter">
+ <output-dir compare="Text">typed_adapter</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+</test-suite>
+
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.1.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.1.mgx.aql
new file mode 100644
index 0000000..2d8a23e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.1.mgx.aql
@@ -0,0 +1 @@
+stop -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.2.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.2.mgx.aql
new file mode 100644
index 0000000..97ad91e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.2.mgx.aql
@@ -0,0 +1 @@
+install -n asterix -d externallibtest -l testlib -p ../asterix-external-data/target/testlib-zip-binary-assembly.zip
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.3.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.3.mgx.aql
new file mode 100644
index 0000000..4e99f33
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.3.mgx.aql
@@ -0,0 +1 @@
+start -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.4.query.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.4.query.aql
new file mode 100644
index 0000000..5a46092
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/installLibrary/installLibrary.4.query.aql
@@ -0,0 +1,2 @@
+for $x in dataset Metadata.Library
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.1.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.1.mgx.aql
new file mode 100644
index 0000000..2d8a23e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.1.mgx.aql
@@ -0,0 +1 @@
+stop -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.2.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.2.mgx.aql
new file mode 100644
index 0000000..1b5ae40
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.2.mgx.aql
@@ -0,0 +1 @@
+uninstall -n asterix -d externallibtest -l testlib
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.3.mgx.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.3.mgx.aql
new file mode 100644
index 0000000..4e99f33
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.3.mgx.aql
@@ -0,0 +1 @@
+start -n asterix
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.4.query.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.4.query.aql
new file mode 100644
index 0000000..5a46092
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/queries/asterix-lifecycle/uninstallLibrary/uninstallLibrary.4.query.aql
@@ -0,0 +1,2 @@
+for $x in dataset Metadata.Library
+return $x
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/installLibrary/installLibrary.1.adm b/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/installLibrary/installLibrary.1.adm
new file mode 100644
index 0000000..a5d5c9b
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/installLibrary/installLibrary.1.adm
@@ -0,0 +1 @@
+{ "DataverseName": "externallibtest", "Name": "testlib", "Timestamp": "Wed Apr 24 17:25:25 PDT 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/uninstallLibrary/uninstallLibrary.1.adm
similarity index 100%
copy from asterix-app/src/test/resources/runtimets/queries/distinct/query-issue443-2/query-issue443-2.1.ddl.aql
copy to asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/uninstallLibrary/uninstallLibrary.1.adm
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/testsuite.xml b/asterix-installer/src/test/resources/integrationts/lifecycle/testsuite.xml
index f1949ca..969da43 100644
--- a/asterix-installer/src/test/resources/integrationts/lifecycle/testsuite.xml
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/testsuite.xml
@@ -19,6 +19,16 @@
<output-dir compare="Text">backupRestore</output-dir>
</compilation-unit>
</test-case>
+ <test-case FilePath="asterix-lifecycle">
+ <compilation-unit name="installLibrary">
+ <output-dir compare="Text">installLibrary</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="asterix-lifecycle">
+ <compilation-unit name="uninstallLibrary">
+ <output-dir compare="Text">uninstallLibrary</output-dir>
+ </compilation-unit>
+ </test-case>
</test-group>
</test-suite>
diff --git a/asterix-installer/src/test/resources/transactionts/data/file_list.txt b/asterix-installer/src/test/resources/transactionts/data/file_list.txt
deleted file mode 100644
index 4832ad0..0000000
--- a/asterix-installer/src/test/resources/transactionts/data/file_list.txt
+++ /dev/null
@@ -1 +0,0 @@
-data/csv/fragile_01.csv
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
index 1aebe8d..6d0e431 100644
--- a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
@@ -8,4 +8,4 @@
use dataverse recovery;
load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="127.0.0.1://data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql
new file mode 100644
index 0000000..50775b0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql
@@ -0,0 +1,53 @@
+/*
+ * Test case Name : primary_plus_default_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float,
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float,
+
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create default secondary index on dataset clean Fragile */
+create index cfSidIdx on Fragile(sid);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql
new file mode 100644
index 0000000..5d7d6f2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : primary_plus_default_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql
new file mode 100644
index 0000000..12fd699
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/*
+ * Test case Name : primary_plus_default_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id % 28000,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR,
+ "text": $t.text,
+ "location": $t.location,
+ "text2": $t.text2
+ }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql
new file mode 100644
index 0000000..4d41446
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/*
+ * Test case Name : primary_plus_default_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.sid=1 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql
new file mode 100644
index 0000000..dd2cde5
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql
@@ -0,0 +1,10 @@
+/*
+ * Test case Name : primary_plus_default_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.sid=1 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql
new file mode 100644
index 0000000..11397d7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql
@@ -0,0 +1,54 @@
+/*
+ * Test case Name : primary_plus_keyword_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float,
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float,
+
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create keyword secondary index on dataset clean Fragile */
+create index cfText on Fragile(text) type keyword;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : primary_plus_keyword_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/*
+ * Test case Name : primary_plus_keyword_secondary_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id % 28000,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR,
+ "text": $t.text,
+ "location": $t.location,
+ "text2": $t.text2
+ }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql
new file mode 100644
index 0000000..f069443
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql
@@ -0,0 +1,5 @@
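+/*
+ * Test case Name : primary_plus_keyword_secondary_index.aql
+ * Description : Query before crash: return Fragile text fields whose word tokens are similar (~=) to the given string; exercises the keyword index.
+ * Expected Result : Success
+ */
+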
+use dataverse recovery;
+
+for $x in dataset Fragile
+where word-tokens($x.text) ~= word-tokens(" 1 20130417 1")
+return $x.text;
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql
new file mode 100644
index 0000000..f069443
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql
@@ -0,0 +1,5 @@
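+/*
+ * Test case Name : primary_plus_keyword_secondary_index.aql
+ * Description : Query after crash recovery: rerun the keyword-similarity query; the result must match the pre-crash answer.
+ * Expected Result : Success
+ */
+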
+use dataverse recovery;
+
+for $x in dataset Fragile
+where word-tokens($x.text) ~= word-tokens(" 1 20130417 1")
+return $x.text;
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql
new file mode 100644
index 0000000..8b83fc8
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql
@@ -0,0 +1,60 @@
+/*
+ * Test case Name : primary_plus_multiple_secondary_indices.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float,
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float,
+
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+
+/* Create rtree secondary index on dataset clean Fragile */
+create index cfLocation on Fragile(location) type rtree;
+
+create index cfText on Fragile(text) type keyword;
+
+create index cfSidIdx on Fragile(sid);
+
+create index cfText2Ix on Fragile(text2) type ngram(3);
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : primary_plus_multiple_secondary_indices.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql
@@ -0,0 +1,25 @@
+/*
+ * Test case Name : primary_plus_multiple_secondary_indices.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id % 28000,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR,
+ "text": $t.text,
+ "location": $t.location,
+ "text2": $t.text2
+ }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql
new file mode 100644
index 0000000..97935fa
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : primary_plus_multiple_secondary_indices.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile
+where contains($x.text2, "location") return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql
new file mode 100644
index 0000000..5e18be6
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql
@@ -0,0 +1,5 @@
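+/*
+ * Test case Name : primary_plus_multiple_secondary_indices.aql
+ * Description : Query after crash recovery: count Fragile records whose text2 contains "location"; the result must match the pre-crash answer.
+ * Expected Result : Success
+ */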
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile
+where contains($x.text2, "location") return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql
new file mode 100644
index 0000000..78c7f43
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql
@@ -0,0 +1,53 @@
+/*
+ * Test case Name : primary_plus_ngram_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float,
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float,
+
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create default secondary index on dataset clean Fragile */
+create index cfText2Ix on Fragile(text2) type ngram(3);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql
new file mode 100644
index 0000000..5d7d6f2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : primary_plus_ngram_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql
new file mode 100644
index 0000000..12fd699
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/*
+ * Test case Name : primary_plus_ngram_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id % 28000,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR,
+ "text": $t.text,
+ "location": $t.location,
+ "text2": $t.text2
+ }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql
new file mode 100644
index 0000000..434e8b0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : primary_plus_ngram_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile
+where contains($x.text2, "location") return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql
new file mode 100644
index 0000000..21a5d28
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : primary_plus_ngram_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile
+where contains($x.text2, "location") return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql
new file mode 100644
index 0000000..4faa6e3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql
@@ -0,0 +1,54 @@
+/*
+ * Test case Name : primary_plus_rtree_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float,
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float,
+
+ /* new string field and location field*/
+ text: string,
+ location: point,
+ text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+
+/* Create rtree secondary index on dataset clean Fragile */
+create index cfLocation on Fragile(location) type rtree;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : primary_plus_rtree_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/*
+ * Test case Name : primary_plus_rtree_index.aql
+ * Description : Check that abort from duplicate key exception works and crash recovery works after the abort.
+ * Expected Result : Success
+ * Date : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id % 28000,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR,
+ "text": $t.text,
+ "location": $t.location,
+ "text2": $t.text2
+ }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql
new file mode 100644
index 0000000..9189b28
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql
@@ -0,0 +1,5 @@
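+/*
+ * Test case Name : primary_plus_rtree_index.aql
+ * Description : Query before crash: count Fragile records whose location intersects the given polygon; exercises the rtree index.
+ * Expected Result : Success
+ */
+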
+use dataverse recovery;
+
+count (for $x in dataset Fragile where
+spatial-intersect($x.location, create-polygon([0.0,0.0, 2.0,2.0, 0.0,2.0, 2.0,0.0]))
+return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql
new file mode 100644
index 0000000..9189b28
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql
@@ -0,0 +1,5 @@
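+/*
+ * Test case Name : primary_plus_rtree_index.aql
+ * Description : Query after crash recovery: rerun the spatial-intersect count; the result must match the pre-crash answer.
+ * Expected Result : Success
+ */
+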
+use dataverse recovery;
+
+count (for $x in dataset Fragile where
+spatial-intersect($x.location, create-polygon([0.0,0.0, 2.0,2.0, 0.0,2.0, 2.0,0.0]))
+return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql
new file mode 100644
index 0000000..d32bbb0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql
@@ -0,0 +1,19 @@
+/*
+ * Test case Name : dataset_recovery
+ * Description : Check that a dataset created before a crash is recovered: the first drop succeeds and a second drop fails.
+ * Expected Result : Success
+ * Date :
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+ id: int32,
+ text: string
+};
+
+create dataset SampleDS(SampleType)
+primary key id;
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql
new file mode 100644
index 0000000..da254c2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop dataset SampleDS;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql
new file mode 100644
index 0000000..da254c2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop dataset SampleDS;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql
new file mode 100644
index 0000000..762c652
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Test case Name : datatype_recovery
+ * Description : Check that a datatype created before a crash is recovered: the first drop succeeds and a second drop fails.
+ * Expected Result : Success
+ * Date :
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+ id: int32,
+ text: string
+};
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql
new file mode 100644
index 0000000..9b7dce7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop type SampleType;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql
new file mode 100644
index 0000000..9b7dce7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop type SampleType;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql
new file mode 100644
index 0000000..93139d6
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql
@@ -0,0 +1,9 @@
+/*
+ * Test case Name : dataverse_recovery
+ * Description : Check that a dataverse created before a crash is recovered: the first drop succeeds and a second drop fails.
+ * Expected Result : Success
+ * Date :
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql
new file mode 100644
index 0000000..9b12205
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql
@@ -0,0 +1 @@
+drop dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql
new file mode 100644
index 0000000..9b12205
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql
@@ -0,0 +1 @@
+drop dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Test case Name : delete_after_recovery.aql
+ * Description : Check that deletes on a dataset loaded before a crash work correctly after recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : delete_after_recovery.aql
+ * Description : Check that deletes on a dataset loaded before a crash work correctly after recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql
new file mode 100644
index 0000000..a5e94d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql
@@ -0,0 +1,5 @@
+use dataverse recovery;
+
+delete $r from dataset Fragile_raw
+ where $r.row_id % 2 = 1
+;
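Deleting the odd row_ids leaves the even half of the data: fragile_01.csv loads
258176 rows elsewhere in this suite, so, assuming row_ids split evenly between odd
and even, 129088 rows remain, matching the 129088i64 expected result added below.
A sketch of the equivalent check, assuming the same dataverse and dataset:

    use dataverse recovery;

    /* should equal the post-delete count: 129088 */
    count (for $x in dataset Fragile_raw where $x.row_id % 2 = 0 return $x);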
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql
new file mode 100644
index 0000000..a63aee2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : delete_after_recovery.aql
+ * Description : Check that a delete on a dataset created and loaded before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile_raw return $x);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql
new file mode 100644
index 0000000..736f6cd
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql
@@ -0,0 +1,7 @@
+create dataverse recovery;
+
+use dataverse recovery;
+
+create function add($a, $b) {
+ $a + $b
+};
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql
new file mode 100644
index 0000000..9bafad3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name : function_recovery.aql
+ * Description : Check that a function created before a system crash can be invoked and dropped after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+let $a := 1
+let $b := 2
+return add($a, $b);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql
new file mode 100644
index 0000000..fddc3fc
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql
@@ -0,0 +1,3 @@
+use dataverse recovery;
+
+drop function add @ 2;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql
new file mode 100644
index 0000000..fddc3fc
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql
@@ -0,0 +1,3 @@
+use dataverse recovery;
+
+drop function add @ 2;
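Steps 6 and 7 refer to the function by name and arity: "add @ 2" is the
two-argument add created in step 2. Step 6 drops it after recovery, proving the
function's metadata survived the crash, and step 7 is an errddl step, so the
repeated drop is expected to fail, confirming the function is gone. A sketch of
the name-and-arity convention, assuming the same dataverse:

    use dataverse recovery;

    create function add($a, $b) { $a + $b };  /* registered with arity 2 */
    drop function add @ 2;                    /* drops the two-argument add */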
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Test case Name : insert_after_recovery.aql
+ * Description : Check that an insert into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : insert_after_recovery.aql
+ * Description : Check that an insert into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql
new file mode 100644
index 0000000..26d998d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql
@@ -0,0 +1,15 @@
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+ for $t in dataset Fragile_raw
+ return {
+ "row_id": $t.row_id,
+ "sid": $t.sid,
+ "date": date($t.date),
+ "day": $t.day,
+ "time": parse-time($t.time, "h:m:s"),
+ "bpm": $t.bpm,
+ "RR": $t.RR
+ }
+);
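The insert casts the raw string fields into the cleaned FragileType's temporal
types: date() parses an ISO "YYYY-MM-DD" string, and parse-time with the same
"h:m:s" pattern the insert uses accepts hour, minute, and second values without
zero padding. A sketch on literal inputs; the literals are illustrative, not
taken from the test data:

    use dataverse recovery;

    let $d := date("2013-09-25")
    let $t := parse-time("8:9:5", "h:m:s")
    return { "date": $d, "time": $t };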
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql
new file mode 100644
index 0000000..f8ed3d0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : insert_after_recovery.aql
+ * Description : Check that an insert into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile return $x);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Test case Name : load_after_recovery.aql
+ * Description : Check that a load into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+ row_id: int32,
+ sid: int32,
+ date: string,
+ day: int32,
+ time: string,
+ bpm: int32,
+ RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+ row_id: int32,
+ sid: int32,
+ date: date,
+ day: int32,
+ time: time,
+ bpm: int32,
+ RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name : load_after_recovery.aql
+ * Description : Check that a load into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql
new file mode 100644
index 0000000..fcd9e05
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql
@@ -0,0 +1,10 @@
+/*
+ * Test case Name : load_after_recovery.aql
+ * Description : Check that a load into a dataset created before a system crash works after crash recovery.
+ * Expected Result : Success
+ * Date : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile_raw return $x);
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql
new file mode 100644
index 0000000..77a573b
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql
@@ -0,0 +1,21 @@
+/*
+ * Test case Name : secondary_index_recovery.aql
+ * Description : Check that a secondary index created before a system crash can be dropped after crash recovery.
+ * Expected Result : Success
+ * Date :
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+ id: int32,
+ text: string
+};
+
+create dataset SampleDS(SampleType)
+primary key id;
+
+create index SampleDSix on SampleDS(text);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql
new file mode 100644
index 0000000..d765b16
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop index SampleDS.SampleDSix;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql
new file mode 100644
index 0000000..d765b16
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop index SampleDS.SampleDSix;
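As in the function test, the drop in step 5 succeeds only if the index metadata
created in step 2 survived the crash and restart, and step 6 repeats it as an
errddl, expecting a failure that confirms the index is gone. Note that the index
is addressed through its dataset. A sketch, assuming the SampleDV dataverse from
step 2:

    use dataverse SampleDV;

    /* qualified as <dataset>.<index>; succeeds once, then fails if repeated */
    drop index SampleDS.SampleDSix;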
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm
new file mode 100644
index 0000000..88859f8
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm
@@ -0,0 +1 @@
+129088i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm
new file mode 100644
index 0000000..e440e5c
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm
new file mode 100644
index 0000000..cefb395
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm
@@ -0,0 +1 @@
+258176i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm
new file mode 100644
index 0000000..cefb395
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm
@@ -0,0 +1 @@
+258176i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/configure_and_validate.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/configure_and_validate.sh
new file mode 100755
index 0000000..643e9ad
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/configure_and_validate.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix configure;
+$MANAGIX_HOME/bin/managix validate;
+$MANAGIX_HOME/bin/managix validate -c $MANAGIX_HOME/clusters/local/local.xml;
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/shutdown.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/shutdown.sh
new file mode 100755
index 0000000..4df5a05
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/shutdown.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix shutdown;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/dml_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/testsuite.xml b/asterix-installer/src/test/resources/transactionts/testsuite.xml
index 1991279..f2bd2b5 100644
--- a/asterix-installer/src/test/resources/transactionts/testsuite.xml
+++ b/asterix-installer/src/test/resources/transactionts/testsuite.xml
@@ -13,11 +13,98 @@
! limitations under the License.
!-->
<test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+
<test-group name="recover_after_abort">
+
<test-case FilePath="recover_after_abort">
<compilation-unit name="primary_index_only">
<output-dir compare="Text">primary_index_only</output-dir>
</compilation-unit>
</test-case>
+
+ <test-case FilePath="recover_after_abort">
+ <compilation-unit name="primary_plus_default_secondary_index">
+ <output-dir compare="Text">primary_plus_default_secondary_index</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recover_after_abort">
+ <compilation-unit name="primary_plus_rtree_index">
+ <output-dir compare="Text">primary_plus_rtree_index</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recover_after_abort">
+ <compilation-unit name="primary_plus_keyword_secondary_index">
+ <output-dir compare="Text">primary_plus_keyword_secondary_index</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recover_after_abort">
+ <compilation-unit name="primary_plus_ngram_index">
+ <output-dir compare="Text">primary_plus_ngram_index</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recover_after_abort">
+ <compilation-unit name="primary_plus_multiple_secondary_indices">
+ <output-dir compare="Text">primary_plus_multiple_secondary_indices</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ </test-group>
+
+ <test-group name="recovery_ddl">
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="dataverse_recovery">
+ <output-dir compare="Text">dataverse_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="datatype_recovery">
+ <output-dir compare="Text">datatype_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="dataset_recovery">
+ <output-dir compare="Text">dataset_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="secondary_index_recovery">
+ <output-dir compare="Text">secondary_index_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="load_after_recovery">
+ <output-dir compare="Text">load_after_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="insert_after_recovery">
+ <output-dir compare="Text">insert_after_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="delete_after_recovery">
+ <output-dir compare="Text">delete_after_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
+ <test-case FilePath="recovery_ddl">
+ <compilation-unit name="function_recovery">
+ <output-dir compare="Text">function_recovery</output-dir>
+ </compilation-unit>
+ </test-case>
+
</test-group>
+
</test-suite>
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
index aa0d253..37d4b0f 100644
--- a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
@@ -19,7 +19,7 @@
<parent>
<artifactId>asterix-maven-plugins</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<packaging>maven-plugin</packaging>
diff --git a/asterix-maven-plugins/pom.xml b/asterix-maven-plugins/pom.xml
index 56bc697..1da8da7 100644
--- a/asterix-maven-plugins/pom.xml
+++ b/asterix-maven-plugins/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<dependencies>
@@ -35,5 +35,6 @@
<modules>
<module>lexer-generator-maven-plugin</module>
+ <module>record-manager-generator-maven-plugin</module>
</modules>
</project>
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml b/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml
new file mode 100644
index 0000000..5f2ffaf
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/pom.xml
@@ -0,0 +1,72 @@
+<!--
+ ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License");
+ ! you may not use this file except in compliance with the License.
+ ! you may obtain a copy of the License from
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing, software
+ ! distributed under the License is distributed on an "AS IS" BASIS,
+ ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ! See the License for the specific language governing permissions and
+ ! limitations under the License.
+ !-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>record-manager-generator-maven-plugin</artifactId>
+ <parent>
+ <artifactId>asterix-maven-plugins</artifactId>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <version>0.8.4-SNAPSHOT</version>
+ </parent>
+
+ <packaging>maven-plugin</packaging>
+ <name>record-manager-generator-maven-plugin</name>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-plugin-api</artifactId>
+ <version>2.0.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ <version>2.0.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-project</artifactId>
+ <version>2.0.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ <version>20090211</version>
+ <type>jar</type>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/Generator.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/Generator.java
new file mode 100644
index 0000000..14d8a7e
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/Generator.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.recordmanagergenerator;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+import edu.uci.ics.asterix.recordmanagergenerator.RecordType.Field;
+
+public class Generator {
+
+ public enum TemplateType {
+ RECORD_MANAGER,
+ ARENA_MANAGER,
+ SUPPORT
+ }
+
+ public static void generateSource(
+ TemplateType tmplType,
+ String packageName,
+ RecordType rec,
+ InputStream is,
+ StringBuilder sb,
+ boolean debug) {
+ try {
+ BufferedReader in = new BufferedReader(new InputStreamReader(is));
+
+ switch (tmplType) {
+ case RECORD_MANAGER:
+ generateMemoryManagerSource(packageName, rec, in, sb, debug);
+ break;
+ case ARENA_MANAGER:
+ generateArenaManagerSource(packageName, rec, in, sb, debug);
+ break;
+ case SUPPORT:
+ generateSupportFileSource(packageName, in, sb, debug);
+ break;
+ default:
+ throw new IllegalArgumentException();
+ }
+ } catch (IOException ioe) {
+ // fail fast instead of silently producing incomplete generated sources
+ throw new IllegalStateException(ioe);
+ }
+
+ }
+
+ private static void generateMemoryManagerSource(
+ String packageName,
+ RecordType resource,
+ BufferedReader in,
+ StringBuilder sb,
+ boolean debug) throws IOException {
+ String line = null;
+ String indent = " ";
+
+ while((line = in.readLine()) != null) {
+ if (line.contains("@PACKAGE@")) {
+ line = line.replace("@PACKAGE@", packageName);
+ }
+ if (line.contains("@E@")) {
+ line = line.replace("@E@", resource.name);
+ }
+ if (line.contains("@DEBUG@")) {
+ line = line.replace("@DEBUG@", Boolean.toString(debug));
+ }
+ if (line.contains("@CONSTS@")) {
+ resource.appendConstants(sb, indent, 1);
+ sb.append('\n');
+ } else if (line.contains("@METHODS@")) {
+ for (int i = 0; i < resource.size(); ++i) {
+ final Field field = resource.fields.get(i);
+ if (field.accessible) {
+ field.appendMemoryManagerGetMethod(sb, indent, 1);
+ sb.append('\n');
+ field.appendMemoryManagerSetMethod(sb, indent, 1);
+ sb.append('\n');
+ }
+ }
+ } else if (line.contains("@INIT_SLOT@")) {
+ for (int i = 0; i < resource.size(); ++i) {
+ final Field field = resource.fields.get(i);
+ field.appendInitializers(sb, indent, 3);
+ }
+ } else if (line.contains("@CHECK_SLOT@")) {
+ for (int i = 0; i < resource.size(); ++i) {
+ final Field field = resource.fields.get(i);
+ field.appendChecks(sb, indent, 3);
+ }
+ } else if (line.contains("@PRINT_BUFFER@")) {
+ resource.appendBufferPrinter(sb, indent, 3);
+ sb.append('\n');
+ } else {
+ sb.append(line).append('\n');
+ }
+ }
+ }
+
+ private static void generateArenaManagerSource(
+ String packageName,
+ RecordType resource,
+ BufferedReader in,
+ StringBuilder sb,
+ boolean debug) throws IOException {
+ String line = null;
+ String indent = " ";
+
+ while((line = in.readLine()) != null) {
+ if (line.contains("@PACKAGE@")) {
+ line = line.replace("@PACKAGE@", packageName);
+ }
+ if (line.contains("@E@")) {
+ line = line.replace("@E@", resource.name);
+ }
+ if (line.contains("@DEBUG@")) {
+ line = line.replace("@DEBUG@", Boolean.toString(debug));
+ }
+ if (line.contains("@METHODS@")) {
+ for (int i = 0; i < resource.size(); ++i) {
+ final Field field = resource.fields.get(i);
+ if (field.accessible) {
+ field.appendArenaManagerGetMethod(sb, indent, 1);
+ sb.append('\n');
+ field.appendArenaManagerSetMethod(sb, indent, 1);
+ sb.append('\n');
+ }
+ }
+ } else {
+ sb.append(line).append('\n');
+ }
+ }
+ }
+
+ private static void generateSupportFileSource(
+ String packageName,
+ BufferedReader in,
+ StringBuilder sb,
+ boolean debug) throws IOException {
+ String line = null;
+ while((line = in.readLine()) != null) {
+ if (line.contains("@PACKAGE@")) {
+ line = line.replace("@PACKAGE@", packageName);
+ }
+ sb.append(line).append('\n');
+ }
+ }
+}
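
For orientation, the generation above is plain line-by-line token substitution: inline markers
such as @PACKAGE@ and @E@ are rewritten in place, while block markers such as @METHODS@ cause
generated members to be emitted instead of the template line. A minimal, self-contained sketch
of the idea (hypothetical names, not part of the plugin):

    import java.io.BufferedReader;
    import java.io.StringReader;

    public class TemplateSubstitutionSketch {
        public static void main(String[] args) throws Exception {
            String template = "package @PACKAGE@;\npublic class @E@RecordManager {\n@METHODS@\n}";
            StringBuilder sb = new StringBuilder();
            BufferedReader in = new BufferedReader(new StringReader(template));
            String line;
            while ((line = in.readLine()) != null) {
                // inline markers: substituted within the line
                line = line.replace("@PACKAGE@", "edu.example.generated").replace("@E@", "Job");
                if (line.contains("@METHODS@")) {
                    // block marker: the line itself is replaced by generated members
                    sb.append("    // generated getters/setters go here\n");
                } else {
                    sb.append(line).append('\n');
                }
            }
            System.out.println(sb);
        }
    }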
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java
new file mode 100644
index 0000000..13a80f1
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.recordmanagergenerator;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.Reader;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.MojoFailureException;
+import org.apache.maven.project.MavenProject;
+import org.json.JSONException;
+
+/**
+ * @goal generate-record-manager
+ * @phase generate-sources
+ * @requiresDependencyResolution compile
+ */
+public class RecordManagerGeneratorMojo extends AbstractMojo {
+
+ /**
+ * parameter injected from pom.xml
+ *
+ * @parameter
+ */
+ private boolean debug;
+ /**
+ * parameter injected from pom.xml
+ *
+ * @parameter
+ * @required
+ */
+ private String packageName;
+ /**
+ * parameter injected from pom.xml
+ *
+ * @parameter
+ * @required
+ */
+ private File[] inputFiles;
+ /**
+ * @parameter default-value="${project}"
+ * @required
+ * @readonly
+ */
+ MavenProject project;
+
+
+ String recordManagerTemplate = "RecordManager.java";
+ String arenaManagerTemplate = "ArenaManager.java";
+ String[] supportTemplates = { "RecordManagerStats.java", "AllocInfo.java", "TypeUtil.java" };
+
+ private Map<String, RecordType> typeMap;
+
+ public RecordManagerGeneratorMojo() {
+ }
+
+ private void readRecordTypes() throws MojoExecutionException {
+ if (debug) {
+ getLog().info("generating debug code");
+ }
+
+ typeMap = new HashMap<String, RecordType>();
+
+ for (int i = 0; i < inputFiles.length; ++i) {
+ try {
+ getLog().info("reading " + inputFiles[i].toString());
+ Reader read = new FileReader(inputFiles[i]);
+ RecordType type = RecordType.read(read);
+ // always add allocId to enable tracking of allocations
+ type.addField("alloc id", RecordType.Type.SHORT, null);
+ type.addToMap(typeMap);
+ } catch (FileNotFoundException fnfe) {
+ throw new MojoExecutionException("cound not find type description file " + inputFiles[i], fnfe);
+ } catch (JSONException jse) {
+ throw new MojoExecutionException("cound not parse type description file " + inputFiles[i], jse);
+ }
+ }
+ }
+
+ public void execute() throws MojoExecutionException, MojoFailureException {
+ String outputPath = project.getBuild().getDirectory() + File.separator
+ + "generated-sources" + File.separator
+ + "java" + File.separator
+ + packageName.replace('.', File.separatorChar);
+ File dir = new File(outputPath);
+ if (!dir.exists()) {
+ dir.mkdirs();
+ }
+
+ readRecordTypes();
+
+ for (String recordType : typeMap.keySet()) {
+ generateSource(Generator.TemplateType.RECORD_MANAGER, recordManagerTemplate, recordType, outputPath);
+ generateSource(Generator.TemplateType.ARENA_MANAGER, arenaManagerTemplate, recordType, outputPath);
+ }
+
+ for (int i = 0; i < supportTemplates.length; ++i) {
+ generateSource(Generator.TemplateType.SUPPORT, supportTemplates[i], "", outputPath);
+ }
+ }
+
+ private void generateSource(Generator.TemplateType mgrType, String template, String recordType, String outputPath) throws MojoFailureException {
+ InputStream is = getClass().getClassLoader().getResourceAsStream(template);
+ if (is == null) {
+ throw new MojoFailureException("template '" + template + "' not found in classpath");
+ }
+
+ StringBuilder sb = new StringBuilder();
+ File outputFile = new File(outputPath + File.separator + recordType + template);
+
+ try {
+ getLog().info("generating " + outputFile.toString());
+
+ Generator.generateSource(mgrType, packageName, typeMap.get(recordType), is, sb, debug);
+ is.close();
+
+ FileWriter outWriter = new FileWriter(outputFile);
+ outWriter.write(sb.toString());
+ outWriter.close();
+ } catch (Exception ex) {
+ getLog().error(ex);
+ throw new MojoFailureException("failed to generate " + outputFile.toString());
+ }
+ }
+}
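
The debug, packageName and inputFiles parameters above are injected from the consuming module's
pom.xml. A sketch of such a configuration (paths and package name hypothetical; the exact child
element used to populate the File[] may differ):

    <plugin>
      <groupId>edu.uci.ics.asterix</groupId>
      <artifactId>record-manager-generator-maven-plugin</artifactId>
      <version>0.8.4-SNAPSHOT</version>
      <configuration>
        <debug>false</debug>
        <packageName>edu.example.generated</packageName>
        <inputFiles>
          <inputFile>src/main/resources/Job.json</inputFile>
        </inputFiles>
      </configuration>
      <executions>
        <execution>
          <goals>
            <goal>generate-record-manager</goal>
          </goals>
        </execution>
      </executions>
    </plugin>

Per execute() above, the generated sources land under target/generated-sources/java/ followed by
the package path; the mojo does not register that directory itself, so the consuming module still
needs it on its compile source path.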
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordType.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordType.java
new file mode 100644
index 0000000..a0f6c61
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/recordmanagergenerator/RecordType.java
@@ -0,0 +1,388 @@
+/*
+ * Copyright 2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.recordmanagergenerator;
+
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Map;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+
+public class RecordType {
+
+ enum Type {
+ BYTE (1, "byte", "get", "put", "(byte)0xde", "TypeUtil.Byte.append"),
+ SHORT (2, "short", "getShort", "putShort", "(short)0xdead", "TypeUtil.Short.append"),
+ INT (4, "int", "getInt", "putInt", "0xdeadbeef", "TypeUtil.Int.append"),
+ GLOBAL(8, "long", "getLong", "putLong", "0xdeadbeefdeadbeefl", "TypeUtil.Global.append");
+
+ Type(int size, String javaType, String bbGetter, String bbSetter, String deadMemInitializer, String appender) {
+ this.size = size;
+ this.javaType = javaType;
+ this.bbGetter = bbGetter;
+ this.bbSetter = bbSetter;
+ this.deadMemInitializer = deadMemInitializer;
+ this.appender = appender;
+ }
+
+ int size;
+ String javaType;
+ String bbGetter;
+ String bbSetter;
+ String deadMemInitializer;
+ String appender;
+ }
+
+ static class Field {
+
+ String name;
+ Type type;
+ String initial;
+ int offset;
+ boolean accessible = true;
+
+ Field(String name, Type type, String initial, int offset, boolean accessible) {
+ this.name = name;
+ this.type = type;
+ this.initial = initial;
+ this.offset = offset;
+ this.accessible = accessible;
+ }
+
+ public static Field fromJSON(JSONObject obj) throws JSONException {
+ String name = obj.getString("name");
+ Type type = parseType(obj.getString("type"));
+ String initial = obj.optString("initial", null);
+ return new Field(name, type, initial, -1, true);
+ }
+
+ private static Type parseType(String string) {
+ string = string.toUpperCase();
+ if (string.equals("GLOBAL")) {
+ return Type.GLOBAL;
+ } else if (string.equals("INT")) {
+ return Type.INT;
+ } else if (string.equals("SHORT")) {
+ return Type.SHORT;
+ } else if (string.equals("BYTE")) {
+ return Type.BYTE;
+ }
+ throw new IllegalArgumentException("Unknown type \"" + string + "\"");
+ }
+
+ String methodName(String prefix) {
+ String words[] = name.split(" ");
+ assert(words.length > 0);
+ StringBuilder sb = new StringBuilder(prefix);
+ for (int j = 0; j < words.length; ++j) {
+ String word = words[j];
+ sb.append(word.substring(0, 1).toUpperCase());
+ sb.append(word.substring(1));
+ }
+ return sb.toString();
+ }
+
+ StringBuilder appendMemoryManagerGetMethod(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("public ")
+ .append(type.javaType)
+ .append(' ')
+ .append(methodName("get"))
+ .append("(int slotNum) {\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("final Buffer buf = buffers.get(slotNum / NO_SLOTS);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("buf.checkSlot(slotNum % NO_SLOTS);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("final ByteBuffer b = buf.bb;\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("return b.")
+ .append(type.bbGetter)
+ .append("((slotNum % NO_SLOTS) * ITEM_SIZE + ")
+ .append(offsetName())
+ .append(");\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ return sb;
+ }
+
+ StringBuilder appendMemoryManagerSetMethod(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("public void ")
+ .append(methodName("set"))
+ .append("(int slotNum, ")
+ .append(type.javaType)
+ .append(" value) {\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("final ByteBuffer b = buffers.get(slotNum / NO_SLOTS).bb;\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("b.")
+ .append(type.bbSetter)
+ .append("((slotNum % NO_SLOTS) * ITEM_SIZE + ")
+ .append(offsetName())
+ .append(", value);\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ return sb;
+ }
+
+ StringBuilder appendArenaManagerGetMethod(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("public ")
+ .append(type.javaType)
+ .append(' ')
+ .append(methodName("get"))
+ .append("(long slotNum) {\n");
+ if (initial != null) {
+ sb = indent(sb, indent, level + 1);
+ sb.append("if (TRACK_ALLOC_ID) checkAllocId(slotNum);\n");
+ }
+ sb = indent(sb, indent, level + 1);
+ sb.append("final int arenaId = TypeUtil.Global.arenaId(slotNum);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("final int localId = TypeUtil.Global.localId(slotNum);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("return get(arenaId).")
+ .append(methodName("get"))
+ .append("(localId);\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ return sb;
+ }
+
+ StringBuilder appendArenaManagerSetMethod(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("public void ")
+ .append(methodName("set"))
+ .append("(long slotNum, ")
+ .append(type.javaType)
+ .append(" value) {\n");
+ if (initial != null) {
+ sb = indent(sb, indent, level + 1);
+ sb.append("if (TRACK_ALLOC_ID) checkAllocId(slotNum);\n");
+ }
+ sb = indent(sb, indent, level + 1);
+ sb.append("final int arenaId = TypeUtil.Global.arenaId(slotNum);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("final int localId = TypeUtil.Global.localId(slotNum);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("get(arenaId).")
+ .append(methodName("set"))
+ .append("(localId, value);\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ return sb;
+ }
+
+ StringBuilder appendInitializers(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("bb.")
+ .append(type.bbSetter)
+ .append("(slotNum * ITEM_SIZE + ")
+ .append(offsetName())
+ .append(", ");
+ if (initial != null) {
+ sb.append(initial);
+ } else {
+ sb.append(type.deadMemInitializer);
+ }
+ sb.append(");\n");
+ return sb;
+ }
+
+ StringBuilder appendChecks(StringBuilder sb, String indent, int level) {
+ if (initial == null) {
+ return sb;
+ }
+ sb = indent(sb, indent, level);
+ sb.append("if (bb.")
+ .append(type.bbGetter)
+ .append("(itemOffset + ")
+ .append(offsetName())
+ .append(") == ")
+ .append(type.deadMemInitializer)
+ .append(") {\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("String msg = \"invalid value in field ")
+ .append(offsetName())
+ .append(" of slot \" + slotNum;\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("throw new IllegalStateException(msg);\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ return sb;
+ }
+
+ String offsetName() {
+ String words[] = name.split(" ");
+ assert(words.length > 0);
+ StringBuilder sb = new StringBuilder(words[0].toUpperCase());
+ for (int j = 1; j < words.length; ++j) {
+ sb.append("_").append(words[j].toUpperCase());
+ }
+ sb.append("_OFF");
+ return sb.toString();
+ }
+
+ int offset() {
+ return offset;
+ }
+ }
+
+ String name;
+ ArrayList<Field> fields;
+ int totalSize;
+ boolean modifiable = true;
+
+ static StringBuilder indent(StringBuilder sb, String indent, int level) {
+ for (int i = 0; i < level; ++i) {
+ sb.append(indent);
+ }
+ return sb;
+ }
+
+ public RecordType(String name) {
+ this.name = name;
+ fields = new ArrayList<Field>();
+ addField("next free slot", Type.INT, "-1", false);
+ }
+
+ public static RecordType read(Reader reader) throws JSONException {
+ JSONTokener tok = new JSONTokener(reader);
+ JSONObject obj = new JSONObject(tok);
+ return fromJSON(obj);
+ }
+
+ public static RecordType fromJSON(JSONObject obj) throws JSONException {
+ RecordType result = new RecordType(obj.getString("name"));
+ JSONArray fields = obj.getJSONArray("fields");
+ for (int i = 0; i < fields.length(); ++i) {
+ JSONObject field = fields.getJSONObject(i);
+ result.fields.add(Field.fromJSON(field));
+ }
+ return result;
+ }
+
+ public void addToMap(Map<String, RecordType> map) {
+ modifiable = false;
+ calcOffsetsAndSize();
+ map.put(name, this);
+ }
+
+ public void addField(String name, Type type, String initial) {
+ addField(name, type, initial, true);
+ }
+
+ private void addField(String name, Type type, String initial, boolean accessible) {
+ if (! modifiable) {
+ throw new IllegalStateException("cannot modify type anmore");
+ }
+ fields.add(new Field(name, type, initial, -1, accessible));
+ }
+
+ private void calcOffsetsAndSize() {
+ Collections.sort(fields, new Comparator<Field>() {
+ public int compare(Field left, Field right) {
+ return right.type.size - left.type.size;
+ }
+ });
+ // sort fields by size and align the items
+ totalSize = 0;
+ int alignment = 0;
+ for (int i = 0; i < fields.size(); ++i) {
+ final Field field = fields.get(i);
+ assert field.offset == -1;
+ field.offset = totalSize;
+ final int size = field.type.size;
+ totalSize += size;
+ if (size > alignment) alignment = size;
+ }
+ if (totalSize % alignment != 0) {
+ totalSize = ((totalSize / alignment) + 1) * alignment;
+ }
+ }
+
+ int size() {
+ return fields.size();
+ }
+
+ static String padRight(String s, int n) {
+ return String.format("%1$-" + n + "s", s);
+ }
+
+ static String padLeft(String s, int n) {
+ return String.format("%1$" + n + "s", s);
+ }
+
+ StringBuilder appendConstants(StringBuilder sb, String indent, int level) {
+ sb = indent(sb, indent, level);
+ sb.append("public static int ITEM_SIZE = ")
+ .append(totalSize)
+ .append(";\n");
+ for (int i = 0; i < fields.size(); ++i) {
+ final Field field = fields.get(i);
+ sb = indent(sb, indent, level);
+ sb.append("public static int ")
+ .append(field.offsetName())
+ .append(" = ")
+ .append(field.offset).append("; // size: ")
+ .append(field.type.size).append("\n");
+ }
+ return sb;
+ }
+
+ StringBuilder appendBufferPrinter(StringBuilder sb, String indent, int level) {
+ int maxNameWidth = 0;
+ for (int i = 0; i < fields.size(); ++i) {
+ int width = fields.get(i).name.length();
+ if (width > maxNameWidth) {
+ maxNameWidth = width;
+ }
+ }
+ for (int i = 0; i < fields.size(); ++i) {
+ final Field field = fields.get(i);
+ sb = indent(sb, indent, level);
+ sb.append("sb.append(\"")
+ .append(padRight(field.name, maxNameWidth))
+ .append(" | \");\n");
+ sb = indent(sb, indent, level);
+ sb.append("for (int i = 0; i < NO_SLOTS; ++i) {\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append(field.type.javaType)
+ .append(" value = bb.")
+ .append(field.type.bbGetter)
+ .append("(i * ITEM_SIZE + ")
+ .append(field.offsetName())
+ .append(");\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("sb = ")
+ .append(field.type.appender)
+ .append("(sb, value);\n");
+ sb = indent(sb, indent, level + 1);
+ sb.append("sb.append(\" | \");\n");
+ sb = indent(sb, indent, level);
+ sb.append("}\n");
+ sb = indent(sb, indent, level);
+ sb.append("sb.append(\"\\n\");\n");
+ }
+ return sb;
+ }
+}
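
To make the JSON input and the offset computation concrete, here is a hedged example (type and
field names hypothetical). Recall that the constructor already prepends a "next free slot" INT
field and the mojo appends an "alloc id" SHORT:

    package edu.uci.ics.asterix.recordmanagergenerator;

    import java.io.StringReader;
    import java.util.HashMap;
    import java.util.Map;

    public class RecordTypeSketch {
        public static void main(String[] args) throws Exception {
            // hypothetical type description in the JSON shape parsed by RecordType.fromJSON()
            String json = "{ \"name\": \"Job\", \"fields\": ["
                    + "{ \"name\": \"last holder\", \"type\": \"INT\", \"initial\": \"-1\" },"
                    + "{ \"name\": \"job slot\", \"type\": \"GLOBAL\" } ] }";
            RecordType t = RecordType.read(new StringReader(json));
            t.addField("alloc id", RecordType.Type.SHORT, null); // mirrors what the mojo adds
            Map<String, RecordType> types = new HashMap<String, RecordType>();
            t.addToMap(types); // freezes the type and runs calcOffsetsAndSize()
        }
    }

After the stable sort by descending size, the layout is: job slot (GLOBAL, 8 bytes) at offset 0,
next free slot (INT) at 8, last holder (INT) at 12, alloc id (SHORT) at 16; the raw size of 18
bytes is then padded up to the 8-byte alignment, so ITEM_SIZE becomes 24.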
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/AllocInfo.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/AllocInfo.java
new file mode 100644
index 0000000..ef8415f
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/AllocInfo.java
@@ -0,0 +1,35 @@
+package @PACKAGE@;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+public class AllocInfo {
+ String alloc;
+ String free;
+
+ void alloc() {
+ alloc = getStackTrace();
+ }
+
+ void free() {
+ free = getStackTrace();
+ }
+
+ private String getStackTrace() {
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw);
+ new Exception().printStackTrace(pw);
+ pw.close();
+ String res = sw.toString();
+ // remove first 3 lines
+ int nlPos = 0;
+ for (int i = 0; i < 3; ++i) {
+ nlPos = res.indexOf('\n', nlPos) + 1;
+ }
+ return res.substring(nlPos);
+ }
+
+ public String toString() {
+ return "allocation stack:\n" + alloc + "\nfree stack\n" + free;
+ }
+}
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/ArenaManager.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/ArenaManager.java
new file mode 100644
index 0000000..6df032e
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/ArenaManager.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package @PACKAGE@;
+
+public class @E@ArenaManager {
+
+ public static final boolean TRACK_ALLOC_ID = @DEBUG@;
+
+ private final int noArenas;
+ private final @E@RecordManager[] arenas;
+ private ThreadLocal<LocalManager> local;
+
+ static class LocalManager {
+ int arenaId;
+ @E@RecordManager mgr;
+ }
+
+ public @E@ArenaManager(final int noArenas, final long txnShrinkTimer) {
+ this.noArenas = noArenas;
+ arenas = new @E@RecordManager[noArenas];
+ for (int i = 0; i < noArenas; ++i) {
+ arenas[i] = new @E@RecordManager(txnShrinkTimer);
+ }
+ local = new ThreadLocal<LocalManager>() {
+ private int nextArena = 0;
+
+ @Override
+ protected synchronized LocalManager initialValue() {
+ @E@RecordManager mgr = arenas[nextArena];
+ LocalManager res = new LocalManager();
+ res.mgr = mgr;
+ res.arenaId = nextArena;
+ nextArena = (nextArena + 1) % noArenas;
+ return res;
+ }
+ };
+ }
+
+ public long allocate() {
+ final LocalManager localManager = local.get();
+ final @E@RecordManager recMgr = localManager.mgr;
+ final int allocId = TRACK_ALLOC_ID ? (++recMgr.allocCounter % 0x7fff) : 0;
+ final int localId = recMgr.allocate();
+
+ long result = TypeUtil.Global.build(localManager.arenaId, allocId, localId);
+
+ if (TRACK_ALLOC_ID) setAllocId(result, (short) allocId);
+
+ assert TypeUtil.Global.allocId(result) == allocId;
+ assert TypeUtil.Global.arenaId(result) == localManager.arenaId;
+ assert TypeUtil.Global.localId(result) == localId;
+ return result;
+ }
+
+ public void deallocate(long slotNum) {
+ if (TRACK_ALLOC_ID) checkAllocId(slotNum);
+ final int arenaId = TypeUtil.Global.arenaId(slotNum);
+ get(arenaId).deallocate(TypeUtil.Global.localId(slotNum));
+ }
+
+ public @E@RecordManager get(int i) {
+ return arenas[i];
+ }
+
+ public @E@RecordManager local() {
+ return local.get().mgr;
+ }
+
+ @METHODS@
+
+ private void checkAllocId(long slotNum) {
+ final int refAllocId = TypeUtil.Global.allocId(slotNum);
+ final short curAllocId = getAllocId(slotNum);
+ if (refAllocId != curAllocId) {
+ String msg = "reference to slot " + slotNum
+ + " of arena " + TypeUtil.Global.arenaId(slotNum)
+ + " refers to version " + Integer.toHexString(refAllocId)
+ + " current version is " + Integer.toHexString(curAllocId);
+ AllocInfo a = getAllocInfo(slotNum);
+ if (a != null) {
+ msg += "\n" + a.toString();
+ }
+ throw new IllegalStateException(msg);
+ }
+ }
+
+ public AllocInfo getAllocInfo(long slotNum) {
+ final int arenaId = TypeUtil.Global.arenaId(slotNum);
+ return get(arenaId).getAllocInfo(TypeUtil.Global.localId(slotNum));
+ }
+
+ public StringBuilder append(StringBuilder sb) {
+ for (int i = 0; i < noArenas; ++i) {
+ sb.append("++++ arena ").append(i).append(" ++++\n");
+ arenas[i].append(sb);
+ }
+ return sb;
+ }
+
+ public String toString() {
+ return append(new StringBuilder()).toString();
+ }
+
+ public RecordManagerStats addTo(RecordManagerStats s) {
+ s.arenas += noArenas;
+ for (int i = 0; i < noArenas; ++i) {
+ arenas[i].addTo(s);
+ }
+ return s;
+ }
+}
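
Instantiated for a record type named, say, Job, this template yields a JobArenaManager that
spreads allocations across per-thread arenas. A hedged usage sketch (the accessors are generated
from the declared fields, so setLastHolder/getLastHolder below are hypothetical):

    JobArenaManager mgr = new JobArenaManager(8, 120000); // 8 arenas, 120s shrink timer
    long slot = mgr.allocate();       // packs arena id, alloc id and local id into one long
    mgr.setLastHolder(slot, 42);      // hypothetical generated accessor
    int holder = mgr.getLastHolder(slot);
    mgr.deallocate(slot);             // returns the slot to its arena's free list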
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManager.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManager.java
new file mode 100644
index 0000000..39a344e
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManager.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package @PACKAGE@;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+public class @E@RecordManager {
+
+ public static final boolean CHECK_SLOTS = @DEBUG@;
+ public static final boolean TRACK_ALLOC_LOC = @DEBUG@;
+
+ static final int NO_SLOTS = 1000;
+
+ @CONSTS@
+
+ private final long txnShrinkTimer;
+ private long shrinkTimer;
+ private ArrayList<Buffer> buffers;
+ private int current;
+ private int occupiedSlots;
+ private boolean isShrinkTimerOn;
+
+ int allocCounter;
+
+ public @E@RecordManager(long txnShrinkTimer) {
+ this.txnShrinkTimer = txnShrinkTimer;
+ buffers = new ArrayList<Buffer>();
+ buffers.add(new Buffer());
+ current = 0;
+
+ allocCounter = 0;
+ }
+
+ enum SlotSource {
+ NON_FULL,
+ UNINITIALIZED,
+ NEW
+ }
+
+ synchronized int allocate() {
+ if (buffers.get(current).isFull()) {
+ final int size = buffers.size();
+ final int start = current + 1;
+ SlotSource source = SlotSource.NEW;
+ for (int j = start; j < start + size; ++j) {
+ // If we find a buffer with space, we use it. Otherwise we
+ // remember the first uninitialized one and use that one.
+ final int i = j % size;
+ final Buffer buffer = buffers.get(i);
+ if (buffer.isInitialized() && ! buffer.isFull()) {
+ source = SlotSource.NON_FULL;
+ current = i;
+ break;
+ } else if (! buffer.isInitialized() && source == SlotSource.NEW) {
+ source = SlotSource.UNINITIALIZED;
+ current = i;
+ }
+ }
+
+ switch (source) {
+ case NEW:
+ buffers.add(new Buffer());
+ current = buffers.size() - 1;
+ break;
+ case UNINITIALIZED:
+ buffers.get(current).initialize();
+ case NON_FULL:
+ break;
+ }
+ }
+ ++occupiedSlots;
+ return buffers.get(current).allocate() + current * NO_SLOTS;
+ }
+
+ synchronized void deallocate(int slotNum) {
+ buffers.get(slotNum / NO_SLOTS).deallocate(slotNum % NO_SLOTS);
+ --occupiedSlots;
+
+ if (needShrink()) {
+ shrink();
+ }
+ }
+
+ /**
+ * Shrink policy:
+ * Shrink when the resource under-utilization lasts for a certain amount of time.
+ * TODO Need to figure out which of the policies is better
+ * case1.
+ * buffers status : O x x x x x O (O is initialized, x is deinitialized)
+ * In the above status, needShrink() returns 'TRUE' even if there is
+ * nothing to shrink or deallocate, because it computes the total number
+ * of slots as buffers.size() * NO_SLOTS and does not subtract the slots
+ * of deinitialized buffers.
+ * case2.
+ * buffers status : O O x x x x x
+ * However, if we did subtract the deinitialized buffers' slots,
+ * needShrink() would return 'FALSE' in the above case even though
+ * shrinking the buffers would be worthwhile.
+ *
+ * @return true if the buffers have been under-utilized long enough that shrinking is warranted
+ */
+ private boolean needShrink() {
+ int size = buffers.size();
+ int usedSlots = occupiedSlots;
+ if (usedSlots == 0) {
+ usedSlots = 1;
+ }
+
+ if (size > 1 && size * NO_SLOTS / usedSlots >= 3) {
+ if (isShrinkTimerOn) {
+ if (System.currentTimeMillis() - shrinkTimer >= txnShrinkTimer) {
+ isShrinkTimerOn = false;
+ return true;
+ }
+ } else {
+ //turn on timer
+ isShrinkTimerOn = true;
+ shrinkTimer = System.currentTimeMillis();
+ }
+ } else {
+ //turn off timer
+ isShrinkTimerOn = false;
+ }
+
+ return false;
+ }
+
+ /**
+ * shrink() may deinitialize buffers (deallocating their backing ByteBuffers)
+ * and remove buffers from the end of the list, depending on where the
+ * deinitialized buffers are located. It never deinitializes or removes
+ * more than half of the buffers at a time.
+ */
+ private void shrink() {
+ int i;
+ int removeCount = 0;
+ int size = buffers.size();
+ int maxDecreaseCount = size / 2;
+ Buffer buffer;
+
+ //The first buffer is never deinitialized.
+ for (i = 1; i < size; i++) {
+ if (buffers.get(i).isEmpty()) {
+ buffers.get(i).deinitialize();
+ }
+ }
+
+ //remove the empty buffers from the end
+ for (i = size - 1; i >= 1; i--) {
+ buffer = buffers.get(i);
+ if (! buffer.isInitialized()) {
+ buffers.remove(i);
+ if (++removeCount == maxDecreaseCount) {
+ break;
+ }
+ } else {
+ break;
+ }
+ }
+
+ //reset allocChild to the first buffer
+ current = 0;
+
+ isShrinkTimerOn = false;
+ }
+
+ @METHODS@
+
+ public AllocInfo getAllocInfo(int slotNum) {
+ final Buffer buf = buffers.get(slotNum / NO_SLOTS);
+ if (buf.allocList == null) {
+ return null;
+ } else {
+ return buf.allocList.get(slotNum % NO_SLOTS);
+ }
+ }
+
+ StringBuilder append(StringBuilder sb) {
+ sb.append("+++ current: ")
+ .append(current)
+ .append(" no occupied slots: ")
+ .append(occupiedSlots)
+ .append(" +++\n");
+ for (int i = 0; i < buffers.size(); ++i) {
+ buffers.get(i).append(sb);
+ sb.append("\n");
+ }
+ return sb;
+ }
+
+ public String toString() {
+ return append(new StringBuilder()).toString();
+ }
+
+ public RecordManagerStats addTo(RecordManagerStats s) {
+ final int size = buffers.size();
+ s.buffers += size;
+ s.slots += size * NO_SLOTS;
+ s.size += size * NO_SLOTS * ITEM_SIZE;
+ for (int i = 0; i < size; ++i) {
+ buffers.get(i).addTo(s);
+ }
+ return s;
+ }
+
+ static class Buffer {
+ private ByteBuffer bb = null; // null represents 'deinitialized' state.
+ private int freeSlotNum;
+ private int occupiedSlots;
+
+ ArrayList<AllocInfo> allocList;
+
+ Buffer() {
+ initialize();
+ }
+
+ void initialize() {
+ bb = ByteBuffer.allocate(NO_SLOTS * ITEM_SIZE);
+ freeSlotNum = 0;
+ occupiedSlots = 0;
+
+ for (int i = 0; i < NO_SLOTS - 1; i++) {
+ setNextFreeSlot(i, i + 1);
+ }
+ setNextFreeSlot(NO_SLOTS - 1, -1); //-1 represents EOL(end of link)
+
+ if (TRACK_ALLOC_LOC) {
+ allocList = new ArrayList<AllocInfo>(NO_SLOTS);
+ for (int i = 0; i < NO_SLOTS; ++i) {
+ allocList.add(new AllocInfo());
+ }
+ }
+ }
+
+ public void deinitialize() {
+ if (TRACK_ALLOC_LOC) allocList = null;
+ bb = null;
+ }
+
+ public boolean isInitialized() {
+ return bb != null;
+ }
+
+ public boolean isFull() {
+ return freeSlotNum == -1;
+ }
+
+ public boolean isEmpty() {
+ return occupiedSlots == 0;
+ }
+
+ public int allocate() {
+ int slotNum = freeSlotNum;
+ freeSlotNum = getNextFreeSlot(slotNum);
+ @INIT_SLOT@
+ occupiedSlots++;
+ if (TRACK_ALLOC_LOC) allocList.get(slotNum).alloc();
+ return slotNum;
+ }
+
+ public void deallocate(int slotNum) {
+ @INIT_SLOT@
+ setNextFreeSlot(slotNum, freeSlotNum);
+ freeSlotNum = slotNum;
+ occupiedSlots--;
+ if (TRACK_ALLOC_LOC) allocList.get(slotNum).free();
+ }
+
+ public int getNextFreeSlot(int slotNum) {
+ return bb.getInt(slotNum * ITEM_SIZE + NEXT_FREE_SLOT_OFF);
+ }
+
+ public void setNextFreeSlot(int slotNum, int nextFreeSlot) {
+ bb.putInt(slotNum * ITEM_SIZE + NEXT_FREE_SLOT_OFF, nextFreeSlot);
+ }
+
+ StringBuilder append(StringBuilder sb) {
+ sb.append("++ free slot: ")
+ .append(freeSlotNum)
+ .append(" no occupied slots: ")
+ .append(occupiedSlots)
+ .append(" ++\n");
+ @PRINT_BUFFER@
+ return sb;
+ }
+
+ public String toString() {
+ return append(new StringBuilder()).toString();
+ }
+
+ public void addTo(RecordManagerStats s) {
+ if (isInitialized()) {
+ s.items += occupiedSlots;
+ }
+ }
+
+ private void checkSlot(int slotNum) {
+ if (! CHECK_SLOTS) {
+ return;
+ }
+ final int itemOffset = (slotNum % NO_SLOTS) * ITEM_SIZE;
+ // @CHECK_SLOT@
+ }
+ }
+
+}
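
The Buffer above keeps its free list inside the record memory itself: the NEXT_FREE_SLOT_OFF int
of every free slot stores the index of the next free slot, with -1 marking end-of-list. A
self-contained sketch of that technique (simplified: fixed sizes, no full-buffer guard, no
shrinking):

    import java.nio.ByteBuffer;

    public class FreeListSketch {
        static final int NO_SLOTS = 4, ITEM_SIZE = 8, NEXT_FREE_SLOT_OFF = 0;
        final ByteBuffer bb = ByteBuffer.allocate(NO_SLOTS * ITEM_SIZE);
        int freeSlotNum = 0;

        FreeListSketch() {
            // chain the slots: 0 -> 1 -> 2 -> 3 -> -1
            for (int i = 0; i < NO_SLOTS - 1; i++) {
                bb.putInt(i * ITEM_SIZE + NEXT_FREE_SLOT_OFF, i + 1);
            }
            bb.putInt((NO_SLOTS - 1) * ITEM_SIZE + NEXT_FREE_SLOT_OFF, -1);
        }

        int allocate() { // pop the head of the list
            int slot = freeSlotNum;
            freeSlotNum = bb.getInt(slot * ITEM_SIZE + NEXT_FREE_SLOT_OFF);
            return slot;
        }

        void deallocate(int slot) { // push the slot back onto the list
            bb.putInt(slot * ITEM_SIZE + NEXT_FREE_SLOT_OFF, freeSlotNum);
            freeSlotNum = slot;
        }
    }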
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManagerStats.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManagerStats.java
new file mode 100644
index 0000000..5842ed6
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/RecordManagerStats.java
@@ -0,0 +1,19 @@
+package @PACKAGE@;
+
+public class RecordManagerStats {
+ int arenas = 0;
+ int buffers = 0;
+ int slots = 0;
+ int items = 0;
+ int size = 0;
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("{ arenas : ").append(arenas);
+ sb.append(", buffers : ").append(buffers);
+ sb.append(", slots : ").append(slots);
+ sb.append(", items : ").append(items);
+ sb.append(", size : ").append(size).append(" }");
+ return sb.toString();
+ }
+}
diff --git a/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/TypeUtil.java b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/TypeUtil.java
new file mode 100644
index 0000000..9571156
--- /dev/null
+++ b/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/resources/TypeUtil.java
@@ -0,0 +1,59 @@
+package @PACKAGE@;
+
+public class TypeUtil {
+
+ public static class Byte {
+ public static StringBuilder append(StringBuilder sb, byte b) {
+ return sb.append(String.format("%1$18x", b));
+ }
+ }
+
+ public static class Short {
+ public static StringBuilder append(StringBuilder sb, short s) {
+ return sb.append(String.format("%1$18x", s));
+ }
+ }
+
+ public static class Int {
+ public static StringBuilder append(StringBuilder sb, int i) {
+ return sb.append(String.format("%1$18x", i));
+ }
+ }
+
+ public static class Global {
+
+ public static long build(int arenaId, int allocId, int localId) {
+ long result = arenaId;
+ result = result << 48;
+ result |= ((long) allocId) << 32; // widen first: shifting an int by 32 bits is a no-op in Java
+ result |= (localId & 0xffffffffL); // mask so a negative int cannot sign-extend
+ return result;
+ }
+
+ public static int arenaId(long l) {
+ return (int)((l >>> 48) & 0xffff);
+ }
+
+ public static int allocId(long l) {
+ return (int)((l >>> 32) & 0xffff);
+ }
+
+ public static int localId(long l) {
+ return (int) (l & 0xffffffffL);
+ }
+
+ public static StringBuilder append(StringBuilder sb, long l) {
+ sb.append(String.format("%1$4x", TypeUtil.Global.arenaId(l)));
+ sb.append(':');
+ sb.append(String.format("%1$4x", TypeUtil.Global.allocId(l)));
+ sb.append(':');
+ sb.append(String.format("%1$8x", TypeUtil.Global.localId(l)));
+ return sb;
+ }
+
+ public static String toString(long l) {
+ return append(new StringBuilder(), l).toString();
+ }
+
+ }
+}
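
The Global helpers define the slot-number encoding used throughout the generated managers: arena
id in bits 48-63, alloc id in bits 32-47, local id in the low 32 bits. A worked example (values
hypothetical, assuming the widened shift in build() above):

    long slot = TypeUtil.Global.build(3, 0x12, 7); // 0x0003001200000007
    int arena = TypeUtil.Global.arenaId(slot);     // 3
    int alloc = TypeUtil.Global.allocId(slot);     // 0x12
    int local = TypeUtil.Global.localId(slot);     // 7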
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index 6b1e8cf..cef041a 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -1,23 +1,18 @@
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- !
- ! http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
+<!--
+ ! Copyright 2009-2013 by The Regents of the University of California
+ ! Licensed under the Apache License, Version 2.0 (the "License");
+ ! you may not use this file except in compliance with the License.
+ ! you may obtain a copy of the License from
+ !
+ !     http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing, software
+ ! distributed under the License is distributed on an "AS IS" BASIS,
+ ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ! See the License for the specific language governing permissions and
+ ! limitations under the License.
+ !-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-metadata</artifactId>
@@ -40,25 +35,25 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.8.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-external-data</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-events</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -80,6 +75,12 @@
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
</dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-hdfs-core</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataCache.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataCache.java
index 91be90b..c36726b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataCache.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataCache.java
@@ -20,6 +20,7 @@
import java.util.List;
import java.util.Map;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
import edu.uci.ics.asterix.metadata.entities.CompactionPolicy;
@@ -27,8 +28,12 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
/**
@@ -38,6 +43,7 @@
* simply ignored, i.e., updates are not applied to the cache.
*/
public class MetadataCache {
+
// Key is dataverse name.
protected final Map<String, Dataverse> dataverses = new HashMap<String, Dataverse>();
// Key is dataverse name. Key of value map is dataset name.
@@ -52,6 +58,15 @@
protected final Map<FunctionSignature, Function> functions = new HashMap<FunctionSignature, Function>();
// Key is adapter dataverse. Key of value map is the adapter name
protected final Map<String, Map<String, DatasourceAdapter>> adapters = new HashMap<String, Map<String, DatasourceAdapter>>();
+ // Key is FeedConnectionId
+ protected final Map<FeedConnectionId, FeedActivity> feedActivity = new HashMap<FeedConnectionId, FeedActivity>();
+
+ // Key is DataverseName, Key of the value map is the Policy name
+ protected final Map<String, Map<String, FeedPolicy>> feedPolicies = new HashMap<String, Map<String, FeedPolicy>>();
+ // Key is library dataverse. Key of value map is the library name
+ protected final Map<String, Map<String, Library>> libraries = new HashMap<String, Map<String, Library>>();
+ // Key is feed dataverse. Key of value map is the feed name
+ protected final Map<String, Map<String, Feed>> feeds = new HashMap<String, Map<String, Feed>>();
// Key is DataverseName, Key of the value map is the Policy name
protected final Map<String, Map<String, CompactionPolicy>> compactionPolicies = new HashMap<String, Map<String, CompactionPolicy>>();
@@ -89,15 +104,21 @@
synchronized (datatypes) {
synchronized (functions) {
synchronized (adapters) {
- synchronized (compactionPolicies) {
- dataverses.clear();
- nodeGroups.clear();
- datasets.clear();
- indexes.clear();
- datatypes.clear();
- functions.clear();
- adapters.clear();
- compactionPolicies.clear();
+ synchronized (feedActivity) {
+ synchronized (libraries) {
+ synchronized (compactionPolicies) {
+ dataverses.clear();
+ nodeGroups.clear();
+ datasets.clear();
+ indexes.clear();
+ datatypes.clear();
+ functions.clear();
+ adapters.clear();
+ feedActivity.clear();
+ libraries.clear();
+ compactionPolicies.clear();
+ }
+ }
}
}
}
@@ -212,22 +233,43 @@
synchronized (indexes) {
synchronized (datatypes) {
synchronized (functions) {
- synchronized (compactionPolicies) {
- datasets.remove(dataverse.getDataverseName());
- indexes.remove(dataverse.getDataverseName());
- datatypes.remove(dataverse.getDataverseName());
- adapters.remove(dataverse.getDataverseName());
- compactionPolicies.remove(dataverse.getDataverseName());
- List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<FunctionSignature>();
- for (FunctionSignature signature : functions.keySet()) {
- if (signature.getNamespace().equals(dataverse.getDataverseName())) {
- markedFunctionsForRemoval.add(signature);
+ synchronized (adapters) {
+ synchronized (libraries) {
+ synchronized (feedActivity) {
+ synchronized (feeds) {
+ synchronized (compactionPolicies) {
+ datasets.remove(dataverse.getDataverseName());
+ indexes.remove(dataverse.getDataverseName());
+ datatypes.remove(dataverse.getDataverseName());
+ adapters.remove(dataverse.getDataverseName());
+ compactionPolicies.remove(dataverse.getDataverseName());
+ List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<FunctionSignature>();
+ for (FunctionSignature signature : functions.keySet()) {
+ if (signature.getNamespace().equals(dataverse.getDataverseName())) {
+ markedFunctionsForRemoval.add(signature);
+ }
+ }
+ for (FunctionSignature signature : markedFunctionsForRemoval) {
+ functions.remove(signature);
+ }
+ List<FeedConnectionId> feedActivitiesMarkedForRemoval = new ArrayList<FeedConnectionId>();
+ for (FeedConnectionId fid : feedActivity.keySet()) {
+ if (fid.getDataverse().equals(dataverse.getDataverseName())) {
+ feedActivitiesMarkedForRemoval.add(fid);
+ }
+ }
+ for (FeedConnectionId fid : feedActivitiesMarkedForRemoval) {
+ feedActivity.remove(fid);
+ }
+
+ libraries.remove(dataverse.getDataverseName());
+ feeds.remove(dataverse.getDataverseName());
+
+ return dataverses.remove(dataverse.getDataverseName());
+ }
+ }
}
}
- for (FunctionSignature signature : markedFunctionsForRemoval) {
- functions.remove(signature);
- }
- return dataverses.remove(dataverse.getDataverseName());
}
}
}
@@ -409,6 +451,32 @@
}
}
+ public Object addFeedPolicyIfNotExists(FeedPolicy feedPolicy) {
+ synchronized (feedPolicies) {
+ Map<String, FeedPolicy> p = feedPolicies.get(feedPolicy.getDataverseName());
+ if (p == null) {
+ p = new HashMap<String, FeedPolicy>();
+ p.put(feedPolicy.getPolicyName(), feedPolicy);
+ feedPolicies.put(feedPolicy.getDataverseName(), p);
+ } else {
+ if (p.get(feedPolicy.getPolicyName()) == null) {
+ p.put(feedPolicy.getPolicyName(), feedPolicy);
+ }
+ }
+ return null;
+ }
+ }
+
+ public Object dropFeedPolicy(FeedPolicy feedPolicy) {
+ synchronized (feedPolicies) {
+ Map<String, FeedPolicy> p = feedPolicies.get(feedPolicy.getDataverseName());
+ if (p != null && p.get(feedPolicy.getPolicyName()) != null) {
+ return p.remove(feedPolicy.getPolicyName());
+ }
+ return null;
+ }
+ }
+
public Object addAdapterIfNotExists(DatasourceAdapter adapter) {
synchronized (adapters) {
Map<String, DatasourceAdapter> adaptersInDataverse = adapters.get(adapter.getAdapterIdentifier()
@@ -435,4 +503,61 @@
return null;
}
}
+
+ public Object addFeedActivityIfNotExists(FeedActivity fa) {
+ synchronized (feedActivity) {
+ FeedConnectionId fid = new FeedConnectionId(fa.getDataverseName(), fa.getFeedName(), fa.getDatasetName());
+ if (!feedActivity.containsKey(fid)) {
+ feedActivity.put(fid, fa);
+ }
+ }
+ return null;
+ }
+
+ public Object dropFeedActivity(FeedActivity fa) {
+ synchronized (feedActivity) {
+ FeedConnectionId fid = new FeedConnectionId(fa.getDataverseName(), fa.getFeedName(), fa.getDatasetName());
+ return feedActivity.remove(fid);
+ }
+ }
+
+ public Object addLibraryIfNotExists(Library library) {
+ synchronized (libraries) {
+ Map<String, Library> libsInDataverse = libraries.get(library.getDataverseName());
+ boolean needToAdd = (libsInDataverse == null || libsInDataverse.get(library.getName()) == null);
+ if (needToAdd) {
+ if (libsInDataverse == null) {
+ libsInDataverse = new HashMap<String, Library>();
+ libraries.put(library.getDataverseName(), libsInDataverse);
+ }
+ return libsInDataverse.put(library.getName(), library);
+ }
+ return null;
+ }
+ }
+
+ public Object dropLibrary(Library library) {
+ synchronized (libraries) {
+ Map<String, Library> librariesInDataverse = libraries.get(library.getDataverseName());
+ if (librariesInDataverse != null) {
+ return librariesInDataverse.remove(library.getName());
+ }
+ return null;
+ }
+ }
+
+ public Object addFeedIfNotExists(Feed feed) {
+ // TODO: feeds are not cached yet; implement along the lines of addLibraryIfNotExists
+ return null;
+ }
+
+ public Object dropFeed(Feed feed) {
+ synchronized (feeds) {
+ Map<String, Feed> feedsInDataverse = feeds.get(feed.getDataverseName());
+ if (feedsInDataverse != null) {
+ return feedsInDataverse.remove(feed.getFeedName());
+ }
+ return null;
+ }
+ }
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
index 46159ba..3bdf73a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
@@ -22,6 +22,8 @@
import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
@@ -32,11 +34,18 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.Node;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
/**
* Provides access to Asterix metadata via remote methods to the metadata node.
@@ -71,6 +80,10 @@
* with transaction ids of regular jobs or other metadata transactions.
*/
public class MetadataManager implements IMetadataManager {
+ private static final int INITIAL_SLEEP_TIME = 64;
+ private static final int RETRY_MULTIPLIER = 4;
+ private static final int MAX_RETRY_COUNT = 6;
+
// Set in init().
public static MetadataManager INSTANCE;
private final MetadataCache cache = new MetadataCache();
@@ -78,6 +91,7 @@
private IMetadataNode metadataNode;
private final ReadWriteLock metadataLatch;
private final AsterixMetadataProperties metadataProperties;
+ private IHyracksClientConnection hcc;
public MetadataManager(IAsterixStateProxy proxy, AsterixMetadataProperties metadataProperties) {
if (proxy == null) {
@@ -89,14 +103,37 @@
this.metadataLatch = new ReentrantReadWriteLock(true);
}
+ public MetadataManager(IAsterixStateProxy proxy, IMetadataNode metadataNode) {
+ if (metadataNode == null) {
+ throw new Error("Null metadataNode given to MetadataManager.");
+ }
+ this.proxy = proxy;
+ this.metadataProperties = null;
+ this.metadataNode = metadataNode;
+ this.metadataLatch = new ReentrantReadWriteLock(true);
+ }
+
@Override
- public void init() throws RemoteException {
+ public void init() throws RemoteException, MetadataException {
// Could be synchronized on any object. Arbitrarily chose proxy.
synchronized (proxy) {
if (metadataNode != null) {
return;
}
- metadataNode = proxy.getMetadataNode();
+ try {
+ int retry = 0;
+ int sleep = INITIAL_SLEEP_TIME;
+ while (retry++ < MAX_RETRY_COUNT) {
+ metadataNode = proxy.getMetadataNode();
+ if (metadataNode != null) {
+ break;
+ }
+ Thread.sleep(sleep);
+ sleep *= RETRY_MULTIPLIER;
+ }
+ } catch (InterruptedException e) {
+ throw new MetadataException(e);
+ }
if (metadataNode == null) {
throw new Error("Failed to get the MetadataNode.\n" + "The MetadataNode was configured to run on NC: "
+ metadataProperties.getMetadataNodeName());
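The retry loop above is capped exponential backoff: the first wait is INITIAL_SLEEP_TIME (64 ms), each subsequent wait is RETRY_MULTIPLIER (4x) longer, and at most MAX_RETRY_COUNT (6) attempts are made. A minimal standalone sketch of the same pattern; Fetcher and retryWithBackoff are hypothetical names, not part of this patch:

    interface Fetcher<T> {
        T fetch(); // stands in for proxy.getMetadataNode()
    }

    static <T> T retryWithBackoff(Fetcher<T> fetcher) throws InterruptedException {
        int sleep = 64; // INITIAL_SLEEP_TIME (ms)
        T result = null;
        for (int retry = 0; retry < 6 && result == null; retry++) { // MAX_RETRY_COUNT
            result = fetcher.fetch();
            if (result == null) {
                Thread.sleep(sleep);
                sleep *= 4; // RETRY_MULTIPLIER
            }
        }
        return result; // still null if every attempt failed
    }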
@@ -128,8 +165,8 @@
}
@Override
- public void unlock(MetadataTransactionContext ctx) throws RemoteException, ACIDException {
- metadataNode.unlock(ctx.getJobId());
+ public void unlock(MetadataTransactionContext ctx, byte lockMode) throws RemoteException, ACIDException {
+ metadataNode.unlock(ctx.getJobId(), lockMode);
}
@Override
@@ -347,7 +384,17 @@
datatype = cache.getDatatype(dataverseName, datatypeName);
if (datatype != null) {
// Datatype is already in the cache, don't add it again.
- return datatype;
+ try {
+ //create a new Datatype object with a new ARecordType object in order to avoid
+ //concurrent access to UTF8StringPointable comparator in ARecordType object.
+ //see issue 510
+ ARecordType aRecType = (ARecordType) datatype.getDatatype();
+ return new Datatype(datatype.getDataverseName(), datatype.getDatatypeName(), new ARecordType(
+ aRecType.getTypeName(), aRecType.getFieldNames(), aRecType.getFieldTypes(), aRecType.isOpen()),
+ datatype.getIsAnonymous());
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
}
try {
datatype = metadataNode.getDatatype(ctx.getJobId(), dataverseName, datatypeName);
@@ -555,6 +602,16 @@
}
@Override
+ public void addFeedPolicy(MetadataTransactionContext mdTxnCtx, FeedPolicy feedPolicy) throws MetadataException {
+ try {
+ metadataNode.addFeedPolicy(mdTxnCtx.getJobId(), feedPolicy);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ mdTxnCtx.addFeedPolicy(feedPolicy);
+ }
+
+ @Override
public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException {
try {
metadataNode.initializeDatasetIdFactory(ctx.getJobId());
@@ -610,6 +667,77 @@
}
@Override
+ public void registerFeedActivity(MetadataTransactionContext ctx, FeedConnectionId feedId, FeedActivity feedActivity)
+ throws MetadataException {
+ try {
+ metadataNode.registerFeedActivity(ctx.getJobId(), feedId, feedActivity);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ @Override
+ public FeedActivity getRecentActivityOnFeedConnection(MetadataTransactionContext ctx, FeedConnectionId feedId,
+ FeedActivityType... feedActivityTypes) throws MetadataException {
+
+ FeedActivity feedActivity = null;
+ try {
+ feedActivity = metadataNode.getRecentFeedActivity(ctx.getJobId(), feedId, feedActivityTypes);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return feedActivity;
+ }
+
+ public void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ throws MetadataException {
+ try {
+ metadataNode.dropLibrary(ctx.getJobId(), dataverseName, libraryName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ ctx.dropLibrary(dataverseName, libraryName);
+ }
+
+ @Override
+ public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
+ throws MetadataException {
+ List<Library> dataverseLibraries = null;
+ try {
+ // Assuming that the transaction can read its own writes on the
+ // metadata node.
+ dataverseLibraries = metadataNode.getDataverseLibraries(ctx.getJobId(), dataverseName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ // Don't update the cache to avoid checking against the transaction's
+ // uncommitted libraries.
+ return dataverseLibraries;
+ }
+
+ @Override
+ public void addLibrary(MetadataTransactionContext ctx, Library library) throws MetadataException {
+ try {
+ metadataNode.addLibrary(ctx.getJobId(), library);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ ctx.addLibrary(library);
+ }
+
+ @Override
+ public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ throws MetadataException, RemoteException {
+ Library library = null;
+ try {
+ library = metadataNode.getLibrary(ctx.getJobId(), dataverseName, libraryName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return library;
+ }
+
+ @Override
public void acquireWriteLatch() {
metadataLatch.writeLock().lock();
}
@@ -628,4 +756,83 @@
public void releaseReadLatch() {
metadataLatch.readLock().unlock();
}
-}
\ No newline at end of file
+
+ @Override
+ public FeedPolicy getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
+ throws MetadataException {
+
+ FeedPolicy feedPolicy = null;
+ try {
+ feedPolicy = metadataNode.getFeedPolicy(ctx.getJobId(), dataverse, policyName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return feedPolicy;
+ }
+
+ @Override
+ public List<FeedActivity> getActiveFeeds(MetadataTransactionContext ctx, String dataverse, String dataset)
+ throws MetadataException {
+ List<FeedActivity> feedActivities = null;
+ try {
+ feedActivities = metadataNode.getActiveFeeds(ctx.getJobId(), dataverse, dataset);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return feedActivities;
+ }
+
+ @Override
+ public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
+ Feed feed = null;
+ try {
+ feed = metadataNode.getFeed(ctx.getJobId(), dataverse, feedName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return feed;
+ }
+
+ @Override
+ public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
+ try {
+ metadataNode.dropFeed(ctx.getJobId(), dataverse, feedName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ ctx.dropFeed(dataverse, feedName);
+ }
+
+ @Override
+ public void addFeed(MetadataTransactionContext ctx, Feed feed) throws MetadataException {
+ try {
+ metadataNode.addFeed(ctx.getJobId(), feed);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ ctx.addFeed(feed);
+ }
+
+ public List<FeedActivity> getConnectFeedActivitiesForFeed(MetadataTransactionContext ctx, String dataverse,
+ String feedName) throws MetadataException {
+ List<FeedActivity> feedActivities = null;
+ try {
+ feedActivities = metadataNode.getDatasetsServedByFeed(ctx.getJobId(), dataverse, feedName);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return feedActivities;
+ }
+
+ public List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext mdTxnCtx, String dataverse)
+ throws MetadataException {
+ List<DatasourceAdapter> dataverseAdapters;
+ try {
+ dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getJobId(), dataverse);
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ return dataverseAdapters;
+ }
+
+}
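The feed methods above all follow one delegation shape: forward to the metadata node over RMI, wrap RemoteException in MetadataException, and (for mutations) record the change in the transaction context. A hedged usage sketch, assuming the usual beginTransaction/commitTransaction/abortTransaction lifecycle on MetadataManager; the Feed constructor arguments mirror the five-argument form used in MetadataTransactionContext and are illustrative only:

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    try {
        // dataverse, feed name, adapter name, adapter configuration, applied function
        Feed feed = new Feed("Marketing", "TwitterFeed", adapterName, adapterConfiguration, appliedFunction);
        MetadataManager.INSTANCE.addFeed(mdTxnCtx, feed);
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
        throw e;
    }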
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index b344f69..24bc208 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -17,13 +17,19 @@
import java.rmi.RemoteException;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.common.transactions.AbstractOperationCallback;
import edu.uci.ics.asterix.common.transactions.DatasetId;
@@ -42,9 +48,14 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.Node;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
import edu.uci.ics.asterix.metadata.entitytupletranslators.CompactionPolicyTupleTranslator;
@@ -52,10 +63,15 @@
import edu.uci.ics.asterix.metadata.entitytupletranslators.DatasourceAdapterTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.DatatypeTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.DataverseTupleTranslator;
+import edu.uci.ics.asterix.metadata.entitytupletranslators.FeedActivityTupleTranslator;
+import edu.uci.ics.asterix.metadata.entitytupletranslators.FeedPolicyTupleTranslator;
+import edu.uci.ics.asterix.metadata.entitytupletranslators.FeedTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.FunctionTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.IndexTupleTranslator;
+import edu.uci.ics.asterix.metadata.entitytupletranslators.LibraryTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.NodeGroupTupleTranslator;
import edu.uci.ics.asterix.metadata.entitytupletranslators.NodeTupleTranslator;
+import edu.uci.ics.asterix.metadata.feeds.FeedActivityIdFactory;
import edu.uci.ics.asterix.metadata.valueextractors.DatasetNameValueExtractor;
import edu.uci.ics.asterix.metadata.valueextractors.DatatypeNameValueExtractor;
import edu.uci.ics.asterix.metadata.valueextractors.MetadataEntityValueExtractor;
@@ -140,9 +156,9 @@
}
@Override
- public void unlock(JobId jobId) throws ACIDException, RemoteException {
+ public void unlock(JobId jobId, byte lockMode) throws ACIDException, RemoteException {
ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
- transactionSubsystem.getLockManager().unlock(METADATA_DATASET_ID, -1, txnCtx);
+ transactionSubsystem.getLockManager().unlock(METADATA_DATASET_ID, -1, lockMode, txnCtx);
}
@Override
@@ -166,7 +182,7 @@
DatasetTupleTranslator tupleReaderWriter = new DatasetTupleTranslator(true);
ITupleReference datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
- if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
+ if (dataset.getDatasetType() == DatasetType.INTERNAL) {
// Add the primary index for the dataset.
InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
Index primaryIndex = new Index(dataset.getDataverseName(), dataset.getDatasetName(),
@@ -305,13 +321,15 @@
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
try {
- List<Dataset> dataverseDatasets;
+ List<Dataset> dataverseDatasets;
+ Dataset ds;
dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
if (dataverseDatasets != null && dataverseDatasets.size() > 0) {
// Drop all datasets in this dataverse.
for (int i = 0; i < dataverseDatasets.size(); i++) {
- dropDataset(jobId, dataverseName, dataverseDatasets.get(i).getDatasetName());
+ ds = dataverseDatasets.get(i);
+ dropDataset(jobId, dataverseName, ds.getDatasetName());
}
}
List<Datatype> dataverseDatatypes;
@@ -345,6 +363,17 @@
}
}
+ List<Feed> dataverseFeeds;
+ Feed feed;
+ dataverseFeeds = getDataverseFeeds(jobId, dataverseName);
+ if (dataverseFeeds != null && dataverseFeeds.size() > 0) {
+ // Drop all feeds in this dataverse.
+ for (int i = 0; i < dataverseFeeds.size(); i++) {
+ feed = dataverseFeeds.get(i);
+ dropFeed(jobId, dataverseName, feed.getFeedName());
+ }
+ }
+
// Delete the dataverse entry from the 'dataverse' dataset.
ITupleReference searchKey = createTuple(dataverseName);
// As a side effect, acquires an S lock on the 'dataverse' dataset
@@ -388,7 +417,7 @@
}
// Delete entry from secondary index 'group'.
- if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
+ if (dataset.getDatasetType() == DatasetType.INTERNAL) {
InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
ITupleReference groupNameSearchKey = createTuple(id.getNodeGroupName(), dataverseName, datasetName);
// Searches the index for the tuple to be deleted. Acquires an S
@@ -416,7 +445,7 @@
}
// Delete entry(s) from the 'indexes' dataset.
- if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
+ if (dataset.getDatasetType() == DatasetType.INTERNAL) {
List<Index> datasetIndexes = getDatasetIndexes(jobId, dataverseName, datasetName);
if (datasetIndexes != null) {
for (Index index : datasetIndexes) {
@@ -424,6 +453,7 @@
}
}
}
+
} catch (Exception e) {
throw new MetadataException(e);
}
@@ -644,6 +674,34 @@
}
}
+ @Override
+ public List<Feed> getDataverseFeeds(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
+ try {
+ ITupleReference searchKey = createTuple(dataverseName);
+ FeedTupleTranslator tupleReaderWriter = new FeedTupleTranslator(false);
+ IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<Feed>(tupleReaderWriter);
+ List<Feed> results = new ArrayList<Feed>();
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
+ return results;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ public List<Library> getDataverseLibraries(JobId jobId, String dataverseName) throws MetadataException,
+ RemoteException {
+ try {
+ ITupleReference searchKey = createTuple(dataverseName);
+ LibraryTupleTranslator tupleReaderWriter = new LibraryTupleTranslator(false);
+ IValueExtractor<Library> valueExtractor = new MetadataEntityValueExtractor<Library>(tupleReaderWriter);
+ List<Library> results = new ArrayList<Library>();
+ searchIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey, valueExtractor, results);
+ return results;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName) throws MetadataException,
RemoteException {
try {
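The feed and library lookups added to this file share one retrieval shape: build a search key with createTuple, pair the entity's tuple translator with a MetadataEntityValueExtractor, and scan the matching metadata index. A hypothetical generic helper that captures the pattern (not present in the patch; shown only to make the convention explicit):

    private <T> List<T> searchEntities(JobId jobId, IMetadataIndex index,
            IMetadataEntityTupleTranslator<T> translator, ITupleReference searchKey) throws MetadataException {
        try {
            IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<T>(translator);
            List<T> results = new ArrayList<T>();
            searchIndex(jobId, index, searchKey, valueExtractor, results);
            return results;
        } catch (Exception e) {
            throw new MetadataException(e);
        }
    }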
@@ -778,6 +836,7 @@
}
return results.get(0);
} catch (Exception e) {
throw new MetadataException(e);
}
}
@@ -839,9 +898,9 @@
+ functionSignature.getArity());
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'function' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET,
+ ITupleReference functionTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET,
searchKey);
- deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, datasetTuple);
+ deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
@@ -991,29 +1050,33 @@
try {
IIndex indexInstance = indexLifecycleManager.getIndex(resourceID);
indexLifecycleManager.open(resourceID);
- IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
- IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
-
- DatasetTupleTranslator tupleReaderWriter = new DatasetTupleTranslator(false);
- IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<Dataset>(tupleReaderWriter);
- RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
-
- indexAccessor.search(rangeCursor, rangePred);
- int datasetId;
-
try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference ref = rangeCursor.getTuple();
- Dataset ds = valueExtractor.getValue(jobId, rangeCursor.getTuple());
- datasetId = ((Dataset) valueExtractor.getValue(jobId, rangeCursor.getTuple())).getDatasetId();
- if (mostRecentDatasetId < datasetId) {
- mostRecentDatasetId = datasetId;
+ IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
+ NoOpOperationCallback.INSTANCE);
+ IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
+
+ DatasetTupleTranslator tupleReaderWriter = new DatasetTupleTranslator(false);
+ IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<Dataset>(tupleReaderWriter);
+ RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
+
+ indexAccessor.search(rangeCursor, rangePred);
+ int datasetId;
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ final ITupleReference ref = rangeCursor.getTuple();
+ final Dataset ds = valueExtractor.getValue(jobId, ref);
+ datasetId = ds.getDatasetId();
+ if (mostRecentDatasetId < datasetId) {
+ mostRecentDatasetId = datasetId;
+ }
}
+ } finally {
+ rangeCursor.close();
}
} finally {
- rangeCursor.close();
+ indexLifecycleManager.close(resourceID);
}
} catch (Exception e) {
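The rewrite above pairs every acquire with a release: the cursor is closed even if the scan throws, and the index is closed even if cursor creation or the search itself throws. Reduced to its skeleton (a sketch reusing the identifiers from the hunk):

    indexLifecycleManager.open(resourceID);
    try {
        IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
        indexAccessor.search(rangeCursor, rangePred);
        try {
            while (rangeCursor.hasNext()) {
                rangeCursor.next();
                // consume rangeCursor.getTuple()
            }
        } finally {
            rangeCursor.close(); // always runs, even on scan failure
        }
    } finally {
        indexLifecycleManager.close(resourceID); // always runs, even if the search setup fails
    }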
@@ -1177,7 +1240,321 @@
}
@Override
+ public void addLibrary(JobId jobId, Library library) throws MetadataException, RemoteException {
+ try {
+ // Insert into the 'Library' dataset.
+ LibraryTupleTranslator tupleReaderWriter = new LibraryTupleTranslator(true);
+ ITupleReference libraryTuple = tupleReaderWriter.getTupleFromMetadataEntity(library);
+ insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, libraryTuple);
+
+ } catch (TreeIndexException e) {
+ throw new MetadataException("A library with this name " + library.getDataverseName()
+ + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
+ @Override
+ public void dropLibrary(JobId jobId, String dataverseName, String libraryName) throws MetadataException,
+ RemoteException {
+ Library library;
+ try {
+ library = getLibrary(jobId, dataverseName, libraryName);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ if (library == null) {
+ throw new MetadataException("Cannot drop library '" + library + "' because it doesn't exist.");
+ }
+ try {
+ // Delete entry from the 'Library' dataset.
+ ITupleReference searchKey = createTuple(dataverseName, libraryName);
+ // Searches the index for the tuple to be deleted. Acquires an S
+ // lock on the 'Library' dataset.
+ ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey);
+ deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, datasetTuple);
+
+ // TODO: Change this to be a BTree specific exception, e.g.,
+ // BTreeKeyDoesNotExistException.
+ } catch (TreeIndexException e) {
+ throw new MetadataException("Cannot drop library '" + libraryName, e);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
+ @Override
+ public Library getLibrary(JobId jobId, String dataverseName, String libraryName) throws MetadataException,
+ RemoteException {
+ try {
+ ITupleReference searchKey = createTuple(dataverseName, libraryName);
+ LibraryTupleTranslator tupleReaderWriter = new LibraryTupleTranslator(false);
+ List<Library> results = new ArrayList<Library>();
+ IValueExtractor<Library> valueExtractor = new MetadataEntityValueExtractor<Library>(tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey, valueExtractor, results);
+ if (results.isEmpty()) {
+ return null;
+ }
+ return results.get(0);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
public int getMostRecentDatasetId() throws MetadataException, RemoteException {
return DatasetIdFactory.getMostRecentDatasetId();
}
+
+ @Override
+ public void registerFeedActivity(JobId jobId, FeedConnectionId feedId, FeedActivity feedActivity)
+ throws MetadataException, RemoteException {
+ try {
+ if (!FeedActivityIdFactory.isInitialized()) {
+ initializeFeedActivityIdFactory(jobId);
+ }
+ feedActivity.setActivityId(FeedActivityIdFactory.generateFeedActivityId());
+ FeedActivityTupleTranslator tupleReaderWriter = new FeedActivityTupleTranslator(true);
+ ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(feedActivity);
+ insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, tuple);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
+ @Override
+ public FeedActivity getRecentFeedActivity(JobId jobId, FeedConnectionId feedId, FeedActivityType... activityType)
+ throws MetadataException, RemoteException {
+ try {
+ ITupleReference searchKey = createTuple(feedId.getDataverse(), feedId.getFeedName(),
+ feedId.getDatasetName());
+ FeedActivityTupleTranslator tupleReaderWriter = new FeedActivityTupleTranslator(false);
+ List<FeedActivity> results = new ArrayList<FeedActivity>();
+ IValueExtractor<FeedActivity> valueExtractor = new MetadataEntityValueExtractor<FeedActivity>(
+ tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, searchKey, valueExtractor, results);
+ if (!results.isEmpty()) {
+ Collections.sort(results);
+ if (activityType == null || activityType.length == 0) {
+ return results.get(0);
+ } else {
+ for (FeedActivity result : results) {
+ for (FeedActivityType ft : activityType) {
+ if (result.getActivityType().equals(ft)) {
+ return result;
+ }
+ }
+ }
+ }
+ }
+ return null;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ @Override
+ public void initializeFeedActivityIdFactory(JobId jobId) throws MetadataException, RemoteException {
+ try {
+ ITupleReference searchKey = createTuple();
+ FeedActivityTupleTranslator tupleReaderWriter = new FeedActivityTupleTranslator(true);
+ List<FeedActivity> results = new ArrayList<FeedActivity>();
+ IValueExtractor<FeedActivity> valueExtractor = new MetadataEntityValueExtractor<FeedActivity>(
+ tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, searchKey, valueExtractor, results);
+ int maxActivityId = 0;
+ for (FeedActivity fa : results) {
+ if (maxActivityId < fa.getActivityId()) {
+ maxActivityId = fa.getActivityId();
+ }
+ }
+ FeedActivityIdFactory.initialize(maxActivityId);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
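initializeFeedActivityIdFactory seeds the id factory with the largest activity id already persisted, keeping ids monotonic across restarts. FeedActivityIdFactory itself is not shown in this diff; a plausible sketch of its contract, with all bodies being assumptions:

    import java.util.concurrent.atomic.AtomicInteger;

    public class FeedActivityIdFactory {
        private static final AtomicInteger id = new AtomicInteger();
        private static volatile boolean initialized = false;

        public static void initialize(int maxPersistedId) {
            id.set(maxPersistedId); // continue numbering after recovered activities
            initialized = true;
        }

        public static boolean isInitialized() {
            return initialized;
        }

        public static int generateFeedActivityId() {
            return id.incrementAndGet();
        }
    }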
+ @Override
+ public void addFeedPolicy(JobId jobId, FeedPolicy feedPolicy) throws MetadataException, RemoteException {
+ try {
+ // Insert into the 'FeedPolicy' dataset.
+ FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(true);
+ ITupleReference feedPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedPolicy);
+ insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, feedPolicyTuple);
+
+ } catch (TreeIndexException e) {
+ throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
+ + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
+ @Override
+ public FeedPolicy getFeedPolicy(JobId jobId, String dataverse, String policyName) throws MetadataException,
+ RemoteException {
+
+ try {
+ ITupleReference searchKey = createTuple(dataverse, policyName);
+ FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(false);
+ List<FeedPolicy> results = new ArrayList<FeedPolicy>();
+ IValueExtractor<FeedPolicy> valueExtractor = new MetadataEntityValueExtractor<FeedPolicy>(tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
+ if (!results.isEmpty()) {
+ return results.get(0);
+ }
+ return null;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ @Override
+ public List<FeedActivity> getActiveFeeds(JobId jobId, String dataverse, String dataset) throws MetadataException,
+ RemoteException {
+ List<FeedActivity> activeFeeds = new ArrayList<FeedActivity>();
+ Map<FeedConnectionId, FeedActivity> aFeeds = new HashMap<FeedConnectionId, FeedActivity>();
+ boolean invalidArgs = (dataverse == null && dataset != null);
+ if (invalidArgs) {
+ throw new MetadataException("Invalid arguments " + dataverse + " " + dataset);
+ }
+ try {
+ ITupleReference searchKey = createTuple();
+ FeedActivityTupleTranslator tupleReaderWriter = new FeedActivityTupleTranslator(true);
+ List<FeedActivity> results = new ArrayList<FeedActivity>();
+ IValueExtractor<FeedActivity> valueExtractor = new MetadataEntityValueExtractor<FeedActivity>(
+ tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, searchKey, valueExtractor, results);
+ Collections.sort(results); // recent activity first
+ FeedConnectionId fid = null;
+ Set<FeedConnectionId> terminatedFeeds = new HashSet<FeedConnectionId>();
+ for (FeedActivity fa : results) {
+ if (dataverse != null) {
+ if (!fa.getDataverseName().equals(dataverse)) {
+ continue;
+ }
+ if (dataset != null && !dataset.equals(fa.getDatasetName())) {
+ continue;
+ }
+ }
+
+ fid = new FeedConnectionId(fa.getDataverseName(), fa.getFeedName(), fa.getDatasetName());
+ switch (fa.getActivityType()) {
+ case FEED_BEGIN:
+ if (!terminatedFeeds.contains(fid)) {
+ if (aFeeds.get(fid) == null || fa.getActivityId() > aFeeds.get(fid).getActivityId()) {
+ aFeeds.put(fid, fa);
+ }
+ }
+ break;
+ case FEED_END:
+ terminatedFeeds.add(fid);
+ break;
+ default: //ignore
+ }
+ }
+ for (FeedActivity f : aFeeds.values()) {
+ System.out.println("ACTIVE FEEDS " + f.getFeedName());
+ activeFeeds.add(f);
+ }
+ return activeFeeds;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
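getActiveFeeds is a replay over the activity log: entries are sorted newest-first, a FEED_END tombstones its connection, and a FEED_BEGIN counts only if no newer FEED_END exists for the same connection. A worked trace with hypothetical entries:

    // Activity log, newest first (activityId: type, connection):
    //   7: FEED_END,   (fv, feed1, ds1)
    //   6: FEED_BEGIN, (fv, feed2, ds2)
    //   5: FEED_BEGIN, (fv, feed1, ds1)
    // Replay: id 7 adds (fv, feed1, ds1) to terminatedFeeds;
    //         id 6 records (fv, feed2, ds2) in aFeeds;
    //         id 5 is ignored because its connection is already terminated.
    // Result: only (fv, feed2, ds2) is reported as active.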
+ @Override
+ public void addFeed(JobId jobId, Feed feed) throws MetadataException, RemoteException {
+ try {
+ // Insert into the 'Feed' dataset.
+ FeedTupleTranslator tupleReaderWriter = new FeedTupleTranslator(true);
+ ITupleReference feedTuple = tupleReaderWriter.getTupleFromMetadataEntity(feed);
+ insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple);
+
+ } catch (TreeIndexException e) {
+ throw new MetadataException("A feed with this name " + feed.getFeedName()
+ + " already exists in dataverse '" + feed.getDataverseName() + "'.", e);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ @Override
+ public Feed getFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException {
+ try {
+ ITupleReference searchKey = createTuple(dataverse, feedName);
+ FeedTupleTranslator tupleReaderWriter = new FeedTupleTranslator(false);
+ List<Feed> results = new ArrayList<Feed>();
+ IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<Feed>(tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
+ if (!results.isEmpty()) {
+ return results.get(0);
+ }
+ return null;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
+ @Override
+ public void dropFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException {
+
+ try {
+ ITupleReference searchKey = createTuple(dataverse, feedName);
+ // Searches the index for the tuple to be deleted. Acquires an S
+ // lock on the 'Feed' dataset.
+ ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey);
+ deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, tuple);
+ // TODO: Change this to be a BTree specific exception, e.g.,
+ // BTreeKeyDoesNotExistException.
+ } catch (TreeIndexException e) {
+ throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+
+ }
+
+ public List<FeedActivity> getDatasetsServedByFeed(JobId jobId, String dataverse, String feedName)
+ throws MetadataException, RemoteException {
+ List<FeedActivity> feedActivities = new ArrayList<FeedActivity>();
+ try {
+ ITupleReference searchKey = createTuple(dataverse, feedName);
+ FeedActivityTupleTranslator tupleReaderWriter = new FeedActivityTupleTranslator(false);
+ List<FeedActivity> results = new ArrayList<FeedActivity>();
+ IValueExtractor<FeedActivity> valueExtractor = new MetadataEntityValueExtractor<FeedActivity>(
+ tupleReaderWriter);
+ searchIndex(jobId, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, searchKey, valueExtractor, results);
+
+ if (!results.isEmpty()) {
+ Collections.sort(results); // most recent feed activity
+ Set<String> terminatedDatasets = new HashSet<String>();
+ Set<String> activeDatasets = new HashSet<String>();
+
+ for (FeedActivity result : results) {
+ switch (result.getActivityType()) {
+ case FEED_BEGIN:
+ if (!terminatedDatasets.contains(result.getDatasetName())) {
+ feedActivities.add(result);
+ activeDatasets.add(result.getDatasetName());
+ }
+ break;
+ case FEED_END:
+ if (!activeDatasets.contains(result.getDatasetName())) {
+ terminatedDatasets.add(result.getDatasetName());
+ }
+ break;
+ }
+
+ }
+ }
+ return feedActivities;
+ } catch (Exception e) {
+ throw new MetadataException(e);
+ }
+ }
+
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
index 3705fa1..6859e45 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
@@ -19,16 +19,19 @@
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
import edu.uci.ics.asterix.metadata.entities.CompactionPolicy;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
/**
* Used to implement serializable transactions against the MetadataCache.
@@ -130,6 +133,11 @@
logAndApply(new MetadataLogicalOperation(dataverse, false));
}
+ public void addLibrary(Library library) {
+ droppedCache.dropLibrary(library);
+ logAndApply(new MetadataLogicalOperation(library, true));
+ }
+
public void dropDataDatatype(String dataverseName, String datatypeName) {
Datatype datatype = new Datatype(dataverseName, datatypeName, null, false);
droppedCache.addDatatypeIfNotExists(datatype);
@@ -156,6 +164,12 @@
logAndApply(new MetadataLogicalOperation(adapter, false));
}
+ public void dropLibrary(String dataverseName, String libraryName) {
+ Library library = new Library(dataverseName, libraryName);
+ droppedCache.addLibraryIfNotExists(library);
+ logAndApply(new MetadataLogicalOperation(library, false));
+ }
+
public void logAndApply(MetadataLogicalOperation op) {
opLog.add(op);
doOperation(op);
@@ -201,10 +215,29 @@
return opLog;
}
+ public void addFeedPolicy(FeedPolicy feedPolicy) {
+ droppedCache.dropFeedPolicy(feedPolicy);
+ logAndApply(new MetadataLogicalOperation(feedPolicy, true));
+
+ }
+
+ public void addFeed(Feed feed) {
+ droppedCache.dropFeed(feed);
+ logAndApply(new MetadataLogicalOperation(feed, true));
+
+ }
+
+ public void dropFeed(String dataverse, String feedName) {
+ Feed feed = new Feed(dataverse, feedName, null, null, null);
+ droppedCache.addFeedIfNotExists(feed);
+ logAndApply(new MetadataLogicalOperation(feed, false));
+ }
+
@Override
public void clear() {
super.clear();
droppedCache.clear();
opLog.clear();
}
+
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterEventsSubscriber.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterEventsSubscriber.java
new file mode 100644
index 0000000..049a45c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterEventsSubscriber.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.api;
+
+import java.util.Set;
+
+import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties.State;
+
+public interface IClusterEventsSubscriber {
+
+ /**
+ * @param deadNodeIds
+ * @return
+ */
+ public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds);
+
+ /**
+ * @param joinedNodeId
+ * @return
+ */
+ public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId);
+
+ /**
+ * @param response
+ */
+ public void notifyRequestCompletion(IClusterManagementWorkResponse response);
+
+ /**
+ * @param previousState
+ * @param newState
+ */
+ public void notifyStateChange(State previousState, State newState);
+
+}
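A minimal no-op implementation of the interface above can serve as a base for subscribers that only care about a single event type; the class name is hypothetical, and a real subscriber would return concrete work items:

    import java.util.Collections;
    import java.util.Set;

    import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse;
    import edu.uci.ics.asterix.om.util.AsterixClusterProperties.State;

    public class NoOpClusterEventsSubscriber implements IClusterEventsSubscriber {

        @Override
        public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
            return Collections.<IClusterManagementWork> emptySet(); // request no corrective work
        }

        @Override
        public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
            return Collections.<IClusterManagementWork> emptySet();
        }

        @Override
        public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
            // no-op: this subscriber never issues work requests
        }

        @Override
        public void notifyStateChange(State previousState, State newState) {
            // no-op
        }
    }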
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManagementWork.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManagementWork.java
new file mode 100644
index 0000000..65ac354
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManagementWork.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.api;
+
+public interface IClusterManagementWork {
+
+ public enum WorkType {
+ ADD_NODE,
+ REMOVE_NODE
+ }
+
+ public WorkType getClusterManagementWorkType();
+
+ public int getWorkId();
+
+ public IClusterEventsSubscriber getSourceSubscriber();
+}
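A sketch of a concrete work item for this contract; the class name, the node-count field, and the counter-based id scheme are assumptions, since the patch defines only the interface:

    import java.util.concurrent.atomic.AtomicInteger;

    public class AddNodeWork implements IClusterManagementWork {
        private static final AtomicInteger WORK_ID_GENERATOR = new AtomicInteger();

        private final int workId = WORK_ID_GENERATOR.incrementAndGet();
        private final int numberOfNodes;
        private final IClusterEventsSubscriber subscriber;

        public AddNodeWork(int numberOfNodes, IClusterEventsSubscriber subscriber) {
            this.numberOfNodes = numberOfNodes;
            this.subscriber = subscriber;
        }

        @Override
        public WorkType getClusterManagementWorkType() {
            return WorkType.ADD_NODE;
        }

        @Override
        public int getWorkId() {
            return workId;
        }

        @Override
        public IClusterEventsSubscriber getSourceSubscriber() {
            return subscriber;
        }
    }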
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManager.java
new file mode 100644
index 0000000..ea07a62
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IClusterManager.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.asterix.metadata.api;
+
+import java.util.Set;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+
+public interface IClusterManager {
+
+ /**
+ * @param node
+ * @throws AsterixException
+ */
+ public void addNode(Node node) throws AsterixException;
+
+ /**
+ * @param node
+ * @throws AsterixException
+ */
+ public void removeNode(Node node) throws AsterixException;
+
+ /**
+ * @param subscriber
+ */
+ public void registerSubscriber(IClusterEventsSubscriber subscriber);
+
+ /**
+ * @param subscriber
+ * @return
+ */
+ public boolean deregisterSubscriber(IClusterEventsSubscriber subscriber);
+
+ /**
+ * @return
+ */
+ public Set<IClusterEventsSubscriber> getRegisteredClusterEventSubscribers();
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntity.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntity.java
index 6a553bd..971b9a7 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntity.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntity.java
@@ -20,11 +20,11 @@
import edu.uci.ics.asterix.metadata.MetadataCache;
public interface IMetadataEntity extends Serializable {
-
+
public static final int PENDING_NO_OP = 0;
public static final int PENDING_ADD_OP = 1;
public static final int PENDING_DROP_OP = 2;
-
+
Object addToCache(MetadataCache cache);
Object dropFromCache(MetadataCache cache);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index 9c9d02b..745b817 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -40,7 +40,7 @@
* @throws MetadataException
* @throws IOException
*/
- public T getMetadataEntytiFromTuple(ITupleReference tuple) throws MetadataException, IOException;
+ public T getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException;
/**
* Serializes the given metadata entity of type T into an appropriate tuple
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
index 392c8a1..a605b02 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
@@ -8,7 +8,7 @@
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@@ -19,6 +19,7 @@
import java.util.List;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
@@ -27,8 +28,13 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.Node;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
@@ -105,7 +111,7 @@
* @throws ACIDException
* @throws RemoteException
*/
- public void unlock(MetadataTransactionContext ctx) throws ACIDException, RemoteException;
+ public void unlock(MetadataTransactionContext ctx, byte lockMode) throws ACIDException, RemoteException;
/**
* Inserts a new dataverse into the metadata.
@@ -458,6 +464,79 @@
public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
throws MetadataException;
+ /**
+ * @param ctx
+ * @param feed
+ * @throws MetadataException
+ */
+ public void addFeed(MetadataTransactionContext ctx, Feed feed) throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param dataverse
+ * @param feedName
+ * @return
+ * @throws MetadataException
+ */
+ public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param dataverse
+ * @param feedName
+ * @throws MetadataException
+ */
+ public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param feedId
+ * @param feedActivity
+ * @throws MetadataException
+ */
+ public void registerFeedActivity(MetadataTransactionContext ctx, FeedConnectionId feedId, FeedActivity feedActivity)
+ throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param feedId
+ * @param activityTypeFilter
+ * @return
+ * @throws MetadataException
+ */
+ public FeedActivity getRecentActivityOnFeedConnection(MetadataTransactionContext ctx, FeedConnectionId feedId,
+ FeedActivityType... activityTypeFilter) throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param policy
+ * @throws MetadataException
+ */
+ public void addFeedPolicy(MetadataTransactionContext ctx, FeedPolicy policy) throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param dataverse
+ * @param policyName
+ * @return
+ * @throws MetadataException
+ */
+ public FeedPolicy getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
+ throws MetadataException;
+
+ /**
+ * @param ctx
+ * @param dataverse
+ * @param dataset
+ * @return
+ * @throws MetadataException
+ */
+ public List<FeedActivity> getActiveFeeds(MetadataTransactionContext ctx, String dataverse, String dataset)
+ throws MetadataException;
+
+ public List<FeedActivity> getConnectFeedActivitiesForFeed(MetadataTransactionContext ctx, String dataverse,
+ String feedName) throws MetadataException;
+
public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException;
public int getMostRecentDatasetId() throws MetadataException;
@@ -470,4 +549,62 @@
public void releaseReadLatch();
+ /**
+ * Removes a library, acquiring local locks on behalf of the given
+ * transaction id.
+ *
+ * @param ctx
+ * MetadataTransactionContext of an active metadata transaction.
+ * @param dataverseName
+ * dataverse associated with the library that is to be deleted.
+ * @param libraryName
+ * Name of the library to be deleted.
+ * @throws MetadataException
+ * for example, if the library does not exist.
+ * @throws RemoteException
+ */
+ public void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ throws MetadataException;
+
+ /**
+ * Adds a library, acquiring local locks on behalf of the given
+ * transaction id.
+ *
+ * @param ctx
+ * MetadataTransactionContext of an active metadata transaction.
+ * @param library
+ * Library to be added
+ * @throws MetadataException
+ * for example, if the library is already added.
+ * @throws RemoteException
+ */
+ public void addLibrary(MetadataTransactionContext ctx, Library library) throws MetadataException;
+
+ /**
+ * @param ctx
+ * MetadataTransactionContext of an active metadata transaction.
+ * @param dataverseName
+ * dataverse associated with the library that is to be retrieved.
+ * @param libraryName
+ * name of the library that is to be retrieved
+ * @return Library
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ throws MetadataException, RemoteException;
+
+ /**
+ * Retrieves the libraries installed in a given dataverse.
+ *
+ * @param ctx
+ * MetadataTransactionContext of an active metadata transaction.
+ * @param dataverseName
+ * dataverse whose libraries are to be retrieved.
+ * @return the list of libraries installed in the dataverse
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
+ throws MetadataException;
+
}
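A hedged usage sketch of the library methods specified above; the two-argument Library constructor matches the (dataverse, name) form used in MetadataTransactionContext, and the names are illustrative:

    MetadataTransactionContext ctx = MetadataManager.INSTANCE.beginTransaction();
    try {
        MetadataManager.INSTANCE.addLibrary(ctx, new Library("Marketing", "sentimentUDFs"));
        Library lib = MetadataManager.INSTANCE.getLibrary(ctx, "Marketing", "sentimentUDFs");
        List<Library> installed = MetadataManager.INSTANCE.getDataverseLibraries(ctx, "Marketing");
        MetadataManager.INSTANCE.commitTransaction(ctx);
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(ctx);
        throw e;
    }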
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
index f27268f..45a350b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
@@ -21,6 +21,7 @@
import java.util.List;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.metadata.MetadataException;
@@ -29,8 +30,13 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Function;
import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.Library;
import edu.uci.ics.asterix.metadata.entities.Node;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
@@ -84,7 +90,7 @@
* @throws ACIDException
* @throws RemoteException
*/
- public void unlock(JobId jobId) throws ACIDException, RemoteException;
+ public void unlock(JobId jobId, byte lockMode) throws ACIDException, RemoteException;
/**
* Inserts a new dataverse into the metadata, acquiring local locks on
@@ -443,6 +449,14 @@
public List<DatasourceAdapter> getDataverseAdapters(JobId jobId, String dataverseName) throws MetadataException,
RemoteException;
+ /**
+ * @param jobId
+ * @param dataverseName
+ * @param adapterName
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
public DatasourceAdapter getAdapter(JobId jobId, String dataverseName, String adapterName)
throws MetadataException, RemoteException;
@@ -482,6 +496,16 @@
public void addCompactionPolicy(JobId jobId, CompactionPolicy compactionPolicy) throws MetadataException,
RemoteException;
+ public FeedActivity getRecentFeedActivity(JobId jobId, FeedConnectionId feedId,
+ FeedActivityType... feedActivityFilter) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public void initializeFeedActivityIdFactory(JobId jobId) throws MetadataException, RemoteException;
+
/**
* @param jobId
* @param dataverse
@@ -493,8 +517,163 @@
public CompactionPolicy getCompactionPolicy(JobId jobId, String dataverse, String policy) throws MetadataException,
RemoteException;
+ /**
+ * @param jobId
+ * @throws MetadataException
+ * @throws RemoteException
+ */
public void initializeDatasetIdFactory(JobId jobId) throws MetadataException, RemoteException;
+ /**
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
public int getMostRecentDatasetId() throws MetadataException, RemoteException;
+ /**
+ * @param jobId
+ * @param feed
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public void addFeed(JobId jobId, Feed feed) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverse
+ * @param feedName
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public Feed getFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverse
+ * @param feedName
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public void dropFeed(JobId jobId, String dataverse, String feedName) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * A globally unique id for an active metadata transaction.
+ * @param feedId
+ * A unique id for the feed
+ * @param feedActivity
+ */
+ public void registerFeedActivity(JobId jobId, FeedConnectionId feedId, FeedActivity feedActivity)
+ throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @param feedPolicy
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public void addFeedPolicy(JobId jobId, FeedPolicy feedPolicy) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverse
+ * @param policy
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public FeedPolicy getFeedPolicy(JobId jobId, String dataverse, String policy) throws MetadataException,
+ RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverse
+ * @param dataset
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public List<FeedActivity> getActiveFeeds(JobId jobId, String dataverse, String dataset) throws MetadataException,
+ RemoteException;
+
+ /**
+ * Removes a library, acquiring local locks on behalf of the given
+ * transaction id.
+ *
+ * @param txnId
+ * A globally unique id for an active metadata transaction.
+ * @param dataverseName
+ * dataverse associated with the library that is to be deleted.
+ * @param libraryName
+ * Name of the library to be deleted.
+ * @throws MetadataException
+ * for example, if the library does not exist.
+ * @throws RemoteException
+ */
+ public void dropLibrary(JobId jobId, String dataverseName, String libraryName) throws MetadataException,
+ RemoteException;
+
+ /**
+ * Adds a library, acquiring local locks on behalf of the given
+ * transaction id.
+ *
+ * @param txnId
+ * A globally unique id for an active metadata transaction.
+ * @param library
+ * Library to be added
+ * @throws MetadataException
+ * for example, if the library is already added.
+ * @throws RemoteException
+ */
+ public void addLibrary(JobId jobId, Library library) throws MetadataException, RemoteException;
+
+ /**
+ * @param txnId
+ * A globally unique id for an active metadata transaction.
+ * @param dataverseName
+ * dataverse associated with the library that is to be retrieved.
+ * @param libraryName
+ * name of the library that is to be retrieved
+ * @return Library
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public Library getLibrary(JobId jobId, String dataverseName, String libraryName) throws MetadataException,
+ RemoteException;
+
+ /**
+ * Retrieves the libraries installed in a given dataverse.
+ *
+ * @param txnId
+ * A globally unique id for an active metadata transaction.
+ * @param dataverseName
+ * dataverse whose libraries are to be retrieved.
+ * @return the list of libraries installed in the dataverse
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public List<Library> getDataverseLibraries(JobId jobId, String dataverseName) throws MetadataException,
+ RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverseName
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public List<Feed> getDataverseFeeds(JobId jobId, String dataverseName) throws MetadataException, RemoteException;
+
+ /**
+ * @param jobId
+ * @param dataverseName
+ * @param feedName
+ * @return
+ * @throws MetadataException
+ * @throws RemoteException
+ */
+ public List<FeedActivity> getDatasetsServedByFeed(JobId jobId, String dataverseName, String feedName)
+ throws MetadataException, RemoteException;
+
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 455715d..2f11190 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -37,8 +37,6 @@
import edu.uci.ics.asterix.common.context.DatasetLifecycleManager;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataManager;
@@ -51,12 +49,16 @@
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
import edu.uci.ics.asterix.metadata.entities.Node;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.IAdapterFactory;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat;
@@ -119,7 +121,10 @@
MetadataPrimaryIndexes.DATASET_DATASET, MetadataPrimaryIndexes.DATATYPE_DATASET,
MetadataPrimaryIndexes.INDEX_DATASET, MetadataPrimaryIndexes.NODE_DATASET,
MetadataPrimaryIndexes.NODEGROUP_DATASET, MetadataPrimaryIndexes.FUNCTION_DATASET,
- MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET };
+ MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, MetadataPrimaryIndexes.FEED_DATASET,
+ MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET, MetadataPrimaryIndexes.FEED_POLICY_DATASET,
+ MetadataPrimaryIndexes.LIBRARY_DATASET, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET };
+
secondaryIndexes = new IMetadataIndex[] { MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX,
MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX,
MetadataSecondaryIndexes.DATATYPENAME_ON_DATATYPE_INDEX };
@@ -164,6 +169,11 @@
for (int i = 0; i < secondaryIndexes.length; i++) {
enlistMetadataDataset(secondaryIndexes[i], true, mdTxnCtx);
}
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished enlistment of metadata B-trees in new universe");
+ }
+
insertInitialDataverses(mdTxnCtx);
insertInitialDatasets(mdTxnCtx);
insertInitialDatatypes(mdTxnCtx);
@@ -171,6 +181,7 @@
insertNodes(mdTxnCtx);
insertInitialGroups(mdTxnCtx);
insertInitialAdapters(mdTxnCtx);
+ insertInitialFeedPolicies(mdTxnCtx);
insertInitialCompactionPolicies(mdTxnCtx);
if (LOGGER.isLoggable(Level.INFO)) {
@@ -185,7 +196,7 @@
}
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Finished enlistment of metadata B-trees.");
+ LOGGER.info("Finished enlistment of metadata B-trees in old universe.");
}
}
@@ -210,18 +221,7 @@
public static void stopUniverse() throws HyracksDataException {
// Close all BTree files in BufferCache.
- for (int i = 0; i < primaryIndexes.length; i++) {
- long resourceID = localResourceRepository
- .getResourceByName(primaryIndexes[i].getFile().getFile().getPath()).getResourceId();
- indexLifecycleManager.close(resourceID);
- indexLifecycleManager.unregister(resourceID);
- }
- for (int i = 0; i < secondaryIndexes.length; i++) {
- long resourceID = localResourceRepository.getResourceByName(
- secondaryIndexes[i].getFile().getFile().getPath()).getResourceId();
- indexLifecycleManager.close(resourceID);
- indexLifecycleManager.unregister(resourceID);
- }
+ // Metadata datasets will be closed when the dataset lifecycle manager is closed.
}
public static void insertInitialDataverses(MetadataTransactionContext mdTxnCtx) throws Exception {
@@ -235,13 +235,16 @@
for (int i = 0; i < primaryIndexes.length; i++) {
IDatasetDetails id = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
primaryIndexes[i].getPartitioningExpr(), primaryIndexes[i].getPartitioningExpr(),
- primaryIndexes[i].getNodeGroupName(), GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME,
+ primaryIndexes[i].getNodeGroupName(), false, GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME,
GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES);
MetadataManager.INSTANCE.addDataset(mdTxnCtx, new Dataset(primaryIndexes[i].getDataverseName(),
primaryIndexes[i].getIndexedDatasetName(), primaryIndexes[i].getPayloadRecordType().getTypeName(),
id, new HashMap<String, String>(), DatasetType.INTERNAL, primaryIndexes[i].getDatasetId().getId(),
IMetadataEntity.PENDING_NO_OP));
}
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished inserting initial datasets.");
+ }
}
public static void getBuiltinTypes(ArrayList<IAType> types) throws Exception {
@@ -265,6 +268,9 @@
MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, types.get(i).getTypeName(),
types.get(i), false));
}
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished inserting initial datatypes.");
+ }
}
public static void insertInitialIndexes(MetadataTransactionContext mdTxnCtx) throws Exception {
@@ -273,6 +279,9 @@
secondaryIndexes[i].getIndexedDatasetName(), secondaryIndexes[i].getIndexName(), IndexType.BTREE,
secondaryIndexes[i].getPartitioningExpr(), false, IMetadataEntity.PENDING_NO_OP));
}
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished inserting initial indexes.");
+ }
}
public static void insertNodes(MetadataTransactionContext mdTxnCtx) throws Exception {
@@ -305,17 +314,34 @@
private static void insertInitialAdapters(MetadataTransactionContext mdTxnCtx) throws Exception {
String[] builtInAdapterClassNames = new String[] {
+ "edu.uci.ics.asterix.external.adapter.factory.PullBasedAzureTwitterAdapterFactory",
"edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory",
"edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory",
"edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory",
"edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory",
"edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory",
- "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", };
+ "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory",
+ "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory",
+ "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory",
+ "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory",
+ "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory" };
DatasourceAdapter adapter;
for (String adapterClassName : builtInAdapterClassNames) {
adapter = getAdapter(adapterClassName);
MetadataManager.INSTANCE.addAdapter(mdTxnCtx, adapter);
}
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished inserting built-in adapters.");
+ }
+ }
+
+ private static void insertInitialFeedPolicies(MetadataTransactionContext mdTxnCtx) throws Exception {
+ for (FeedPolicy feedPolicy : BuiltinFeedPolicies.policies) {
+ MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, feedPolicy);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished adding built-in feed policies.");
+ }
}
private static void insertInitialCompactionPolicies(MetadataTransactionContext mdTxnCtx) throws Exception {
@@ -358,6 +384,7 @@
ILSMOperationTracker opTracker = index.isPrimaryIndex() ? runtimeContext.getLSMBTreeOperationTracker(index
.getDatasetId().getId()) : new BaseOperationTracker((DatasetLifecycleManager) indexLifecycleManager,
index.getDatasetId().getId());
+ final String path = file.getFile().getPath();
if (create) {
lsmBtree = LSMBTreeUtils.createLSMTree(
virtualBufferCaches,
@@ -380,11 +407,11 @@
ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
localResourceMetadata, LocalResource.LSMBTreeResource);
ILocalResourceFactory localResourceFactory = localResourceFactoryProvider.getLocalResourceFactory();
- localResourceRepository.insert(localResourceFactory.createLocalResource(resourceID, file.getFile()
- .getPath(), 0));
+ localResourceRepository.insert(localResourceFactory.createLocalResource(resourceID, path, 0));
indexLifecycleManager.register(resourceID, lsmBtree);
} else {
- resourceID = localResourceRepository.getResourceByName(file.getFile().getPath()).getResourceId();
+ final LocalResource resource = localResourceRepository.getResourceByName(path);
+ resourceID = resource.getResourceId();
lsmBtree = (LSMBTree) indexLifecycleManager.getIndex(resourceID);
if (lsmBtree == null) {
lsmBtree = LSMBTreeUtils.createLSMTree(virtualBufferCaches, file, bufferCache, fileMapProvider,
@@ -399,7 +426,6 @@
index.setResourceID(resourceID);
index.setFile(file);
- indexLifecycleManager.open(resourceID);
}
public static String getOutputDir() {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataConstants.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataConstants.java
index 4c9b7d2..1957666 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataConstants.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataConstants.java
@@ -19,10 +19,10 @@
// Name of the dataverse the metadata lives in.
public final static String METADATA_DATAVERSE_NAME = "Metadata";
-
+
// Name of the node group where metadata is stored on.
public final static String METADATA_NODEGROUP_NAME = "MetadataGroup";
-
+
// Name of the default nodegroup where internal/feed datasets will be partitioned
// if an explicit nodegroup is not specified at the time of creation of a dataset
public static final String METADATA_DEFAULT_NODEGROUP_NAME = "DEFAULT_NG_ALL_NODES";
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
index f6bc9e6..632b6fd 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
@@ -32,6 +32,13 @@
public static IMetadataIndex NODE_DATASET;
public static IMetadataIndex NODEGROUP_DATASET;
public static IMetadataIndex FUNCTION_DATASET;
+ public static IMetadataIndex DATASOURCE_ADAPTER_DATASET;
+ public static IMetadataIndex LIBRARY_DATASET;
+ public static IMetadataIndex FEED_DATASET;
+ public static IMetadataIndex FEED_ACTIVITY_DATASET;
+ public static IMetadataIndex FEED_POLICY_DATASET;
+ public static IMetadataIndex COMPACTION_POLICY_DATASET;
+
public static final int METADATA_DATASET_ID = 0;
public static final int DATAVERSE_DATASET_ID = 1;
public static final int DATASET_DATASET_ID = 2;
@@ -41,11 +48,14 @@
public static final int NODEGROUP_DATASET_ID = 6;
public static final int FUNCTION_DATASET_ID = 7;
public static final int DATASOURCE_ADAPTER_DATASET_ID = 8;
- public static final int COMPACTION_POLICY_DATASET_ID = 9;
- public static final int FIRST_AVAILABLE_USER_DATASET_ID = 100;
- public static IMetadataIndex DATASOURCE_ADAPTER_DATASET;
- public static IMetadataIndex COMPACTION_POLICY_DATASET;
+ public static final int LIBRARY_DATASET_ID = 9;
+ public static final int FEED_DATASET_ID = 10;
+ public static final int FEED_ACTIVITY_DATASET_ID = 11;
+ public static final int FEED_POLICY_DATASET_ID = 12;
+ public static final int COMPACTION_POLICY_DATASET_ID = 13;
+
+ public static final int FIRST_AVAILABLE_USER_DATASET_ID = 100;
/**
* Create all metadata primary index descriptors. MetadataRecordTypes must
@@ -95,6 +105,23 @@
MetadataRecordTypes.DATASOURCE_ADAPTER_RECORDTYPE, DATASOURCE_ADAPTER_DATASET_ID, true, new int[] { 0,
1 });
+ FEED_DATASET = new MetadataIndex("Feed", null, 3, new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING },
+ new String[] { "DataverseName", "FeedName" }, 0, MetadataRecordTypes.FEED_RECORDTYPE, FEED_DATASET_ID,
+ true, new int[] { 0, 1 });
+
+ FEED_ACTIVITY_DATASET = new MetadataIndex("FeedActivity", null, 5, new IAType[] { BuiltinType.ASTRING,
+ BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32 }, new String[] { "DataverseName",
+ "FeedName", "DatasetName", "ActivityId" }, 0, MetadataRecordTypes.FEED_ACTIVITY_RECORDTYPE,
+ FEED_ACTIVITY_DATASET_ID, true, new int[] { 0, 1, 2, 3 });
+
+ LIBRARY_DATASET = new MetadataIndex("Library", null, 3,
+ new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, new String[] { "DataverseName", "Name" }, 0,
+ MetadataRecordTypes.LIBRARY_RECORDTYPE, LIBRARY_DATASET_ID, true, new int[] { 0, 1 });
+
+ FEED_POLICY_DATASET = new MetadataIndex("FeedPolicy", null, 3, new IAType[] { BuiltinType.ASTRING,
+ BuiltinType.ASTRING }, new String[] { "DataverseName", "PolicyName" }, 0,
+ MetadataRecordTypes.FEED_POLICY_RECORDTYPE, FEED_POLICY_DATASET_ID, true, new int[] { 0, 1 });
+
COMPACTION_POLICY_DATASET = new MetadataIndex("CompactionPolicy", null, 3, new IAType[] { BuiltinType.ASTRING,
BuiltinType.ASTRING }, new String[] { "DataverseName", "CompactionPolicy" }, 0,
MetadataRecordTypes.COMPACTION_POLICY_RECORDTYPE, COMPACTION_POLICY_DATASET_ID, true,
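Since the MetadataIndex constructor takes ten positional arguments, one of the new descriptors is re-stated below with the argument roles spelled out. The role comments are inferred from this hunk and its sibling descriptors, not from documented API; one argument's role is unclear and is flagged as such.

    // Annotated form of the FEED_POLICY_DATASET descriptor defined above.
    // Treat the role comments as a reading aid, not a specification.
    FEED_POLICY_DATASET = new MetadataIndex(
            "FeedPolicy",                                   // metadata dataset name
            null,                                           // index name (null for a primary index)
            3,                                              // field count: two key fields plus the payload record
            new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, // key types
            new String[] { "DataverseName", "PolicyName" }, // key field names
            0,                                              // role unclear from this hunk; the secondary-index descriptors pass 2
            MetadataRecordTypes.FEED_POLICY_RECORDTYPE,     // payload record type
            FEED_POLICY_DATASET_ID,                         // stable dataset id (12)
            true,                                           // this is the primary index
            new int[] { 0, 1 });                            // source positions of the key fields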
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index 8452340..c400daa 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -48,6 +48,13 @@
public static ARecordType NODEGROUP_RECORDTYPE;
public static ARecordType FUNCTION_RECORDTYPE;
public static ARecordType DATASOURCE_ADAPTER_RECORDTYPE;
+ public static ARecordType FEED_RECORDTYPE;
+ public static ARecordType FEED_ADAPTOR_CONFIGURATION_RECORDTYPE;
+ public static ARecordType FEED_ACTIVITY_RECORDTYPE;
+ public static ARecordType FEED_POLICY_RECORDTYPE;
+ public static ARecordType POLICY_PARAMS_RECORDTYPE;
+ public static ARecordType FEED_ACTIVITY_DETAILS_RECORDTYPE;
+ public static ARecordType LIBRARY_RECORDTYPE;
public static ARecordType COMPACTION_POLICY_RECORDTYPE;
/**
@@ -58,6 +65,7 @@
// depend on other types being created first.
// These calls are one "dependency chain".
try {
+ POLICY_PARAMS_RECORDTYPE = createPropertiesRecordType();
DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE = createPropertiesRecordType();
COMPACTION_POLICY_PROPERTIES_RECORDTYPE = createPropertiesRecordType();
INTERNAL_DETAILS_RECORDTYPE = createInternalDetailsRecordType();
@@ -79,12 +87,33 @@
NODEGROUP_RECORDTYPE = createNodeGroupRecordType();
FUNCTION_RECORDTYPE = createFunctionRecordType();
DATASOURCE_ADAPTER_RECORDTYPE = createDatasourceAdapterRecordType();
+
+ FEED_RECORDTYPE = createFeedRecordType();
+ FEED_ADAPTOR_CONFIGURATION_RECORDTYPE = createPropertiesRecordType();
+ FEED_ACTIVITY_DETAILS_RECORDTYPE = createPropertiesRecordType();
+ FEED_ACTIVITY_RECORDTYPE = createFeedActivityRecordType();
+ FEED_POLICY_RECORDTYPE = createFeedPolicyRecordType();
+ LIBRARY_RECORDTYPE = createLibraryRecordType();
+
COMPACTION_POLICY_RECORDTYPE = createCompactionPolicyRecordType();
+
} catch (AsterixException e) {
throw new MetadataException(e);
}
}
+ public static final int FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX = 0;
+ public static final int FEED_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX = 1;
+ public static final int FEED_POLICY_ARECORD_DESCRIPTION_FIELD_INDEX = 2;
+ public static final int FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX = 3;
+
+ private static ARecordType createFeedPolicyRecordType() throws AsterixException {
+ AUnorderedListType listPropertiesType = new AUnorderedListType(POLICY_PARAMS_RECORDTYPE, null);
+ String[] fieldNames = { "DataverseName", "PolicyName", "Description", "Properties" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, listPropertiesType };
+ return new ARecordType("FeedPolicyRecordType", fieldNames, fieldTypes, true);
+ }
+
// Helper constants for accessing fields in an ARecord of type
// DataverseRecordType.
public static final int DATAVERSE_ARECORD_NAME_FIELD_INDEX = 0;
@@ -117,17 +146,18 @@
public static final int INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX = 2;
public static final int INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX = 3;
public static final int INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX = 4;
- public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX = 5;
- public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX = 6;
+ public static final int INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX = 5;
+ public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX = 6;
+ public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX = 7;
private static final ARecordType createInternalDetailsRecordType() throws AsterixException {
AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
AOrderedListType compactionPolicyPropertyListType = new AOrderedListType(
COMPACTION_POLICY_PROPERTIES_RECORDTYPE, null);
String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName",
- "CompactionPolicy", "CompactionPolicyProperties" };
+ "Autogenerated", "CompactionPolicy", "CompactionPolicyProperties" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, olType, olType, BuiltinType.ASTRING,
- BuiltinType.ASTRING, compactionPolicyPropertyListType };
+ BuiltinType.ABOOLEAN, BuiltinType.ASTRING, compactionPolicyPropertyListType };
return new ARecordType(null, fieldNames, fieldTypes, true);
}
@@ -149,7 +179,7 @@
public static final int COMPACTION_POLICY_ARECORD_CLASSNAME_FIELD_INDEX = 2;
private static ARecordType createCompactionPolicyRecordType() throws AsterixException {
- String[] fieldNames = { "DataverseName", "PolicyName", "Classname" };
+ String[] fieldNames = { "DataverseName", "CompactionPolicy", "Classname" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
return new ARecordType("CompactionPolicyRecordType", fieldNames, fieldTypes, true);
}
@@ -194,15 +224,14 @@
public static final int DATASET_ARECORD_DATASETTYPE_FIELD_INDEX = 3;
public static final int DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX = 4;
public static final int DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX = 5;
- public static final int DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX = 6;
- public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 7;
- public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 8;
- public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 9;
- public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 10;
+ public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 6;
+ public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 7;
+ public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 8;
+ public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 9;
private static final ARecordType createDatasetRecordType() throws AsterixException {
String[] fieldNames = { "DataverseName", "DatasetName", "DataTypeName", "DatasetType", "InternalDetails",
- "ExternalDetails", "FeedDetails", "Hints", "Timestamp", "DatasetId", "PendingOp" };
+ "ExternalDetails", "Hints", "Timestamp", "DatasetId", "PendingOp" };
List<IAType> internalRecordUnionList = new ArrayList<IAType>();
internalRecordUnionList.add(BuiltinType.ANULL);
@@ -214,16 +243,11 @@
externalRecordUnionList.add(EXTERNAL_DETAILS_RECORDTYPE);
AUnionType externalRecordUnion = new AUnionType(externalRecordUnionList, null);
- List<IAType> feedRecordUnionList = new ArrayList<IAType>();
- feedRecordUnionList.add(BuiltinType.ANULL);
- feedRecordUnionList.add(FEED_DETAILS_RECORDTYPE);
- AUnionType feedRecordUnion = new AUnionType(feedRecordUnionList, null);
-
AUnorderedListType unorderedListOfHintsType = new AUnorderedListType(DATASET_HINTS_RECORDTYPE, null);
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
- internalRecordUnion, externalRecordUnion, feedRecordUnion, unorderedListOfHintsType,
- BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT32 };
+ internalRecordUnion, externalRecordUnion, unorderedListOfHintsType, BuiltinType.ASTRING,
+ BuiltinType.AINT32, BuiltinType.AINT32 };
return new ARecordType("DatasetRecordType", fieldNames, fieldTypes, true);
}
@@ -382,4 +406,58 @@
return new ARecordType("DatasourceAdapterRecordType", fieldNames, fieldTypes, true);
}
+ // Helper constants for accessing fields in an ARecord of type
+ // FeedActivityRecordType.
+ public static final int FEED_ACTIVITY_ARECORD_DATAVERSE_NAME_FIELD_INDEX = 0;
+ public static final int FEED_ACTIVITY_ARECORD_FEED_NAME_FIELD_INDEX = 1;
+ public static final int FEED_ACTIVITY_ARECORD_DATASET_NAME_FIELD_INDEX = 2;
+ public static final int FEED_ACTIVITY_ARECORD_ACTIVITY_ID_FIELD_INDEX = 3;
+ public static final int FEED_ACTIVITY_ARECORD_ACTIVITY_TYPE_FIELD_INDEX = 4;
+ public static final int FEED_ACTIVITY_ARECORD_DETAILS_FIELD_INDEX = 5;
+ public static final int FEED_ACTIVITY_ARECORD_LAST_UPDATE_TIMESTAMP_FIELD_INDEX = 6;
+
+ private static ARecordType createFeedActivityRecordType() throws AsterixException {
+ AUnorderedListType unorderedPropertyListType = new AUnorderedListType(FEED_ACTIVITY_DETAILS_RECORDTYPE, null);
+ String[] fieldNames = { "DataverseName", "FeedName", "DatasetName", "ActivityId", "ActivityType", "Details",
+ "Timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
+ BuiltinType.ASTRING, unorderedPropertyListType, BuiltinType.ASTRING };
+ return new ARecordType("FeedActivityRecordType", fieldNames, fieldTypes, true);
+ }
+
+ public static final int FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX = 0;
+ public static final int FEED_ARECORD_FEED_NAME_FIELD_INDEX = 1;
+ public static final int FEED_ARECORD_ADAPTOR_NAME_FIELD_INDEX = 2;
+ public static final int FEED_ARECORD_ADAPTOR_CONFIGURATION_FIELD_INDEX = 3;
+ public static final int FEED_ARECORD_FUNCTION_FIELD_INDEX = 4;
+ public static final int FEED_ARECORD_TIMESTAMP_FIELD_INDEX = 5;
+
+ private static ARecordType createFeedRecordType() throws AsterixException {
+
+ AUnorderedListType unorderedAdaptorPropertyListType = new AUnorderedListType(
+ DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
+
+ List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
+ feedFunctionUnionList.add(BuiltinType.ANULL);
+ feedFunctionUnionList.add(BuiltinType.ASTRING);
+ AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
+
+ String[] fieldNames = { "DataverseName", "FeedName", "AdaptorName", "AdaptorConfiguration", "Function",
+ "Timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+ unorderedAdaptorPropertyListType, feedFunctionUnion, BuiltinType.ASTRING };
+
+ return new ARecordType("FeedRecordType", fieldNames, fieldTypes, true);
+
+ }
+
+ public static final int LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX = 0;
+ public static final int LIBRARY_ARECORD_NAME_FIELD_INDEX = 1;
+ public static final int LIBRARY_ARECORD_TIMESTAMP_FIELD_INDEX = 2;
+
+ private static ARecordType createLibraryRecordType() throws AsterixException {
+ String[] fieldNames = { "DataverseName", "Name", "Timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+ return new ARecordType("LibraryRecordType", fieldNames, fieldTypes, true);
+ }
}
\ No newline at end of file
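All of the create*RecordType helpers above follow one pattern; here is a self-contained sketch of it, mirroring createLibraryRecordType(). The trailing boolean selects an open record type, so stored records may carry fields beyond the declared ones.

    import edu.uci.ics.asterix.common.exceptions.AsterixException;
    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.asterix.om.types.BuiltinType;
    import edu.uci.ics.asterix.om.types.IAType;

    public class RecordTypeSketch {
        // Parallel name/type arrays plus an isOpen flag, exactly as in the
        // helpers above.
        public static ARecordType libraryRecordType() throws AsterixException {
            String[] fieldNames = { "DataverseName", "Name", "Timestamp" };
            IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
            return new ARecordType("LibraryRecordType", fieldNames, fieldTypes, true);
        }
    }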
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
index 315c5e9..c33c21f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
@@ -56,5 +56,6 @@
BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING }, new String[] { "DataverseName",
"NestedDatatypeName", "TopDatatypeName" }, 2, null, MetadataPrimaryIndexes.DATATYPE_DATASET_ID, false,
new int[] { 0, 2 });
+
}
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AbstractClusterManagementWork.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AbstractClusterManagementWork.java
new file mode 100644
index 0000000..6948dbc
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AbstractClusterManagementWork.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.cluster;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+
+public abstract class AbstractClusterManagementWork implements IClusterManagementWork {
+
+ protected final IClusterEventsSubscriber subscriber;
+
+ protected final int workId;
+
+ @Override
+ public int getWorkId() {
+ return workId;
+ }
+
+ public AbstractClusterManagementWork(IClusterEventsSubscriber subscriber) {
+ this.subscriber = subscriber;
+ this.workId = WorkIdGenerator.getNextWorkId();
+ }
+
+ private static class WorkIdGenerator {
+ private static final AtomicInteger workId = new AtomicInteger(0);
+
+ public static int getNextWorkId() {
+ return workId.incrementAndGet();
+ }
+
+ }
+
+ @Override
+ public IClusterEventsSubscriber getSourceSubscriber() {
+ return subscriber;
+ }
+
+}
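A minimal concrete subclass, for illustration only; AddNodeWork and RemoveNodeWork below are the real implementations, and WorkType is assumed here to be declared on IClusterManagementWork.

    import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
    import edu.uci.ics.asterix.metadata.api.IClusterManagementWork.WorkType;
    import edu.uci.ics.asterix.metadata.cluster.AbstractClusterManagementWork;

    public class NoOpWork extends AbstractClusterManagementWork {
        public NoOpWork(IClusterEventsSubscriber subscriber) {
            super(subscriber); // the base class assigns a unique workId atomically
        }

        @Override
        public WorkType getClusterManagementWorkType() {
            return WorkType.ADD_NODE; // a real subclass returns its own type
        }
    }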
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWork.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWork.java
new file mode 100644
index 0000000..68dcc4c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWork.java
@@ -0,0 +1,27 @@
+package edu.uci.ics.asterix.metadata.cluster;
+
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+
+public class AddNodeWork extends AbstractClusterManagementWork {
+
+ private final int numberOfNodes;
+
+ @Override
+ public WorkType getClusterManagementWorkType() {
+ return WorkType.ADD_NODE;
+ }
+
+ public AddNodeWork(int numberOfNodes, IClusterEventsSubscriber subscriber) {
+ super(subscriber);
+ this.numberOfNodes = numberOfNodes;
+ }
+
+ public int getNumberOfNodes() {
+ return numberOfNodes;
+ }
+
+ @Override
+ public String toString() {
+ return WorkType.ADD_NODE + " " + numberOfNodes + " requested by " + subscriber;
+ }
+}
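A usage sketch: a subscriber (for example, a component reacting to a node failure) describes the compensation it needs as an AddNodeWork item. How the item is then dispatched to the cluster manager is outside this hunk and assumed here.

    import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
    import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;

    public class AddNodeRequestSketch {
        public static AddNodeWork requestNodes(int count, IClusterEventsSubscriber requester) {
            return new AddNodeWork(count, requester);
        }
    }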
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWorkResponse.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWorkResponse.java
new file mode 100644
index 0000000..40999f0
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/AddNodeWorkResponse.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.cluster;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class AddNodeWorkResponse extends ClusterManagementWorkResponse {
+
+ private final List<String> nodesToBeAdded;
+ private final List<String> nodesAdded;
+
+ public AddNodeWorkResponse(AddNodeWork w, List<String> nodesToBeAdded) {
+ super(w);
+ this.nodesToBeAdded = nodesToBeAdded;
+ this.nodesAdded = new ArrayList<String>();
+ }
+
+ public List<String> getNodesAdded() {
+ return nodesAdded;
+ }
+
+ public boolean updateProgress(String addedNode) {
+ nodesToBeAdded.remove(addedNode);
+ nodesAdded.add(addedNode);
+ return nodesToBeAdded.isEmpty();
+ }
+
+}
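The intended protocol, as a sketch with made-up node ids: updateProgress moves a node from pending to added and reports completion. Note that the constructor keeps a reference to the list it is handed and updateProgress mutates it, so callers should pass a dedicated copy.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
    import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
    import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse.Status;

    public class AddNodeProgressSketch {
        public static AddNodeWorkResponse track(AddNodeWork work) {
            List<String> pending = new ArrayList<String>(Arrays.asList("nc3", "nc4"));
            AddNodeWorkResponse resp = new AddNodeWorkResponse(work, pending);
            resp.updateProgress("nc3");       // false: nc4 still pending
            if (resp.updateProgress("nc4")) { // true: all requested nodes joined
                resp.setStatus(Status.SUCCESS);
            }
            return resp;
        }
    }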
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManagementWorkResponse.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManagementWorkResponse.java
new file mode 100644
index 0000000..d578a77
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManagementWorkResponse.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.asterix.metadata.cluster;
+
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+
+public class ClusterManagementWorkResponse implements IClusterManagementWorkResponse {
+
+ protected final IClusterManagementWork work;
+
+ protected Status status;
+
+ public ClusterManagementWorkResponse(IClusterManagementWork w) {
+ this.work = w;
+ this.status = Status.IN_PROGRESS;
+ }
+
+ @Override
+ public IClusterManagementWork getWork() {
+ return work;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
+ }
+
+ @Override
+ public void setStatus(Status status) {
+ this.status = status;
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManager.java
new file mode 100644
index 0000000..cefb431
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/ClusterManager.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.cluster;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.management.AsterixEventServiceClient;
+import edu.uci.ics.asterix.event.model.AsterixInstance;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.event.schema.pattern.Pattern;
+import edu.uci.ics.asterix.event.schema.pattern.Patterns;
+import edu.uci.ics.asterix.event.service.AsterixEventService;
+import edu.uci.ics.asterix.event.service.AsterixEventServiceUtil;
+import edu.uci.ics.asterix.event.service.ILookupService;
+import edu.uci.ics.asterix.event.service.ServiceProvider;
+import edu.uci.ics.asterix.event.util.PatternCreator;
+import edu.uci.ics.asterix.installer.schema.conf.Configuration;
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.metadata.api.IClusterManager;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+
+public class ClusterManager implements IClusterManager {
+
+ private static final Logger LOGGER = Logger.getLogger(ClusterManager.class.getName());
+
+ public static final ClusterManager INSTANCE = new ClusterManager();
+
+ private static String eventsDir = System.getProperty("user.dir") + File.separator + "eventrix";
+
+ private static AsterixEventServiceClient client;
+
+ private static ILookupService lookupService;
+
+ private static final Set<IClusterEventsSubscriber> eventSubscribers = new HashSet<IClusterEventsSubscriber>();
+
+ private ClusterManager() {
+ Cluster asterixCluster = AsterixClusterProperties.INSTANCE.getCluster();
+ String eventHome = asterixCluster == null ? null : asterixCluster.getWorkingDir().getDir();
+
+ if (asterixCluster != null) {
+ String asterixDir = System.getProperty("user.dir") + File.separator + "asterix";
+ File configFile = new File(System.getProperty("user.dir") + File.separator + "configuration.xml");
+ Configuration configuration = null;
+
+ try {
+ JAXBContext configCtx = JAXBContext.newInstance(Configuration.class);
+ Unmarshaller unmarshaller = configCtx.createUnmarshaller();
+ configuration = (Configuration) unmarshaller.unmarshal(configFile);
+ AsterixEventService.initialize(configuration, asterixDir, eventHome);
+ client = AsterixEventService.getAsterixEventServiceClient(AsterixClusterProperties.INSTANCE
+ .getCluster());
+
+ lookupService = ServiceProvider.INSTANCE.getLookupService();
+ if (!lookupService.isRunning(configuration)) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Lookup service not running. Starting lookup service ...");
+ }
+ lookupService.startService(configuration);
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Lookup service running");
+ }
+ }
+
+ } catch (Exception e) {
+ throw new IllegalStateException("Unable to initialize cluster manager" + e);
+ }
+ }
+ }
+
+ @Override
+ public void addNode(Node node) throws AsterixException {
+ try {
+ Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
+ List<Pattern> pattern = new ArrayList<Pattern>();
+ String asterixInstanceName = AsterixAppContextInfo.getInstance().getMetadataProperties().getInstanceName();
+ Patterns prepareNode = PatternCreator.INSTANCE.createPrepareNodePattern(asterixInstanceName,
+ AsterixClusterProperties.INSTANCE.getCluster(), node);
+ cluster.getNode().add(node);
+ client.submit(prepareNode);
+
+ AsterixExternalProperties externalProps = AsterixAppContextInfo.getInstance().getExternalProperties();
+ AsterixEventServiceUtil.poulateClusterEnvironmentProperties(cluster, externalProps.getCCJavaParams(),
+ externalProps.getNCJavaParams());
+
+ pattern.clear();
+ String ccHost = cluster.getMasterNode().getClusterIp();
+ String hostId = node.getId();
+ String nodeControllerId = asterixInstanceName + "_" + node.getId();
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ Pattern startNC = PatternCreator.INSTANCE.createNCStartPattern(ccHost, hostId, nodeControllerId, iodevices);
+ pattern.add(startNC);
+ Patterns startNCPattern = new Patterns(pattern);
+ client.submit(startNCPattern);
+
+ removeNode(cluster.getSubstituteNodes().getNode(), node);
+
+ AsterixInstance instance = lookupService.getAsterixInstance(cluster.getInstanceName());
+ instance.getCluster().getNode().add(node);
+ removeNode(instance.getCluster().getSubstituteNodes().getNode(), node);
+ lookupService.updateAsterixInstance(instance);
+
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ private void removeNode(List<Node> list, Node node) {
+ Node nodeToRemove = null;
+ for (Node n : list) {
+ if (n.getId().equals(node.getId())) {
+ nodeToRemove = n;
+ break;
+ }
+ }
+ if (nodeToRemove != null) {
+ boolean removed = list.remove(nodeToRemove);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("attempt to remove node :" + nodeToRemove + " successful " + removed);
+ }
+ }
+ }
+
+ @Override
+ public void removeNode(Node node) throws AsterixException {
+ // to be implemented later.
+ }
+
+ @Override
+ public void registerSubscriber(IClusterEventsSubscriber subscriber) {
+ eventSubscribers.add(subscriber);
+ }
+
+ @Override
+ public boolean deregisterSubscriber(IClusterEventsSubscriber subscriber) {
+ return eventSubscribers.remove(subscriber);
+ }
+
+ @Override
+ public Set<IClusterEventsSubscriber> getRegisteredClusterEventSubscribers() {
+ return eventSubscribers;
+ }
+}
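A registration sketch. Note that the private constructor does its heavy lifting (JAXB config parsing, event-service startup) the first time ClusterManager.INSTANCE is touched.

    import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
    import edu.uci.ics.asterix.metadata.cluster.ClusterManager;

    public class SubscriberRegistrationSketch {
        public static void attach(IClusterEventsSubscriber subscriber) {
            ClusterManager.INSTANCE.registerSubscriber(subscriber);
        }

        public static void detach(IClusterEventsSubscriber subscriber) {
            ClusterManager.INSTANCE.deregisterSubscriber(subscriber);
        }
    }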
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/IClusterManagementWorkResponse.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/IClusterManagementWorkResponse.java
new file mode 100644
index 0000000..dfc88ac
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/IClusterManagementWorkResponse.java
@@ -0,0 +1,28 @@
+package edu.uci.ics.asterix.metadata.cluster;
+
+import edu.uci.ics.asterix.metadata.api.IClusterManagementWork;
+
+public interface IClusterManagementWorkResponse {
+
+ public enum Status {
+ IN_PROGRESS,
+ SUCCESS,
+ FAILURE
+ }
+
+ /**
+ * @return the cluster management work item this response corresponds to
+ */
+ public IClusterManagementWork getWork();
+
+ /**
+ * @return the current status (IN_PROGRESS, SUCCESS, or FAILURE) of the work item
+ */
+ public Status getStatus();
+
+ /**
+ * @param status
+ * the status to associate with the work item
+ */
+ public void setStatus(Status status);
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWork.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWork.java
new file mode 100644
index 0000000..90683d1
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWork.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.asterix.metadata.cluster;
+
+import java.util.Set;
+
+import edu.uci.ics.asterix.metadata.api.IClusterEventsSubscriber;
+
+public class RemoveNodeWork extends AbstractClusterManagementWork {
+
+ private final Set<String> nodesToBeRemoved;
+
+ @Override
+ public WorkType getClusterManagementWorkType() {
+ return WorkType.REMOVE_NODE;
+ }
+
+ public RemoveNodeWork(Set<String> nodesToBeRemoved, IClusterEventsSubscriber subscriber) {
+ super(subscriber);
+ this.nodesToBeRemoved = nodesToBeRemoved;
+ }
+
+ public Set<String> getNodesToBeRemoved() {
+ return nodesToBeRemoved;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append(WorkType.REMOVE_NODE);
+ for (String node : nodesToBeRemoved) {
+ builder.append(' ').append(node);
+ }
+ builder.append(" requested by ").append(subscriber);
+ return builder.toString();
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWorkResponse.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWorkResponse.java
new file mode 100644
index 0000000..58ea05e
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/cluster/RemoveNodeWorkResponse.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.cluster;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class RemoveNodeWorkResponse extends ClusterManagementWorkResponse {
+
+ private final Set<String> nodesToBeRemoved = new HashSet<String>();
+
+ public RemoveNodeWorkResponse(RemoveNodeWork w, Status status) {
+ super(w);
+ setStatus(status);
+ nodesToBeRemoved.addAll(w.getNodesToBeRemoved());
+ }
+
+ public boolean updateProgress(Set<String> failedNodeIds) {
+ nodesToBeRemoved.removeAll(failedNodeIds);
+ return nodesToBeRemoved.isEmpty();
+ }
+}
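A sketch of how a failure handler would drive this response; the event-delivery path itself is outside this hunk.

    import java.util.Set;

    import edu.uci.ics.asterix.metadata.cluster.IClusterManagementWorkResponse.Status;
    import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWorkResponse;

    public class RemoveNodeProgressSketch {
        // Completion means every node named in the originating RemoveNodeWork
        // has actually left the cluster.
        public static void onNodesFailed(RemoveNodeWorkResponse resp, Set<String> failedNodeIds) {
            if (resp.updateProgress(failedNodeIds)) {
                resp.setStatus(Status.SUCCESS);
            }
        }
    }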
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/dataset/hints/DatasetHints.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/dataset/hints/DatasetHints.java
index 0b7a7d9..e27a4cb 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/dataset/hints/DatasetHints.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/dataset/hints/DatasetHints.java
@@ -17,6 +17,7 @@
import java.util.HashSet;
import java.util.Set;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
/**
@@ -51,6 +52,7 @@
private static Set<IHint> initHints() {
Set<IHint> hints = new HashSet<IHint>();
hints.add(new DatasetCardinalityHint());
+ hints.add(new DatasetNodegroupCardinalityHint());
return hints;
}
@@ -84,4 +86,43 @@
}
}
+
+ /**
+ * Hint representing the cardinality of nodes forming the nodegroup for the dataset.
+ */
+ public static class DatasetNodegroupCardinalityHint implements IHint {
+ public static final String NAME = "NODEGROUP_CARDINALITY";
+
+ public static final int DEFAULT = 1;
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ @Override
+ public Pair<Boolean, String> validateValue(String value) {
+ int intValue;
+ try {
+ intValue = Integer.parseInt(value);
+ } catch (NumberFormatException nfe) {
+ return new Pair<Boolean, String>(false, "Value must be an integer");
+ }
+ if (intValue < 0) {
+ return new Pair<Boolean, String>(false, "Value must be >= 0");
+ }
+ int numNodesInCluster = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodeNames()
+ .size();
+ if (numNodesInCluster < intValue) {
+ return new Pair<Boolean, String>(false,
+ "Value must be less than or equal to the number of nodes in the cluster ("
+ + numNodesInCluster + ")");
+ }
+ return new Pair<Boolean, String>(true, null);
+ }
+
+ }
+
}
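A validation sketch, assuming IHint lives in this package and a bootstrapped AsterixAppContextInfo, since the hint compares the value against the current cluster size.

    import edu.uci.ics.asterix.common.exceptions.AsterixException;
    import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
    import edu.uci.ics.asterix.metadata.dataset.hints.IHint;
    import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

    public class HintValidationSketch {
        // Rejects a non-integer, a negative value, or a value exceeding the
        // number of nodes currently in the cluster.
        public static int parseNodegroupCardinality(String value) throws AsterixException {
            IHint hint = new DatasetNodegroupCardinalityHint();
            Pair<Boolean, String> result = hint.validateValue(value);
            if (!result.first) {
                throw new AsterixException("Invalid " + hint.getName() + " hint: " + result.second);
            }
            return Integer.parseInt(value);
        }
    }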
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
index da15cfe..fa0311d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
@@ -222,8 +222,8 @@
File relPathFile = new File(getRelativePath(datasetName + "_idx_" + targetIdxName));
Dataset dataset = findDataset(datasetName);
- if (dataset.getDatasetType() != DatasetType.INTERNAL & dataset.getDatasetType() != DatasetType.FEED) {
- throw new AlgebricksException("Not an internal or feed dataset");
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
+ throw new AlgebricksException("Not an internal dataset");
}
InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
List<String> nodeGroup = findNodeGroupNodeNames(datasetDetails.getNodeGroupName());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
index 166de00..6d1a00b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
@@ -15,16 +15,13 @@
package edu.uci.ics.asterix.metadata.declared;
-import java.io.IOException;
+import java.io.Serializable;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.Set;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
@@ -32,7 +29,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.DefaultNodeGroupDomain;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
@@ -44,144 +40,57 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty;
-public class AqlDataSource implements IDataSource<AqlSourceId> {
+public abstract class AqlDataSource implements IDataSource<AqlSourceId> {
private AqlSourceId id;
- private Dataset dataset;
- private IAType[] schemaTypes;
- private INodeDomain domain;
+ private String datasourceDataverse;
+ private String datasourceName;
private AqlDataSourceType datasourceType;
+ protected IAType[] schemaTypes;
+ protected INodeDomain domain;
+ private Map<String, Serializable> properties = new HashMap<String, Serializable>();
public enum AqlDataSourceType {
- INTERNAL,
- FEED,
- EXTERNAL,
- EXTERNAL_FEED
+ INTERNAL_DATASET,
+ EXTERNAL_DATASET,
+ FEED
}
- public AqlDataSource(AqlSourceId id, Dataset dataset, IAType itemType, AqlDataSourceType datasourceType)
- throws AlgebricksException {
+ public AqlDataSource(AqlSourceId id, String datasourceDataverse, String datasourceName, IAType itemType,
+ AqlDataSourceType datasourceType) throws AlgebricksException {
this.id = id;
- this.dataset = dataset;
+ this.datasourceDataverse = datasourceDataverse;
+ this.datasourceName = datasourceName;
this.datasourceType = datasourceType;
- try {
- switch (datasourceType) {
- case FEED:
- initFeedDataset(itemType, dataset);
- case INTERNAL: {
- initInternalDataset(itemType);
- break;
- }
- case EXTERNAL_FEED:
- case EXTERNAL: {
- initExternalDataset(itemType);
- break;
- }
- default: {
- throw new IllegalArgumentException();
- }
- }
- } catch (IOException e) {
- throw new AlgebricksException(e);
- }
}
- public AqlDataSource(AqlSourceId id, Dataset dataset, IAType itemType) throws AlgebricksException {
- this.id = id;
- this.dataset = dataset;
- try {
- switch (dataset.getDatasetType()) {
- case FEED:
- initFeedDataset(itemType, dataset);
- break;
- case INTERNAL:
- initInternalDataset(itemType);
- break;
- case EXTERNAL: {
- initExternalDataset(itemType);
- break;
- }
- default: {
- throw new IllegalArgumentException();
- }
- }
- } catch (IOException e) {
- throw new AlgebricksException(e);
- }
+ public String getDatasourceDataverse() {
+ return datasourceDataverse;
}
- // TODO: Seems like initFeedDataset() could simply call this method.
- private void initInternalDataset(IAType itemType) throws IOException {
- List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
- ARecordType recordType = (ARecordType) itemType;
- int n = partitioningKeys.size();
- schemaTypes = new IAType[n + 1];
- for (int i = 0; i < n; i++) {
- schemaTypes[i] = recordType.getFieldType(partitioningKeys.get(i));
- }
- schemaTypes[n] = itemType;
- domain = new DefaultNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
+ public String getDatasourceName() {
+ return datasourceName;
}
- private void initFeedDataset(IAType itemType, Dataset dataset) throws IOException {
- if (dataset.getDatasetDetails() instanceof ExternalDatasetDetails) {
- initExternalDataset(itemType);
- } else {
- List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
- int n = partitioningKeys.size();
- schemaTypes = new IAType[n + 1];
- ARecordType recordType = (ARecordType) itemType;
- for (int i = 0; i < n; i++) {
- schemaTypes[i] = recordType.getFieldType(partitioningKeys.get(i));
- }
- schemaTypes[n] = itemType;
- domain = new DefaultNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
- }
- }
+ public abstract IAType[] getSchemaTypes();
- private void initExternalDataset(IAType itemType) {
- schemaTypes = new IAType[1];
- schemaTypes[0] = itemType;
- INodeDomain domainForExternalData = new INodeDomain() {
- @Override
- public Integer cardinality() {
- return null;
- }
-
- @Override
- public boolean sameAs(INodeDomain domain) {
- return domain == this;
- }
- };
- domain = domainForExternalData;
- }
+ public abstract INodeDomain getDomain();
@Override
public AqlSourceId getId() {
return id;
}
- public Dataset getDataset() {
- return dataset;
- }
-
- @Override
- public IAType[] getSchemaTypes() {
- return schemaTypes;
- }
-
@Override
public String toString() {
return id.toString();
- // return "AqlDataSource(\"" + id.getDataverseName() + "/" +
- // id.getDatasetName() + "\")";
}
@Override
public IDataSourcePropertiesProvider getPropertiesProvider() {
- return new AqlDataSourcePartitioningProvider(dataset.getDatasetType(), domain);
+ return new AqlDataSourcePartitioningProvider(datasourceType, domain);
}
-
+
@Override
public void computeFDs(List<LogicalVariable> scanVariables, List<FunctionalDependency> fdList) {
int n = scanVariables.size();
@@ -193,26 +102,30 @@
fdList.add(fd);
}
}
-
+
+
private static class AqlDataSourcePartitioningProvider implements IDataSourcePropertiesProvider {
private INodeDomain domain;
- private DatasetType datasetType;
+ private AqlDataSourceType aqlDataSourceType;
- public AqlDataSourcePartitioningProvider(DatasetType datasetType, INodeDomain domain) {
- this.datasetType = datasetType;
+ public AqlDataSourcePartitioningProvider(AqlDataSourceType datasetSourceType, INodeDomain domain) {
+ this.aqlDataSourceType = datasetSourceType;
this.domain = domain;
}
@Override
public IPhysicalPropertiesVector computePropertiesVector(List<LogicalVariable> scanVariables) {
- switch (datasetType) {
- case EXTERNAL: {
+ IPhysicalPropertiesVector propsVector = null;
+
+ switch (aqlDataSourceType) {
+ case EXTERNAL_DATASET: {
IPartitioningProperty pp = new RandomPartitioningProperty(domain);
List<ILocalStructuralProperty> propsLocal = new ArrayList<ILocalStructuralProperty>();
- return new StructuralPropertiesVector(pp, propsLocal);
+ propsVector = new StructuralPropertiesVector(pp, propsLocal);
+ break;
}
+
case FEED: {
int n = scanVariables.size();
IPartitioningProperty pp;
@@ -231,9 +144,11 @@
pp = new UnorderedPartitionedProperty(pvars, domain);
}
List<ILocalStructuralProperty> propsLocal = new ArrayList<ILocalStructuralProperty>();
- return new StructuralPropertiesVector(pp, propsLocal);
+ propsVector = new StructuralPropertiesVector(pp, propsLocal);
+ break;
}
- case INTERNAL: {
+
+ case INTERNAL_DATASET: {
int n = scanVariables.size();
IPartitioningProperty pp;
if (n < 2) {
@@ -254,12 +169,15 @@
for (int i = 0; i < n - 1; i++) {
propsLocal.add(new LocalOrderProperty(new OrderColumn(scanVariables.get(i), OrderKind.ASC)));
}
- return new StructuralPropertiesVector(pp, propsLocal);
+ propsVector = new StructuralPropertiesVector(pp, propsLocal);
}
+ break;
+
default: {
throw new IllegalArgumentException();
}
}
+ return propsVector;
}
}
@@ -268,4 +186,12 @@
return datasourceType;
}
+ public Map<String, Serializable> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, Serializable> properties) {
+ this.properties = properties;
+ }
+
}
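Since AqlDataSource is now abstract, here is a minimal subclass sketch showing the new contract. Names are hypothetical; a real dataset-backed source derives its domain from the dataset's node group rather than leaving it null.

    import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
    import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
    import edu.uci.ics.asterix.om.types.IAType;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;

    public class ExampleDataSource extends AqlDataSource {
        public ExampleDataSource(AqlSourceId id, String dataverse, String name, IAType itemType)
                throws AlgebricksException {
            super(id, dataverse, name, itemType, AqlDataSourceType.INTERNAL_DATASET);
            schemaTypes = new IAType[] { itemType }; // protected in the base class
            // domain is left null here; a real source must supply one, since
            // AqlDataSourcePartitioningProvider partitions against it.
        }

        @Override
        public IAType[] getSchemaTypes() {
            return schemaTypes;
        }

        @Override
        public INodeDomain getDomain() {
            return domain;
        }
    }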
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index 8301f5d..1fb64be 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -21,6 +21,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.logging.Level;
import java.util.logging.Logger;
import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
@@ -33,6 +34,7 @@
import edu.uci.ics.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
import edu.uci.ics.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
import edu.uci.ics.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
import edu.uci.ics.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
@@ -40,17 +42,6 @@
import edu.uci.ics.asterix.common.transactions.IRecoveryManager.ResourceType;
import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.data.operator.ExternalDataScanOperatorDescriptor;
-import edu.uci.ics.asterix.external.data.operator.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.external.data.operator.FeedMessageOperatorDescriptor;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
-import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
-import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
@@ -59,14 +50,30 @@
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
import edu.uci.ics.asterix.metadata.entities.Datatype;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
import edu.uci.ics.asterix.metadata.entities.Index;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.EndFeedMessage;
+import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedMessageOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
+import edu.uci.ics.asterix.metadata.feeds.IAdapterFactory;
+import edu.uci.ics.asterix.metadata.feeds.IAdapterFactory.SupportedOperation;
+import edu.uci.ics.asterix.metadata.feeds.IFeedMessage;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -91,6 +98,7 @@
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
@@ -110,7 +118,6 @@
import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import edu.uci.ics.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
-import edu.uci.ics.hyracks.api.context.ICCContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
@@ -129,12 +136,12 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -142,6 +149,7 @@
import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
@@ -167,8 +175,7 @@
private final AsterixStorageProperties storageProperties;
- private static final Map<String, String> adapterFactoryMapping = initializeAdapterFactoryMapping();
- private static Scheduler hdfsScheduler;
+ public static final Map<String, String> adapterFactoryMapping = initializeAdapterFactoryMapping();
public String getPropertyValue(String propertyName) {
return config.get(propertyName);
@@ -190,16 +197,6 @@
this.defaultDataverse = defaultDataverse;
this.stores = AsterixAppContextInfo.getInstance().getMetadataProperties().getStores();
this.storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
- ICCContext ccContext = AsterixAppContextInfo.getInstance().getCCApplicationContext().getCCContext();
- try {
- if (hdfsScheduler == null) {
- //set the singleton hdfs scheduler
- hdfsScheduler = new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(), ccContext
- .getClusterControllerInfo().getClientNetPort());
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
}
public void setJobId(JobId jobId) {
@@ -286,34 +283,20 @@
List<LogicalVariable> projectVariables, boolean projectPushed, IOperatorSchema opSchema,
IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec, Object implConfig)
throws AlgebricksException {
- Dataset dataset;
try {
- dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataSource.getId().getDataverseName(), dataSource
- .getId().getDatasetName());
-
- if (dataset == null) {
- throw new AlgebricksException("Unknown dataset " + dataSource.getId().getDatasetName()
- + " in dataverse " + dataSource.getId().getDataverseName());
- }
- switch (dataset.getDatasetType()) {
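+ // Dispatch on the datasource type directly; the dataset metadata lookup is no longer needed here.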
+ switch (((AqlDataSource) dataSource).getDatasourceType()) {
case FEED:
- if (dataSource instanceof ExternalFeedDataSource) {
- return buildExternalDatasetScan(jobSpec, dataset, dataSource);
- } else {
- return buildInternalDatasetScan(jobSpec, scanVariables, opSchema, typeEnv, dataset, dataSource,
- context, implConfig);
+ return buildFeedIntakeRuntime(jobSpec, dataSource);
+ case INTERNAL_DATASET:
+ return buildInternalDatasetScan(jobSpec, scanVariables, opSchema, typeEnv, dataSource, context,
+ implConfig);
- }
- case INTERNAL: {
- return buildInternalDatasetScan(jobSpec, scanVariables, opSchema, typeEnv, dataset, dataSource,
- context, implConfig);
- }
- case EXTERNAL: {
- return buildExternalDatasetScan(jobSpec, dataset, dataSource);
- }
- default: {
+ case EXTERNAL_DATASET:
+ return buildExternalDatasetScan(jobSpec, dataSource);
+
+ default:
throw new IllegalArgumentException();
- }
+
}
} catch (MetadataException e) {
throw new AlgebricksException(e);
@@ -322,27 +305,26 @@
private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildInternalDatasetScan(JobSpecification jobSpec,
List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
- Dataset dataset, IDataSource<AqlSourceId> dataSource, JobGenContext context, Object implConfig)
- throws AlgebricksException, MetadataException {
+ IDataSource<AqlSourceId> dataSource, JobGenContext context, Object implConfig) throws AlgebricksException,
+ MetadataException {
AqlSourceId asid = dataSource.getId();
String dataverseName = asid.getDataverseName();
String datasetName = asid.getDatasetName();
Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, datasetName);
- return buildBtreeRuntime(jobSpec, outputVars, opSchema, typeEnv, context, false, dataset,
- primaryIndex.getIndexName(), null, null, true, true, implConfig);
+ return buildBtreeRuntime(jobSpec, outputVars, opSchema, typeEnv, context, true,
+ ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null, true, true,
+ implConfig);
}
private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildExternalDatasetScan(JobSpecification jobSpec,
- Dataset dataset, IDataSource<AqlSourceId> dataSource) throws AlgebricksException, MetadataException {
+ IDataSource<AqlSourceId> dataSource) throws AlgebricksException, MetadataException {
+ Dataset dataset = ((DatasetDataSource) dataSource).getDataset();
String itemTypeName = dataset.getItemTypeName();
IAType itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(), itemTypeName)
.getDatatype();
- if (dataSource instanceof ExternalFeedDataSource) {
- return buildFeedIntakeRuntime(jobSpec, dataset);
- } else {
- return buildExternalDataScannerRuntime(jobSpec, itemType,
- (ExternalDatasetDetails) dataset.getDatasetDetails(), NonTaggedDataFormat.INSTANCE);
- }
+ return buildExternalDataScannerRuntime(jobSpec, itemType, (ExternalDatasetDetails) dataset.getDatasetDetails(),
+ NonTaggedDataFormat.INSTANCE);
+
}
@SuppressWarnings("rawtypes")
@@ -353,8 +335,7 @@
throw new AlgebricksException("Can only scan datasets of records.");
}
- IGenericDatasetAdapterFactory adapterFactory;
- IDatasourceAdapter adapter;
+ IAdapterFactory adapterFactory;
String adapterName;
DatasourceAdapter adapterEntity;
String adapterFactoryClassname;
@@ -364,39 +345,45 @@
adapterName);
if (adapterEntity != null) {
adapterFactoryClassname = adapterEntity.getClassname();
- adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
} else {
adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
if (adapterFactoryClassname == null) {
throw new AlgebricksException(" Unknown adapter :" + adapterName);
}
- adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
}
- adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(
- wrapProperties(datasetDetails.getProperties()), itemType);
+ Map<String, String> configuration = datasetDetails.getProperties();
+
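+ // A generic adapter must be configured with the dataset's record type; a typed adapter defines its own output type.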
+ switch (adapterFactory.getAdapterType()) {
+ case GENERIC:
+ ((IGenericAdapterFactory) adapterFactory).configure(configuration, (ARecordType) itemType);
+ break;
+ case TYPED:
+ ((ITypedAdapterFactory) adapterFactory).configure(configuration);
+ break;
+ }
} catch (AlgebricksException ae) {
throw ae;
} catch (Exception e) {
- e.printStackTrace();
throw new AlgebricksException("Unable to create adapter " + e);
}
- if (!(adapter.getAdapterType().equals(IDatasourceAdapter.AdapterType.READ) || adapter.getAdapterType().equals(
- IDatasourceAdapter.AdapterType.READ_WRITE))) {
- throw new AlgebricksException("external dataset adapter does not support read operation");
+ if (!(adapterFactory.getSupportedOperations().equals(SupportedOperation.READ) || adapterFactory
+ .getSupportedOperations().equals(SupportedOperation.READ_WRITE))) {
+ throw new AlgebricksException(" External dataset adapter does not support read operation");
}
- ARecordType rt = (ARecordType) itemType;
ISerializerDeserializer payloadSerde = format.getSerdeProvider().getSerializerDeserializer(itemType);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
- ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec,
- wrapPropertiesEmpty(datasetDetails.getProperties()), rt, scannerDesc, adapterFactory);
+ ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec, scannerDesc,
+ adapterFactory);
AlgebricksPartitionConstraint constraint;
try {
- constraint = adapter.getPartitionConstraint();
+ constraint = adapterFactory.getPartitionConstraint();
} catch (Exception e) {
throw new AlgebricksException(e);
}
@@ -428,80 +415,76 @@
@SuppressWarnings("rawtypes")
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedIntakeRuntime(JobSpecification jobSpec,
- Dataset dataset) throws AlgebricksException {
+ IDataSource<AqlSourceId> dataSource) throws AlgebricksException {
- FeedDatasetDetails datasetDetails = (FeedDatasetDetails) dataset.getDatasetDetails();
- DatasourceAdapter adapterEntity;
- IDatasourceAdapter adapter;
- IAdapterFactory adapterFactory;
- IAType adapterOutputType;
- String adapterName;
- String adapterFactoryClassname;
-
- try {
- adapterName = datasetDetails.getAdapterFactory();
- adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
- adapterName);
- if (adapterEntity != null) {
- adapterFactoryClassname = adapterEntity.getClassname();
- adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
- } else {
- adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
- if (adapterFactoryClassname != null) {
- } else {
- // adapterName has been provided as a fully qualified
- // classname
- adapterFactoryClassname = adapterName;
- }
- adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
- }
-
- if (adapterFactory instanceof ITypedDatasetAdapterFactory) {
- adapter = ((ITypedDatasetAdapterFactory) adapterFactory).createAdapter(wrapProperties(datasetDetails
- .getProperties()));
- adapterOutputType = ((ITypedDatasourceAdapter) adapter).getAdapterOutputType();
- } else if (adapterFactory instanceof IGenericDatasetAdapterFactory) {
- String outputTypeName = datasetDetails.getProperties().get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME);
- adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(),
- outputTypeName).getDatatype();
- adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(
- wrapProperties(datasetDetails.getProperties()), adapterOutputType);
- } else {
- throw new IllegalStateException(" Unknown factory type for " + adapterFactoryClassname);
- }
- } catch (AlgebricksException ae) {
- throw ae;
- } catch (Exception e) {
- e.printStackTrace();
- throw new AlgebricksException("unable to create adapter " + e);
- }
-
- ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
- .getSerializerDeserializer(adapterOutputType);
- RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
-
- FeedIntakeOperatorDescriptor feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, new FeedId(
- dataset.getDataverseName(), dataset.getDatasetName()), adapterFactoryClassname,
- this.wrapPropertiesEmpty(datasetDetails.getProperties()), (ARecordType) adapterOutputType, feedDesc,
- adapterFactory);
-
+ FeedDataSource feedDataSource = (FeedDataSource) dataSource;
+ FeedIntakeOperatorDescriptor feedIngestor = null;
+ Triple<IAdapterFactory, ARecordType, AdapterType> factoryOutput = null;
AlgebricksPartitionConstraint constraint = null;
+
try {
- constraint = adapter.getPartitionConstraint();
+ factoryOutput = FeedUtil.getFeedFactoryAndOutput(feedDataSource.getFeed(), mdTxnCtx);
+ IAdapterFactory adapterFactory = factoryOutput.first;
+ ARecordType adapterOutputType = factoryOutput.second;
+ AdapterType adapterType = factoryOutput.third;
+
+ ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
+ .getSerializerDeserializer(adapterOutputType);
+ RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
+
+ FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) dataSource).getProperties().get(
+ BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+ if (feedPolicy == null) {
+ throw new AlgebricksException("Feed not configured with a policy");
+ }
+ feedPolicy.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy.getPolicyName());
+ switch (adapterType) {
+ case INTERNAL:
+ feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, new FeedConnectionId(
+ feedDataSource.getDatasourceDataverse(), feedDataSource.getDatasourceName(), feedDataSource
+ .getFeedConnectionId().getDatasetName()), adapterFactory,
+ (ARecordType) adapterOutputType, feedDesc, feedPolicy.getProperties());
+ break;
+ case EXTERNAL:
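+ // External adaptor names have the form "<library>#<adaptor>"; the prefix identifies the installed library.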
+ String libraryName = feedDataSource.getFeed().getAdaptorName().split("#")[0];
+ feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, feedDataSource.getFeedConnectionId(),
+ libraryName, adapterFactory.getClass().getName(), feedDataSource.getFeed()
+ .getAdaptorConfiguration(), (ARecordType) adapterOutputType, feedDesc,
+ feedPolicy.getProperties());
+ break;
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Cofigured feed intake operator with " + adapterType + " adapter");
+ }
+ constraint = factoryOutput.first.getPartitionConstraint();
} catch (Exception e) {
throw new AlgebricksException(e);
}
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor, constraint);
}
- public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedMessengerRuntime(
- AqlMetadataProvider metadataProvider, JobSpecification jobSpec, FeedDatasetDetails datasetDetails,
- String dataverse, String dataset, List<IFeedMessage> feedMessages) throws AlgebricksException {
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = metadataProvider
- .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(dataverse, dataset, dataset);
- FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, dataverse, dataset,
- feedMessages);
- return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, spPc.second);
+ public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
+ JobSpecification jobSpec, String dataverse, String feedName, String dataset, IFeedMessage feedMessage,
+ String[] locations) throws AlgebricksException {
+ AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations);
+ FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, dataverse, feedName,
+ dataset, feedMessage);
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
+ }
+
+ public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
+ JobSpecification jobSpec, String dataverse, String feedName, String dataset, FeedActivity feedActivity)
+ throws AlgebricksException {
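+ // The disconnect message must reach every node on which the feed is ingesting, as recorded in the feed activity.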
+ List<String> feedLocations = new ArrayList<String>();
+ String[] ingestLocs = feedActivity.getFeedActivityDetails().get(FeedActivityDetails.INGEST_LOCATIONS)
+ .split(",");
+ for (String loc : ingestLocs) {
+ feedLocations.add(loc);
+ }
+ FeedConnectionId feedId = new FeedConnectionId(dataverse, feedName, dataset);
+ String[] locations = feedLocations.toArray(new String[] {});
+ IFeedMessage feedMessage = new EndFeedMessage(feedId);
+ return buildSendFeedMessageRuntime(jobSpec, dataverse, feedName, dataset, feedMessage, locations);
}
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildBtreeRuntime(JobSpecification jobSpec,
@@ -518,31 +501,33 @@
}
int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
- int numKeys = numPrimaryKeys;
- int keysStartIndex = outputRecDesc.getFieldCount() - numKeys - 1;
- ITypeTraits[] typeTraits = null;
int[] bloomFilterKeyFields;
+ ITypeTraits[] typeTraits;
+ IBinaryComparatorFactory[] comparatorFactories;
if (isSecondary) {
Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
dataset.getDatasetName(), indexName);
int numSecondaryKeys = secondaryIndex.getKeyFieldNames().size();
- numKeys += numSecondaryKeys;
- keysStartIndex = outputVars.size() - numKeys;
- typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex, numKeys, typeEnv, context);
bloomFilterKeyFields = new int[numSecondaryKeys];
for (int i = 0; i < numSecondaryKeys; i++) {
bloomFilterKeyFields[i] = i;
}
+ typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, 0, outputVars.size(), typeEnv, context);
+ comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(outputVars, 0,
+ outputVars.size(), typeEnv, context);
} else {
- typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex, numKeys + 1, typeEnv,
- context);
bloomFilterKeyFields = new int[numPrimaryKeys];
for (int i = 0; i < numPrimaryKeys; i++) {
bloomFilterKeyFields[i] = i;
}
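+ // For the primary index, derive type traits and comparator factories from the dataset's record type.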
+ String itemTypeName = dataset.getItemTypeName();
+ ARecordType itemType = (ARecordType) MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
+ dataset.getDataverseName(), itemTypeName).getDatatype();
+
+ typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
+ comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset, itemType,
+ context.getBinaryComparatorFactoryProvider());
}
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
- outputVars, keysStartIndex, numKeys, typeEnv, context);
IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
@@ -636,13 +621,14 @@
IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
outputVars, keysStartIndex, numNestedSecondaryKeyFields, typeEnv, context);
ITypeTraits[] typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex,
- numNestedSecondaryKeyFields, typeEnv, context);
+ numNestedSecondaryKeyFields + numPrimaryKeys, typeEnv, context);
IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
dataset.getDataverseName(), dataset.getDatasetName(), indexName);
IBinaryComparatorFactory[] primaryComparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
dataset, recType, context.getBinaryComparatorFactoryProvider());
+
IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
dataset, mdTxnCtx);
@@ -704,10 +690,11 @@
public IDataSourceIndex<String, AqlSourceId> findDataSourceIndex(String indexId, AqlSourceId dataSourceId)
throws AlgebricksException {
AqlDataSource ads = findDataSource(dataSourceId);
- Dataset dataset = ads.getDataset();
- if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+ if (ads.getDatasourceType() == AqlDataSourceType.EXTERNAL_DATASET) {
throw new AlgebricksException("No index for external dataset " + dataSourceId);
}
+ Dataset dataset = ((DatasetDataSource) ads).getDataset();
+
try {
String indexName = (String) indexId;
Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
@@ -735,25 +722,24 @@
}
String tName = dataset.getItemTypeName();
IAType itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, aqlId.getDataverseName(), tName).getDatatype();
- return new AqlDataSource(aqlId, dataset, itemType);
+ AqlDataSourceType datasourceType = dataset.getDatasetType().equals(DatasetType.EXTERNAL) ? AqlDataSourceType.EXTERNAL_DATASET
+ : AqlDataSourceType.INTERNAL_DATASET;
+ return new DatasetDataSource(aqlId, aqlId.getDataverseName(), aqlId.getDatasetName(), itemType, datasourceType);
}
@Override
public boolean scannerOperatorIsLeaf(IDataSource<AqlSourceId> dataSource) {
- AqlSourceId asid = dataSource.getId();
- String dataverseName = asid.getDataverseName();
- String datasetName = asid.getDatasetName();
- Dataset dataset = null;
- try {
- dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
- } catch (MetadataException e) {
- throw new IllegalStateException(e);
+ boolean result = false;
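+ // Scans over external datasets and feed intake are leaf operators; internal dataset scans are not.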
+ switch (((AqlDataSource) dataSource).getDatasourceType()) {
+ case INTERNAL_DATASET:
+ case EXTERNAL_DATASET:
+ result = ((DatasetDataSource) dataSource).getDataset().getDatasetType() == DatasetType.EXTERNAL;
+ break;
+ case FEED:
+ result = true;
+ break;
}
-
- if (dataset == null) {
- throw new IllegalArgumentException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
- }
- return dataset.getDatasetType() == DatasetType.EXTERNAL;
+ return result;
}
@Override
@@ -999,7 +985,9 @@
// generate field permutations
int[] fieldPermutation = new int[numKeys];
int[] bloomFilterKeyFields = new int[secondaryKeys.size()];
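+ // Remember where each primary-key field lands in the permutation; the modification callback needs these positions.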
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
int i = 0;
+ int j = 0;
for (LogicalVariable varKey : secondaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
fieldPermutation[i] = idx;
@@ -1009,7 +997,9 @@
for (LogicalVariable varKey : primaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
i++;
+ j++;
}
Dataset dataset = findDataset(dataverseName, datasetName);
@@ -1059,16 +1049,10 @@
// prepare callback
JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
int datasetId = dataset.getDatasetId();
- int[] primaryKeyFields = new int[primaryKeys.size()];
- i = 0;
- for (LogicalVariable varKey : primaryKeys) {
- int idx = propagatedSchema.findVariable(varKey);
- primaryKeyFields[i] = idx;
- i++;
- }
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
+ jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
+ ResourceType.LSM_BTREE);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
dataset, mdTxnCtx);
@@ -1108,7 +1092,9 @@
int numKeys = primaryKeys.size() + secondaryKeys.size();
// generate field permutations
int[] fieldPermutation = new int[numKeys];
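+ // As above, track the primary-key positions in the permutation for the modification callback.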
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
int i = 0;
+ int j = 0;
for (LogicalVariable varKey : secondaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
fieldPermutation[i] = idx;
@@ -1117,7 +1103,9 @@
for (LogicalVariable varKey : primaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
i++;
+ j++;
}
boolean isPartitioned;
@@ -1154,7 +1142,8 @@
ITypeTraits[] tokenTypeTraits = new ITypeTraits[numTokenFields];
ITypeTraits[] invListsTypeTraits = new ITypeTraits[primaryKeys.size()];
IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[numTokenFields];
- IBinaryComparatorFactory[] invListComparatorFactories = new IBinaryComparatorFactory[primaryKeys.size()];
+ IBinaryComparatorFactory[] invListComparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+ dataset, recType, context.getBinaryComparatorFactoryProvider());
IAType secondaryKeyType = null;
for (i = 0; i < secondaryKeys.size(); ++i) {
@@ -1188,28 +1177,34 @@
// prepare callback
JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
int datasetId = dataset.getDatasetId();
- int[] primaryKeyFields = new int[primaryKeys.size()];
- i = 0;
- for (LogicalVariable varKey : primaryKeys) {
- int idx = propagatedSchema.findVariable(varKey);
- primaryKeyFields[i] = idx;
- i++;
- }
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_INVERTED_INDEX);
+ jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
+ ResourceType.LSM_INVERTED_INDEX);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
dataset, mdTxnCtx);
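+ // A length-partitioned inverted index needs the partitioned dataflow helper factory.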
+ IIndexDataflowHelperFactory indexDataFlowFactory;
+ if (!isPartitioned) {
+ indexDataFlowFactory = new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+ datasetId), compactionInfo.first, compactionInfo.second,
+ new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+ LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate());
+ } else {
+ indexDataFlowFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
+ new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first, compactionInfo.second,
+ new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+ LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate());
+ }
AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor insertDeleteOp = new AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor(
spec, recordDesc, appContext.getStorageManagerInterface(), splitsAndConstraint.first,
appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
invListsTypeTraits, invListComparatorFactories, tokenizerFactory, fieldPermutation, indexOp,
- new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(datasetId),
- compactionInfo.first, compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
- dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
- LSMInvertedIndexIOOperationCallbackFactory.INSTANCE, storageProperties
- .getBloomFilterFalsePositiveRate()), filterFactory, modificationCallbackFactory);
+ indexDataFlowFactory, filterFactory, modificationCallbackFactory);
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(insertDeleteOp,
splitsAndConstraint.second);
} catch (MetadataException e) {
@@ -1242,9 +1237,11 @@
int numPrimaryKeys = primaryKeys.size();
int numKeys = numSecondaryKeys + numPrimaryKeys;
ITypeTraits[] typeTraits = new ITypeTraits[numKeys];
- IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numKeys];
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys];
int[] fieldPermutation = new int[numKeys];
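+ // As above, track the primary-key positions in the permutation for the modification callback.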
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
int i = 0;
+ int j = 0;
for (LogicalVariable varKey : secondaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
@@ -1254,7 +1251,9 @@
for (LogicalVariable varKey : primaryKeys) {
int idx = propagatedSchema.findVariable(varKey);
fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
i++;
+ j++;
}
IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numSecondaryKeys];
@@ -1267,8 +1266,6 @@
List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
for (String partitioningKey : partitioningKeys) {
IAType keyType = recType.getFieldType(partitioningKey);
- comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
- keyType, true);
typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
++i;
}
@@ -1282,16 +1279,10 @@
// prepare callback
JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
int datasetId = dataset.getDatasetId();
- int[] primaryKeyFields = new int[numPrimaryKeys];
- i = 0;
- for (LogicalVariable varKey : primaryKeys) {
- int idx = propagatedSchema.findVariable(varKey);
- primaryKeyFields[i] = idx;
- i++;
- }
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
+ jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
+ ResourceType.LSM_RTREE);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
dataset, mdTxnCtx);
@@ -1335,11 +1326,13 @@
}
/**
- * Calculate an estimate size of the bloom filter. Note that this is an estimation which assumes that the data
- * is going to be uniformly distributed across all partitions.
+ * Calculate an estimated size of the bloom filter. Note that this is an
+ * estimate which assumes that the data is going to be uniformly
+ * distributed across all partitions.
*
* @param dataset
- * @return Number of elements that will be used to create a bloom filter per dataset per partition
+ * @return Number of elements that will be used to create a bloom filter per
+ * dataset per partition
* @throws MetadataException
* @throws AlgebricksException
*/
@@ -1419,8 +1412,8 @@
try {
File relPathFile = new File(getRelativePath(dataverseName, datasetName + "_idx_" + targetIdxName));
Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
- if (dataset.getDatasetType() != DatasetType.INTERNAL & dataset.getDatasetType() != DatasetType.FEED) {
- throw new AlgebricksException("Not an internal or feed dataset");
+ if (dataset.getDatasetType() != DatasetType.INTERNAL) {
+ throw new AlgebricksException("Not an internal dataset");
}
InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, datasetDetails.getNodeGroupName())
@@ -1475,6 +1468,9 @@
"edu.uci.ics.asterix.external.dataset.adapter..RSSFeedAdapterFactory");
adapterFactoryMapping.put("edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter",
"edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapterFactory");
+ adapterFactoryMapping.put("edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapter",
+ "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory");
+
return adapterFactoryMapping;
}
@@ -1516,6 +1512,22 @@
return type.getDatatype();
}
+ public Feed findFeed(String dataverse, String feedName) throws AlgebricksException {
+ try {
+ return MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverse, feedName);
+ } catch (MetadataException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
+ public FeedPolicy findFeedPolicy(String dataverse, String policyName) throws AlgebricksException {
+ try {
+ return MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverse, policyName);
+ } catch (MetadataException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
public List<Index> getDatasetIndexes(String dataverseName, String datasetName) throws AlgebricksException {
try {
return MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
@@ -1549,13 +1561,14 @@
*
* @param properties
* the original dataset properties
- * @return a new map containing the original dataset properties and the scheduler/locations
+ * @return a new map containing the original dataset properties and the
+ * scheduler/locations
*/
private Map<String, Object> wrapProperties(Map<String, String> properties) {
Map<String, Object> wrappedProperties = new HashMap<String, Object>();
wrappedProperties.putAll(properties);
- wrappedProperties.put(HDFSAdapterFactory.SCHEDULER, hdfsScheduler);
- wrappedProperties.put(HDFSAdapterFactory.CLUSTER_LOCATIONS, getClusterLocations());
+ // wrappedProperties.put(SCHEDULER, hdfsScheduler);
+ // wrappedProperties.put(CLUSTER_LOCATIONS, getClusterLocations());
return wrappedProperties;
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
new file mode 100644
index 0000000..cb1124f
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
@@ -0,0 +1,98 @@
+package edu.uci.ics.asterix.metadata.declared;
+
+import java.io.IOException;
+import java.util.List;
+
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.DefaultNodeGroupDomain;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
+
+public class DatasetDataSource extends AqlDataSource {
+
+ private Dataset dataset;
+
+ public DatasetDataSource(AqlSourceId id, String datasourceDataverse, String datasourceName, IAType itemType,
+ AqlDataSourceType datasourceType) throws AlgebricksException {
+ super(id, datasourceDataverse, datasourceName, itemType, datasourceType);
+ MetadataTransactionContext ctx = null;
+ try {
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ dataset = MetadataManager.INSTANCE.getDataset(ctx, datasourceDataverse, datasourceName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasourceName + " in dataverse "
+ + datasourceDataverse);
+ }
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ switch (dataset.getDatasetType()) {
+ case INTERNAL:
+ initInternalDataset(itemType);
+ break;
+ case EXTERNAL:
+ initExternalDataset(itemType);
+ break;
+
+ }
+ } catch (Exception e) {
+ if (ctx != null) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ } catch (Exception e2) {
+ e2.addSuppressed(e);
+ throw new IllegalStateException("Unable to abort: " + e2.getMessage(), e2);
+ }
+ }
+ // Re-throw the original failure rather than continuing with a null dataset.
+ throw new AlgebricksException(e);
+ }
+
+ }
+
+ public Dataset getDataset() {
+ return dataset;
+ }
+
+ private void initInternalDataset(IAType itemType) throws IOException {
+ List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+ ARecordType recordType = (ARecordType) itemType;
+ int n = partitioningKeys.size();
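+ // The schema is the partitioning keys followed by the record (payload) type itself.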
+ schemaTypes = new IAType[n + 1];
+ for (int i = 0; i < n; i++) {
+ schemaTypes[i] = recordType.getFieldType(partitioningKeys.get(i));
+ }
+ schemaTypes[n] = itemType;
+ domain = new DefaultNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
+ }
+
+ private void initExternalDataset(IAType itemType) {
+ schemaTypes = new IAType[1];
+ schemaTypes[0] = itemType;
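+ // External data has no fixed node domain: cardinality is unknown and two domains are equal only by identity.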
+ INodeDomain domainForExternalData = new INodeDomain() {
+ @Override
+ public Integer cardinality() {
+ return null;
+ }
+
+ @Override
+ public boolean sameAs(INodeDomain domain) {
+ return domain == this;
+ }
+ };
+ domain = domainForExternalData;
+ }
+
+ @Override
+ public IAType[] getSchemaTypes() {
+ return schemaTypes;
+ }
+
+ @Override
+ public INodeDomain getDomain() {
+ return domain;
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ExternalFeedDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ExternalFeedDataSource.java
deleted file mode 100644
index ed5df31..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/ExternalFeedDataSource.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.metadata.declared;
-
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-
-public class ExternalFeedDataSource extends AqlDataSource {
-
- public ExternalFeedDataSource(AqlSourceId id, Dataset dataset, IAType itemType) throws AlgebricksException {
- super(id, dataset, itemType);
- }
-
- public ExternalFeedDataSource(AqlSourceId id, Dataset dataset, IAType itemType, AqlDataSourceType dataSourceType)
- throws AlgebricksException {
- super(id, dataset, itemType, dataSourceType);
- }
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/FeedDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/FeedDataSource.java
new file mode 100644
index 0000000..f729683
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/FeedDataSource.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.declared;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
+
+public class FeedDataSource extends AqlDataSource {
+
+ private Feed feed;
+ private final FeedConnectionId feedConnectionId;
+
+ public FeedDataSource(AqlSourceId id, FeedConnectionId feedId, IAType itemType, AqlDataSourceType dataSourceType)
+ throws AlgebricksException {
+ super(id, feedId.getDataverse(), feedId.getFeedName(), itemType, dataSourceType);
+ this.feedConnectionId = feedId;
+ feed = null;
+ MetadataTransactionContext ctx = null;
+ try {
+ MetadataManager.INSTANCE.acquireReadLatch();
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ feed = MetadataManager.INSTANCE.getFeed(ctx, feedId.getDataverse(), feedId.getFeedName());
+ if (feed == null) {
+ throw new AlgebricksException("Unknown feed " + feedId);
+ }
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ initFeedDataSource(itemType);
+ } catch (Exception e) {
+ if (ctx != null) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ } catch (Exception e2) {
+ e2.addSuppressed(e);
+ throw new IllegalStateException("Unable to abort: " + e2.getMessage(), e2);
+ }
+ }
+ // Re-throw the original failure rather than continuing with a null feed.
+ throw new AlgebricksException(e);
+ } finally {
+ MetadataManager.INSTANCE.releaseReadLatch();
+ }
+ }
+
+ public Feed getFeed() {
+ return feed;
+ }
+
+ @Override
+ public IAType[] getSchemaTypes() {
+ return schemaTypes;
+ }
+
+ @Override
+ public INodeDomain getDomain() {
+ return domain;
+ }
+
+ public FeedConnectionId getFeedConnectionId() {
+ return feedConnectionId;
+ }
+
+ private void initFeedDataSource(IAType itemType) {
+ schemaTypes = new IAType[1];
+ schemaTypes[0] = itemType;
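+ // Like external data, a feed has no fixed node domain: cardinality is unknown and equality is by identity.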
+ INodeDomain domainForExternalData = new INodeDomain() {
+ @Override
+ public Integer cardinality() {
+ return null;
+ }
+
+ @Override
+ public boolean sameAs(INodeDomain domain) {
+ return domain == this;
+ }
+ };
+ domain = domainForExternalData;
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
index 7da2947..e13f47a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
@@ -50,6 +50,7 @@
_builtinTypeMap.put("circle", BuiltinType.ACIRCLE);
_builtinTypeMap.put("rectangle", BuiltinType.ARECTANGLE);
_builtinTypeMap.put("null", BuiltinType.ANULL);
+ _builtinTypeMap.put("uuid", BuiltinType.AUUID);
}
public static Map<String, BuiltinType> getBuiltinTypes() {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
index 7455cec..09db248 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
@@ -117,4 +117,9 @@
}
return true;
}
+
+ @Override
+ public String toString() {
+ return dataverseName + "." + datasetName;
+ }
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/DatasourceAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/DatasourceAdapter.java
index 3c72084..621933f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/DatasourceAdapter.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/DatasourceAdapter.java
@@ -14,12 +14,14 @@
*/
package edu.uci.ics.asterix.metadata.entities;
-import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
import edu.uci.ics.asterix.metadata.MetadataCache;
import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
public class DatasourceAdapter implements IMetadataEntity {
+ private static final long serialVersionUID = 1L;
+
public enum AdapterType {
INTERNAL,
EXTERNAL
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataverse.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataverse.java
index 7b894f1..4eee082 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataverse.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataverse.java
@@ -42,7 +42,7 @@
public String getDataFormat() {
return dataFormat;
}
-
+
public int getPendingOp() {
return pendingOp;
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Feed.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Feed.java
new file mode 100644
index 0000000..6a63f28
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Feed.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entities;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.metadata.MetadataCache;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+
+/**
+ * Metadata describing a feed.
+ */
+public class Feed implements IMetadataEntity {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String dataverseName;
+ private final String feedName;
+ private final String adaptorName;
+ private final Map<String, String> adaptorConfiguration;
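+ // Optional function applied to incoming feed records; may be null.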
+ private final FunctionSignature appliedFunction;
+
+ public Feed(String dataverseName, String feedName, String adaptorName, Map<String, String> adaptorConfiguration,
+ FunctionSignature appliedFunction) {
+ this.dataverseName = dataverseName;
+ this.feedName = feedName;
+ this.adaptorName = adaptorName;
+ this.adaptorConfiguration = adaptorConfiguration;
+ this.appliedFunction = appliedFunction;
+ }
+
+ public String getDataverseName() {
+ return dataverseName;
+ }
+
+ public String getFeedName() {
+ return feedName;
+ }
+
+ public String getAdaptorName() {
+ return adaptorName;
+ }
+
+ public Map<String, String> getAdaptorConfiguration() {
+ return adaptorConfiguration;
+ }
+
+ public FunctionSignature getAppliedFunction() {
+ return appliedFunction;
+ }
+
+ @Override
+ public Object addToCache(MetadataCache cache) {
+ return cache.addFeedIfNotExists(this);
+ }
+
+ @Override
+ public Object dropFromCache(MetadataCache cache) {
+ return cache.dropFeed(this);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (!(other instanceof Feed)) {
+ return false;
+ }
+ Feed otherFeed = (Feed) other;
+ if (!otherFeed.dataverseName.equals(dataverseName)) {
+ return false;
+ }
+ if (!otherFeed.feedName.equals(feedName)) {
+ return false;
+ }
+ }
+ return true;
+ }
+}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedActivity.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedActivity.java
new file mode 100644
index 0000000..679276f
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedActivity.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entities;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.metadata.MetadataCache;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+
+/**
+ * Metadata describing a feed activity record.
+ */
+public class FeedActivity implements IMetadataEntity, Comparable<FeedActivity> {
+
+ private static final long serialVersionUID = 1L;
+
+ private int activityId;
+
+ private final String dataverseName;
+ private final String datasetName;
+ private final String feedName;
+
+ private String lastUpdatedTimestamp;
+ private FeedActivityType activityType;
+ private Map<String, String> feedActivityDetails;
+
+ public static enum FeedActivityType {
+ FEED_BEGIN,
+ FEED_FAILURE,
+ FEED_END
+ }
+
+ public static class FeedActivityDetails {
+ public static final String COMPUTE_LOCATIONS = "compute-locations";
+ public static final String INGEST_LOCATIONS = "ingest-locations";
+ public static final String STORAGE_LOCATIONS = "storage-locations";
+ public static final String TOTAL_INGESTED = "total-ingested";
+ public static final String INGESTION_RATE = "ingestion-rate";
+ public static final String EXCEPTION_LOCATION = "exception-location";
+ public static final String EXCEPTION_MESSAGE = "exception-message";
+ public static final String FEED_POLICY_NAME = "feed-policy-name";
+ public static final String SUPER_FEED_MANAGER_HOST = "super-feed-manager-host";
+ public static final String SUPER_FEED_MANAGER_PORT = "super-feed-manager-port";
+ public static final String FEED_NODE_FAILURE = "feed-node-failure";
+
+ }
+
+ public FeedActivity(String dataverseName, String feedName, String datasetName, FeedActivityType feedActivityType,
+ Map<String, String> feedActivityDetails) {
+ this.dataverseName = dataverseName;
+ this.feedName = feedName;
+ this.datasetName = datasetName;
+ this.activityType = feedActivityType;
+ this.feedActivityDetails = feedActivityDetails;
+ }
+
+ public String getDataverseName() {
+ return dataverseName;
+ }
+
+ public String getDatasetName() {
+ return datasetName;
+ }
+
+ public String getFeedName() {
+ return feedName;
+ }
+
+ @Override
+ public Object addToCache(MetadataCache cache) {
+ return cache.addFeedActivityIfNotExists(this);
+ }
+
+ @Override
+ public Object dropFromCache(MetadataCache cache) {
+ return cache.dropFeedActivity(this);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (!(other instanceof FeedActivity)) {
+ return false;
+ }
+
+ if (!((FeedActivity) other).dataverseName.equals(dataverseName)) {
+ return false;
+ }
+ if (!((FeedActivity) other).datasetName.equals(datasetName)) {
+ return false;
+ }
+ if (!((FeedActivity) other).getFeedName().equals(feedName)) {
+ return false;
+ }
+ if (!((FeedActivity) other).getFeedActivityType().equals(activityType)) {
+ return false;
+ }
+ if (((FeedActivity) other).getActivityId() != (activityId)) {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return toString().hashCode();
+ }
+
+ @Override
+ public String toString() {
+ return dataverseName + "." + feedName + " --> " + datasetName + " " + activityType + " " + activityId;
+ }
+
+ public FeedActivityType getFeedActivityType() {
+ return activityType;
+ }
+
+ public void setFeedActivityType(FeedActivityType feedActivityType) {
+ this.activityType = feedActivityType;
+ }
+
+ public String getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ public void setLastUpdatedTimestamp(String lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ }
+
+ public int getActivityId() {
+ return activityId;
+ }
+
+ public void setActivityId(int activityId) {
+ this.activityId = activityId;
+ }
+
+ public Map<String, String> getFeedActivityDetails() {
+ return feedActivityDetails;
+ }
+
+ public void setFeedActivityDetails(Map<String, String> feedActivityDetails) {
+ this.feedActivityDetails = feedActivityDetails;
+ }
+
+ public FeedActivityType getActivityType() {
+ return activityType;
+ }
+
+ public void setActivityType(FeedActivityType activityType) {
+ this.activityType = activityType;
+ }
+
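+ // Order most recent activity (highest id) first.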
+ @Override
+ public int compareTo(FeedActivity o) {
+ return Integer.compare(o.getActivityId(), this.activityId);
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
deleted file mode 100644
index 5058e24..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.metadata.entities;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-
-/**
- * Provides functionality for writing parameters for a FEED dataset into the
- * Metadata. Since FEED dataset is a special kind of INTERNAL dataset, this
- * class extends InternalDatasetDetails.
- */
-public class FeedDatasetDetails extends InternalDatasetDetails {
-
- private static final long serialVersionUID = 1L;
- private final String adapterFactory;
- private final Map<String, String> properties;
- private final FunctionSignature signature;
- private FeedState feedState;
-
- public enum FeedState {
- INACTIVE,
- // INACTIVE state signifies that the feed dataset is not
- // connected with the external world through the feed
- // adapter.
- ACTIVE
- // ACTIVE state signifies that the feed dataset is connected to the
- // external world using an adapter that may put data into the dataset.
- }
-
- public FeedDatasetDetails(FileStructure fileStructure, PartitioningStrategy partitioningStrategy,
- List<String> partitioningKey, List<String> primaryKey, String groupName, String adapterFactory,
- Map<String, String> properties, FunctionSignature signature, String feedState, String compactionPolicy,
- Map<String, String> compactionPolicyProperties) {
- super(fileStructure, partitioningStrategy, partitioningKey, primaryKey, groupName, compactionPolicy,
- compactionPolicyProperties);
- this.properties = properties;
- this.adapterFactory = adapterFactory;
- this.signature = signature;
- this.feedState = feedState.equals(FeedState.ACTIVE.toString()) ? FeedState.ACTIVE : FeedState.INACTIVE;
- }
-
- @Override
- public DatasetType getDatasetType() {
- return DatasetType.FEED;
- }
-
- @Override
- public void writeDatasetDetailsRecordType(DataOutput out) throws HyracksDataException {
- IARecordBuilder feedRecordBuilder = new RecordBuilder();
- OrderedListBuilder listBuilder = new OrderedListBuilder();
- ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
- ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
- feedRecordBuilder.reset(MetadataRecordTypes.FEED_DETAILS_RECORDTYPE);
- AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
-
- // write field 0
- fieldValue.reset();
- aString.setValue(this.getFileStructure().toString());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX, fieldValue);
-
- // write field 1
- fieldValue.reset();
- aString.setValue(this.getPartitioningStrategy().toString());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX, fieldValue);
-
- // write field 2
- listBuilder.reset((AOrderedListType) MetadataRecordTypes.FEED_DETAILS_RECORDTYPE.getFieldTypes()[2]);
- for (String field : partitioningKeys) {
- itemValue.reset();
- aString.setValue(field);
- stringSerde.serialize(aString, itemValue.getDataOutput());
- listBuilder.addItem(itemValue);
- }
- fieldValue.reset();
- listBuilder.write(fieldValue.getDataOutput(), true);
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX, fieldValue);
-
- // write field 3
- listBuilder.reset((AOrderedListType) MetadataRecordTypes.FEED_DETAILS_RECORDTYPE.getFieldTypes()[3]);
- for (String field : primaryKeys) {
- itemValue.reset();
- aString.setValue(field);
- stringSerde.serialize(aString, itemValue.getDataOutput());
- listBuilder.addItem(itemValue);
- }
- fieldValue.reset();
- listBuilder.write(fieldValue.getDataOutput(), true);
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX, fieldValue);
-
- // write field 4
- fieldValue.reset();
- aString.setValue(getNodeGroupName());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX, fieldValue);
-
- // write field 5
- fieldValue.reset();
- aString.setValue(getAdapterFactory());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX, fieldValue);
-
- // write field 6
- listBuilder.reset((AOrderedListType) MetadataRecordTypes.FEED_DETAILS_RECORDTYPE.getFieldTypes()[6]);
- for (Map.Entry<String, String> property : properties.entrySet()) {
- String name = property.getKey();
- String value = property.getValue();
- itemValue.reset();
- writePropertyTypeRecord(name, value, itemValue.getDataOutput(),
- MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
- listBuilder.addItem(itemValue);
- }
- fieldValue.reset();
- listBuilder.write(fieldValue.getDataOutput(), true);
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX, fieldValue);
-
- // write field 7
- fieldValue.reset();
- if (signature != null) {
- aString.setValue(signature.toString());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
- }
-
- // write field 8
- fieldValue.reset();
- aString.setValue(getFeedState().toString());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX, fieldValue);
-
- // write field 9
- fieldValue.reset();
- aString.setValue(getCompactionPolicy().toString());
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX, fieldValue);
-
- // write field 10
- listBuilder.reset((AOrderedListType) MetadataRecordTypes.FEED_DETAILS_RECORDTYPE.getFieldTypes()[10]);
- for (Map.Entry<String, String> property : compactionPolicyProperties.entrySet()) {
- String name = property.getKey();
- String value = property.getValue();
- itemValue.reset();
- writePropertyTypeRecord(name, value, itemValue.getDataOutput(),
- MetadataRecordTypes.COMPACTION_POLICY_PROPERTIES_RECORDTYPE);
- listBuilder.addItem(itemValue);
- }
- fieldValue.reset();
- listBuilder.write(fieldValue.getDataOutput(), true);
- feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX,
- fieldValue);
-
- try {
- feedRecordBuilder.write(out, true);
- } catch (IOException | AsterixException e) {
- throw new HyracksDataException(e);
- }
-
- }
-
- public FeedState getFeedState() {
- return feedState;
- }
-
- public void setFeedState(FeedState feedState) {
- this.feedState = feedState;
- }
-
- public String getAdapterFactory() {
- return adapterFactory;
- }
-
- public Map<String, String> getProperties() {
- return properties;
- }
-
- public FunctionSignature getFunction() {
- return signature;
- }
-
-}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedPolicy.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedPolicy.java
new file mode 100644
index 0000000..b011e5c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedPolicy.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entities;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.metadata.MetadataCache;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+
+/**
+ * Metadata describing a feed policy record.
+ */
+public class FeedPolicy implements IMetadataEntity {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String dataverseName;
+ // Enforced to be unique within a dataverse.
+ private final String policyName;
+ // A description of the policy.
+ private final String description;
+ // The policy properties associated with the feed dataset.
+ private Map<String, String> properties;
+
+ public FeedPolicy(String dataverseName, String policyName, String description, Map<String, String> properties) {
+ this.dataverseName = dataverseName;
+ this.policyName = policyName;
+ this.description = description;
+ this.properties = properties;
+ }
+
+ public String getDataverseName() {
+ return dataverseName;
+ }
+
+ public String getPolicyName() {
+ return policyName;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (!(other instanceof FeedPolicy)) {
+ return false;
+ }
+ FeedPolicy otherPolicy = (FeedPolicy) other;
+ if (!otherPolicy.dataverseName.equals(dataverseName)) {
+ return false;
+ }
+ if (!otherPolicy.policyName.equals(policyName)) {
+ return false;
+ }
+ return true;
+ }
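+
+ // equals() compares dataverseName and policyName; hashCode() is derived from
+ // the same two fields to keep the equals/hashCode contract.
+ @Override
+ public int hashCode() {
+ return 31 * dataverseName.hashCode() + policyName.hashCode();
+ }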
+
+ @Override
+ public Object addToCache(MetadataCache cache) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Object dropFromCache(MetadataCache cache) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public Map<String, String> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, String> properties) {
+ this.properties = properties;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
index ed7efae..773cc8d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
@@ -98,11 +98,11 @@
public boolean isPrimaryIndex() {
return isPrimaryIndex;
}
-
+
public int getPendingOp() {
return pendingOp;
}
-
+
public void setPendingOp(int pendingOp) {
this.pendingOp = pendingOp;
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
index c5e6c96..212f525 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
@@ -28,6 +28,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
+import edu.uci.ics.asterix.om.base.ABoolean;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.types.AOrderedListType;
@@ -54,16 +55,18 @@
protected final List<String> partitioningKeys;
protected final List<String> primaryKeys;
protected final String nodeGroupName;
+ protected final boolean autogenerated;
protected final String compactionPolicy;
protected final Map<String, String> compactionPolicyProperties;
public InternalDatasetDetails(FileStructure fileStructure, PartitioningStrategy partitioningStrategy,
- List<String> partitioningKey, List<String> primaryKey, String groupName, String compactionPolicy,
- Map<String, String> compactionPolicyProperties) {
+ List<String> partitioningKey, List<String> primaryKey, String groupName, boolean autogenerated,
+ String compactionPolicy, Map<String, String> compactionPolicyProperties) {
this.fileStructure = fileStructure;
this.partitioningStrategy = partitioningStrategy;
this.partitioningKeys = partitioningKey;
this.primaryKeys = primaryKey;
+ this.autogenerated = autogenerated;
this.nodeGroupName = groupName;
this.compactionPolicy = compactionPolicy;
this.compactionPolicyProperties = compactionPolicyProperties;
@@ -77,6 +80,10 @@
return partitioningKeys;
}
+ public boolean isAutogenerated() {
+ return autogenerated;
+ }
+
public List<String> getPrimaryKey() {
return primaryKeys;
}
@@ -111,6 +118,8 @@
ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
internalRecordBuilder.reset(MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE);
AMutableString aString = new AMutableString("");
+ ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ASTRING);
@@ -163,12 +172,19 @@
// write field 5
fieldValue.reset();
+ ABoolean b = isAutogenerated() ? ABoolean.TRUE : ABoolean.FALSE;
+ booleanSerde.serialize(b, fieldValue.getDataOutput());
+ internalRecordBuilder.addField(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX,
+ fieldValue);
+
+ // write field 6
+ fieldValue.reset();
aString.setValue(getCompactionPolicy().toString());
stringSerde.serialize(aString, fieldValue.getDataOutput());
internalRecordBuilder.addField(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX,
fieldValue);
- // write field 6
+ // write field 7
listBuilder
.reset((AOrderedListType) MetadataRecordTypes.INTERNAL_DETAILS_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX]);
for (Map.Entry<String, String> property : compactionPolicyProperties.entrySet()) {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Library.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Library.java
new file mode 100644
index 0000000..a341ba9
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Library.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.asterix.metadata.entities;
+
+import edu.uci.ics.asterix.metadata.MetadataCache;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+
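+/**
+ * Metadata entity for a user-defined library, identified by a dataverse name
+ * and a library name.
+ */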
+public class Library implements IMetadataEntity {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String dataverse;
+ private final String name;
+
+ public Library(String dataverseName, String libraryName) {
+ this.dataverse = dataverseName;
+ this.name = libraryName;
+ }
+
+ public String getDataverseName() {
+ return dataverse;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public Object addToCache(MetadataCache cache) {
+ return cache.addLibraryIfNotExists(this);
+ }
+
+ @Override
+ public Object dropFromCache(MetadataCache cache) {
+ return cache.dropLibrary(this);
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
index dc8c390..a85787f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
@@ -20,6 +20,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.api.IMetadataEntityTupleTranslator;
import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.types.BuiltinType;
@@ -40,6 +41,10 @@
@SuppressWarnings("unchecked")
protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ABOOLEAN);
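+ // AInt32 serde shared by subclasses that serialize integer fields, such as
+ // the feed activity id.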
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+
protected final IARecordBuilder recordBuilder;
protected final ArrayBackedValueStorage fieldValue;
protected final ArrayTupleBuilder tupleBuilder;
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
index cabe6b4..6d044ea 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
@@ -53,10 +53,10 @@
}
@Override
- public CompactionPolicy getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
- byte[] serRecord = frameTuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws IOException {
+ byte[] serRecord = tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordStartOffset = tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordLength = tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
DataInput in = new DataInputStream(stream);
ARecord compactionPolicyRecord = (ARecord) recordSerDes.deserialize(in);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 3f70e2b..4bc0994 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -23,6 +23,7 @@
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -31,7 +32,6 @@
import edu.uci.ics.asterix.builders.UnorderedListBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.MetadataException;
@@ -39,14 +39,13 @@
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import edu.uci.ics.asterix.om.base.ABoolean;
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
import edu.uci.ics.asterix.om.base.ARecord;
import edu.uci.ics.asterix.om.base.AString;
@@ -85,7 +84,7 @@
}
@Override
- public Dataset getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -110,86 +109,6 @@
int pendingOp = ((AInt32) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
switch (datasetType) {
- case FEED: {
- ARecord datasetDetailsRecord = (ARecord) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX);
- FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX))
- .getStringValue());
- PartitioningStrategy partitioningStrategy = PartitioningStrategy
- .valueOf(((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX))
- .getStringValue());
- IACursor cursor = ((AOrderedList) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
- List<String> partitioningKey = new ArrayList<String>();
- while (cursor.next())
- partitioningKey.add(((AString) cursor.get()).getStringValue());
- String groupName = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
- .getStringValue();
- String adapter = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
- .getStringValue();
- cursor = ((AOrderedList) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
- Map<String, String> properties = new HashMap<String, String>();
- String key;
- String value;
- while (cursor.next()) {
- ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
- .getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
- .getStringValue();
- properties.put(key, value);
- }
-
- Object o = datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX);
- FunctionSignature signature = null;
- if (!(o instanceof ANull)) {
- String functionIdentifier = ((AString) o).getStringValue();
- String[] qnameComponents = functionIdentifier.split("\\.");
- String functionDataverse;
- String functionName;
- if (qnameComponents.length == 2) {
- functionDataverse = qnameComponents[0];
- functionName = qnameComponents[1];
- } else {
- functionDataverse = dataverseName;
- functionName = qnameComponents[0];
- }
-
- String[] nameComponents = functionName.split("@");
- signature = new FunctionSignature(functionDataverse, nameComponents[0],
- Integer.parseInt(nameComponents[1]));
- }
-
- String feedState = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX)).getStringValue();
-
- String compactionPolicy = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX))
- .getStringValue();
- cursor = ((AOrderedList) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
- .getCursor();
- Map<String, String> compactionPolicyProperties = new HashMap<String, String>();
- while (cursor.next()) {
- ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
- .getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
- .getStringValue();
- compactionPolicyProperties.put(key, value);
- }
-
- datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName, adapter, properties, signature, feedState, compactionPolicy,
- compactionPolicyProperties);
- break;
- }
case INTERNAL: {
ARecord datasetDetailsRecord = (ARecord) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
@@ -204,19 +123,22 @@
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX))
.getCursor();
List<String> partitioningKey = new ArrayList<String>();
- while (cursor.next())
+ while (cursor.next()) {
partitioningKey.add(((AString) cursor.get()).getStringValue());
+ }
String groupName = ((AString) datasetDetailsRecord
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
.getStringValue();
-
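+ // newly added field: whether primary-key values are auto-generated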
+ boolean autogenerated = ((ABoolean) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX))
+ .getBoolean();
String compactionPolicy = ((AString) datasetDetailsRecord
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX))
.getStringValue();
cursor = ((AOrderedList) datasetDetailsRecord
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
.getCursor();
- Map<String, String> compactionPolicyProperties = new HashMap<String, String>();
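+ // A LinkedHashMap keeps the compaction policy properties in the same order
+ // in which they appear in the stored ordered list.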
+ Map<String, String> compactionPolicyProperties = new LinkedHashMap<String, String>();
String key;
String value;
while (cursor.next()) {
@@ -229,7 +151,7 @@
}
datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName, compactionPolicy, compactionPolicyProperties);
+ partitioningKey, groupName, autogenerated, compactionPolicy, compactionPolicyProperties);
break;
}
@@ -363,9 +285,6 @@
case EXTERNAL:
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX, fieldValue);
break;
- case FEED:
- recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX, fieldValue);
- break;
}
}
@@ -379,10 +298,8 @@
IACursor cursor = list.getCursor();
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
- .getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
- .getStringValue();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
hints.put(key, value);
}
return hints;
@@ -415,4 +332,4 @@
}
}
-}
\ No newline at end of file
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 4a5e4dcf..1bb34d2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -22,13 +22,13 @@
import java.util.Calendar;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
import edu.uci.ics.asterix.om.base.ARecord;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -54,7 +54,7 @@
}
@Override
- public DatasourceAdapter getMetadataEntytiFromTuple(ITupleReference tuple) throws MetadataException, IOException {
+ public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException {
byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index 541d703..fd4ac16 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -90,7 +90,7 @@
}
@Override
- public Datatype getMetadataEntytiFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+ public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
byte[] serRecord = frameTuple.getFieldData(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index 23e42d6..93948af 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -60,7 +60,7 @@
}
@Override
- public Dataverse getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
byte[] serRecord = frameTuple.getFieldData(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedActivityTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedActivityTupleTranslator.java
new file mode 100644
index 0000000..6c71036
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedActivityTupleTranslator.java
@@ -0,0 +1,244 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entitytupletranslators;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.AUnorderedList;
+import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Translates a FeedActivity metadata entity to an ITupleReference and vice versa.
+ */
+public class FeedActivityTupleTranslator extends AbstractTupleTranslator<FeedActivity> {
+ // Field indexes of serialized FeedActivity in a tuple.
+ // Key fields.
+ public static final int FEED_ACTIVITY_ACTIVITY_DATAVERSE_NAME_FIELD_INDEX = 0;
+
+ public static final int FEED_ACTIVITY_ACTIVITY_FEED_NAME_FIELD_INDEX = 1;
+
+ public static final int FEED_ACTIVITY_ACTIVITY_DATASET_NAME_FIELD_INDEX = 2;
+
+ public static final int FEED_ACTIVITY_ACTIVITY_ID_FIELD_INDEX = 3;
+
+ // Payload field containing serialized FeedActivity.
+ public static final int FEED_ACTIVITY_PAYLOAD_TUPLE_FIELD_INDEX = 4;
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(MetadataRecordTypes.FEED_ACTIVITY_RECORDTYPE);
+ private AMutableInt32 aInt32;
+ protected ISerializerDeserializer<AInt32> aInt32Serde;
+
+ @SuppressWarnings("unchecked")
+ public FeedActivityTupleTranslator(boolean getTuple) {
+ super(getTuple, MetadataPrimaryIndexes.FEED_ACTIVITY_DATASET.getFieldCount());
+ aInt32 = new AMutableInt32(-1);
+ aInt32Serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ }
+
+ @Override
+ public FeedActivity getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ byte[] serRecord = frameTuple.getFieldData(FEED_ACTIVITY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordStartOffset = frameTuple.getFieldStart(FEED_ACTIVITY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordLength = frameTuple.getFieldLength(FEED_ACTIVITY_PAYLOAD_TUPLE_FIELD_INDEX);
+ ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ DataInput in = new DataInputStream(stream);
+ ARecord feedActivityRecord = (ARecord) recordSerDes.deserialize(in);
+ return createFeedActivityFromARecord(feedActivityRecord);
+ }
+
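+ // Rebuilds a FeedActivity from its stored record: scalar fields are read by
+ // position, and the details field is unrolled from an unordered list of
+ // (name, value) property records.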
+ private FeedActivity createFeedActivityFromARecord(ARecord feedActivityRecord) {
+
+ String dataverseName = ((AString) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ String feedName = ((AString) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_FEED_NAME_FIELD_INDEX)).getStringValue();
+ String datasetName = ((AString) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DATASET_NAME_FIELD_INDEX)).getStringValue();
+ int activityId = ((AInt32) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_ACTIVITY_ID_FIELD_INDEX)).getIntegerValue();
+ String feedActivityType = ((AString) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_ACTIVITY_TYPE_FIELD_INDEX)).getStringValue();
+
+ IACursor cursor = ((AUnorderedList) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DETAILS_FIELD_INDEX)).getCursor();
+ Map<String, String> activityDetails = new HashMap<String, String>();
+ String key;
+ String value;
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ activityDetails.put(key, value);
+ }
+
+ String feedActivityTimestamp = ((AString) feedActivityRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_LAST_UPDATE_TIMESTAMP_FIELD_INDEX))
+ .getStringValue();
+
+ FeedActivity fa = new FeedActivity(dataverseName, feedName, datasetName,
+ FeedActivityType.valueOf(feedActivityType), activityDetails);
+ fa.setLastUpdatedTimestamp(feedActivityTimestamp);
+ fa.setActivityId(activityId);
+ return fa;
+ }
+
+ @Override
+ public ITupleReference getTupleFromMetadataEntity(FeedActivity feedActivity) throws IOException, MetadataException {
+ // write the key in the first four fields of the tuple
+ ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+
+ tupleBuilder.reset();
+ aString.setValue(feedActivity.getDataverseName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ aString.setValue(feedActivity.getFeedName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ aString.setValue(feedActivity.getDatasetName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ aInt32.setValue(feedActivity.getActivityId());
+ int32Serde.serialize(aInt32, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+ // write the payload in the fifth field of the tuple
+
+ recordBuilder.reset(MetadataRecordTypes.FEED_ACTIVITY_RECORDTYPE);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(feedActivity.getDataverseName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(feedActivity.getFeedName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_FEED_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 2
+ fieldValue.reset();
+ aString.setValue(feedActivity.getDatasetName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DATASET_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 3
+ fieldValue.reset();
+ aInt32.setValue(feedActivity.getActivityId());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_ACTIVITY_ID_FIELD_INDEX, fieldValue);
+
+ // write field 4
+ fieldValue.reset();
+ aString.setValue(feedActivity.getFeedActivityType().name());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_ACTIVITY_TYPE_FIELD_INDEX, fieldValue);
+
+ // write field 5
+ Map<String, String> properties = feedActivity.getFeedActivityDetails();
+ UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+ listBuilder
+ .reset((AUnorderedListType) MetadataRecordTypes.FEED_ACTIVITY_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DETAILS_FIELD_INDEX]);
+ for (Map.Entry<String, String> property : properties.entrySet()) {
+ String name = property.getKey();
+ String value = property.getValue();
+ itemValue.reset();
+ writePropertyTypeRecord(name, value, itemValue.getDataOutput());
+ listBuilder.addItem(itemValue);
+ }
+ fieldValue.reset();
+ listBuilder.write(fieldValue.getDataOutput(), true);
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_DETAILS_FIELD_INDEX, fieldValue);
+
+ // write field 6
+ fieldValue.reset();
+ aString.setValue(Calendar.getInstance().getTime().toString());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ACTIVITY_ARECORD_LAST_UPDATE_TIMESTAMP_FIELD_INDEX, fieldValue);
+
+ // write record
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
+ tupleBuilder.addFieldEndOffset();
+
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ }
+
+ public void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
+ IARecordBuilder propertyRecordBuilder = new RecordBuilder();
+ ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+ propertyRecordBuilder.reset(MetadataRecordTypes.FEED_ACTIVITY_DETAILS_RECORDTYPE);
+ AMutableString aString = new AMutableString("");
+ ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(name);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(0, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(value);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(1, fieldValue);
+
+ try {
+ propertyRecordBuilder.write(out, true);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
new file mode 100644
index 0000000..8924ef7
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entitytupletranslators;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.AUnorderedList;
+import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Translates a FeedPolicy metadata entity to an ITupleReference and vice versa.
+ */
+public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolicy> {
+ // Field indexes of serialized FeedPolicy in a tuple.
+ // Key fields.
+ public static final int FEED_POLICY_DATAVERSE_NAME_FIELD_INDEX = 0;
+
+ public static final int FEED_POLICY_POLICY_NAME_FIELD_INDEX = 1;
+
+ // Payload field containing serialized feedPolicy.
+ public static final int FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(MetadataRecordTypes.FEED_POLICY_RECORDTYPE);
+ private AMutableInt32 aInt32;
+ protected ISerializerDeserializer<AInt32> aInt32Serde;
+
+ @SuppressWarnings("unchecked")
+ public FeedPolicyTupleTranslator(boolean getTuple) {
+ super(getTuple, MetadataPrimaryIndexes.FEED_POLICY_DATASET.getFieldCount());
+ aInt32 = new AMutableInt32(-1);
+ aInt32Serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ }
+
+ @Override
+ public FeedPolicy getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ byte[] serRecord = frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordStartOffset = frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
+ ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ DataInput in = new DataInputStream(stream);
+ ARecord feedPolicyRecord = (ARecord) recordSerDes.deserialize(in);
+ return createFeedPolicyFromARecord(feedPolicyRecord);
+ }
+
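+ // Rebuilds a FeedPolicy from its stored record; the policy parameters are
+ // collected into a HashMap, so their original order is not preserved.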
+ private FeedPolicy createFeedPolicyFromARecord(ARecord feedPolicyRecord) {
+ FeedPolicy feedPolicy = null;
+ String dataverseName = ((AString) feedPolicyRecord
+ .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ String policyName = ((AString) feedPolicyRecord
+ .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX)).getStringValue();
+
+ String description = ((AString) feedPolicyRecord
+ .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DESCRIPTION_FIELD_INDEX)).getStringValue();
+
+ IACursor cursor = ((AUnorderedList) feedPolicyRecord
+ .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
+ Map<String, String> policyParameters = new HashMap<String, String>();
+ String key;
+ String value;
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ policyParameters.put(key, value);
+ }
+
+ feedPolicy = new FeedPolicy(dataverseName, policyName, description, policyParameters);
+ return feedPolicy;
+ }
+
+ @Override
+ public ITupleReference getTupleFromMetadataEntity(FeedPolicy feedPolicy) throws IOException, MetadataException {
+ // write the key in the first two fields of the tuple
+ ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+
+ tupleBuilder.reset();
+ aString.setValue(feedPolicy.getDataverseName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ aString.setValue(feedPolicy.getPolicyName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ recordBuilder.reset(MetadataRecordTypes.FEED_POLICY_RECORDTYPE);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(feedPolicy.getDataverseName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(feedPolicy.getPolicyName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 2 (description)
+ fieldValue.reset();
+ aString.setValue(feedPolicy.getDescription());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_DESCRIPTION_FIELD_INDEX, fieldValue);
+
+ // write field 3 (properties)
+ Map<String, String> properties = feedPolicy.getProperties();
+ UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+ listBuilder
+ .reset((AUnorderedListType) MetadataRecordTypes.FEED_POLICY_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX]);
+ for (Map.Entry<String, String> property : properties.entrySet()) {
+ String name = property.getKey();
+ String value = property.getValue();
+ itemValue.reset();
+ writePropertyTypeRecord(name, value, itemValue.getDataOutput());
+ listBuilder.addItem(itemValue);
+ }
+ fieldValue.reset();
+ listBuilder.write(fieldValue.getDataOutput(), true);
+ recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX, fieldValue);
+
+ // write record
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
+ tupleBuilder.addFieldEndOffset();
+
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ }
+
+ public void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
+ IARecordBuilder propertyRecordBuilder = new RecordBuilder();
+ ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+ propertyRecordBuilder.reset(MetadataRecordTypes.POLICY_PARAMS_RECORDTYPE);
+ AMutableString aString = new AMutableString("");
+ ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(name);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(0, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(value);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(1, fieldValue);
+
+ try {
+ propertyRecordBuilder.write(out, true);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
new file mode 100644
index 0000000..db4b112
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entitytupletranslators;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.AUnorderedList;
+import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Translates a Feed metadata entity to an ITupleReference and vice versa.
+ */
+public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
+ // Field indexes of serialized Feed in a tuple.
+ // Key fields.
+ public static final int FEED_DATAVERSE_NAME_FIELD_INDEX = 0;
+
+ public static final int FEED_NAME_FIELD_INDEX = 1;
+
+ // Payload field containing serialized feed.
+ public static final int FEED_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
+ private AMutableInt32 aInt32;
+ protected ISerializerDeserializer<AInt32> aInt32Serde;
+
+ @SuppressWarnings("unchecked")
+ public FeedTupleTranslator(boolean getTuple) {
+ super(getTuple, MetadataPrimaryIndexes.FEED_DATASET.getFieldCount());
+ aInt32 = new AMutableInt32(-1);
+ aInt32Serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ }
+
+ @Override
+ public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ byte[] serRecord = frameTuple.getFieldData(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordStartOffset = frameTuple.getFieldStart(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordLength = frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
+ ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ DataInput in = new DataInputStream(stream);
+ ARecord feedRecord = (ARecord) recordSerDes.deserialize(in);
+ return createFeedFromARecord(feedRecord);
+ }
+
+ private Feed createFeedFromARecord(ARecord feedRecord) {
+ Feed feed = null;
+ String dataverseName = ((AString) feedRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ String feedName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX))
+ .getStringValue();
+ String adaptorName = ((AString) feedRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_FIELD_INDEX)).getStringValue();
+
+ IACursor cursor = ((AUnorderedList) feedRecord
+ .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIGURATION_FIELD_INDEX)).getCursor();
+ String key;
+ String value;
+ Map<String, String> adaptorConfiguration = new HashMap<String, String>();
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ adaptorConfiguration.put(key, value);
+ }
+
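+ // The applied function, when present, is stored as "[dataverse.]name@arity";
+ // split it apart and rebuild the FunctionSignature, defaulting the dataverse
+ // to that of the feed.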
+ Object o = feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FUNCTION_FIELD_INDEX);
+ FunctionSignature signature = null;
+ if (!(o instanceof ANull)) {
+ String functionIdentifier = ((AString) o).getStringValue();
+ String[] qnameComponents = functionIdentifier.split("\\.");
+ String functionDataverse;
+ String functionName;
+ if (qnameComponents.length == 2) {
+ functionDataverse = qnameComponents[0];
+ functionName = qnameComponents[1];
+ } else {
+ functionDataverse = dataverseName;
+ functionName = qnameComponents[0];
+ }
+
+ String[] nameComponents = functionName.split("@");
+ signature = new FunctionSignature(functionDataverse, nameComponents[0], Integer.parseInt(nameComponents[1]));
+ }
+
+ feed = new Feed(dataverseName, feedName, adaptorName, adaptorConfiguration, signature);
+ return feed;
+ }
+
+ @Override
+ public ITupleReference getTupleFromMetadataEntity(Feed feed) throws IOException, MetadataException {
+ // write the key in the first two fields of the tuple
+ ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+
+ tupleBuilder.reset();
+ aString.setValue(feed.getDataverseName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ aString.setValue(feed.getFeedName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ recordBuilder.reset(MetadataRecordTypes.FEED_RECORDTYPE);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(feed.getDataverseName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(feed.getFeedName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 2
+ fieldValue.reset();
+ aString.setValue(feed.getAdaptorName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 3 (adaptorConfiguration)
+ Map<String, String> adaptorConfiguration = feed.getAdaptorConfiguration();
+ UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+ listBuilder
+ .reset((AUnorderedListType) MetadataRecordTypes.FEED_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIGURATION_FIELD_INDEX]);
+ for (Map.Entry<String, String> property : adaptorConfiguration.entrySet()) {
+ String name = property.getKey();
+ String value = property.getValue();
+ itemValue.reset();
+ writePropertyTypeRecord(name, value, itemValue.getDataOutput());
+ listBuilder.addItem(itemValue);
+ }
+ fieldValue.reset();
+ listBuilder.write(fieldValue.getDataOutput(), true);
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIGURATION_FIELD_INDEX, fieldValue);
+
+ // write field 4
+ fieldValue.reset();
+ if (feed.getAppliedFunction() != null) {
+ aString.setValue(feed.getAppliedFunction().toString());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
+ }
+
+ // write field 5
+ fieldValue.reset();
+ aString.setValue(Calendar.getInstance().getTime().toString());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
+
+ // write record
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
+ tupleBuilder.addFieldEndOffset();
+
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ }
+
+ public void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
+ IARecordBuilder propertyRecordBuilder = new RecordBuilder();
+ ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
+ propertyRecordBuilder.reset(MetadataRecordTypes.FEED_ADAPTOR_CONFIGURATION_RECORDTYPE);
+ AMutableString aString = new AMutableString("");
+ ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(name);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(0, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(value);
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ propertyRecordBuilder.addField(1, fieldValue);
+
+ try {
+ propertyRecordBuilder.write(out, true);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index c34bc72..ca867fd 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -62,7 +62,7 @@
}
@Override
- public Function getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
byte[] serRecord = frameTuple.getFieldData(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index e6f0462..884af5a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -75,7 +75,7 @@
}
@Override
- public Index getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
new file mode 100644
index 0000000..950ce5f
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.metadata.entitytupletranslators;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.Calendar;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
+import edu.uci.ics.asterix.metadata.entities.Library;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Translates a Library metadata entity to an ITupleReference and vice versa.
+ */
+public class LibraryTupleTranslator extends AbstractTupleTranslator<Library> {
+ // Field indexes of serialized Library in a tuple.
+ // First key field.
+ public static final int LIBRARY_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
+ // Second key field.
+ public static final int LIBRARY_NAME_TUPLE_FIELD_INDEX = 1;
+
+ // Payload field containing serialized Library.
+ public static final int LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
+
+ public LibraryTupleTranslator(boolean getTuple) {
+ super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET.getFieldCount());
+ }
+
+ @Override
+ public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ byte[] serRecord = frameTuple.getFieldData(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordStartOffset = frameTuple.getFieldStart(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
+ int recordLength = frameTuple.getFieldLength(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
+ ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ DataInput in = new DataInputStream(stream);
+ ARecord libraryRecord = (ARecord) recordSerDes.deserialize(in);
+ return createLibraryFromARecord(libraryRecord);
+ }
+
+ private Library createLibraryFromARecord(ARecord libraryRecord) {
+ String dataverseName = ((AString) libraryRecord
+ .getValueByPos(MetadataRecordTypes.LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
+ String libraryName = ((AString) libraryRecord
+ .getValueByPos(MetadataRecordTypes.LIBRARY_ARECORD_NAME_FIELD_INDEX)).getStringValue();
+
+ return new Library(dataverseName, libraryName);
+ }
+
+ @Override
+ public ITupleReference getTupleFromMetadataEntity(Library library) throws IOException, MetadataException {
+ // write the key in the first 2 fields of the tuple
+ tupleBuilder.reset();
+ aString.setValue(library.getDataverseName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+ aString.setValue(library.getName());
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+
+ // write the pay-load in the third field of the tuple
+
+ recordBuilder.reset(MetadataRecordTypes.LIBRARY_RECORDTYPE);
+
+ // write field 0
+ fieldValue.reset();
+ aString.setValue(library.getDataverseName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
+
+ // write field 1
+ fieldValue.reset();
+ aString.setValue(library.getName());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.LIBRARY_ARECORD_NAME_FIELD_INDEX, fieldValue);
+
+ // write field 2
+ fieldValue.reset();
+ aString.setValue(Calendar.getInstance().getTime().toString());
+ stringSerde.serialize(aString, fieldValue.getDataOutput());
+ recordBuilder.addField(MetadataRecordTypes.LIBRARY_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
+
+ // write record
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
+ tupleBuilder.addFieldEndOffset();
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ }
+
+}
\ No newline at end of file
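[Editorial sketch — not part of the patch] A hedged sketch of the round trip this translator implements; the two-argument Library constructor is taken from createLibraryFromARecord above, and the dataverse/library names are illustrative:

    // Sketch only: entity -> tuple -> entity round trip.
    LibraryTupleTranslator translator = new LibraryTupleTranslator(true);
    Library library = new Library("feeds", "parser_lib");
    ITupleReference tuple = translator.getTupleFromMetadataEntity(library);
    Library copy = translator.getMetadataEntityFromTuple(tuple);
    // copy.getDataverseName() -> "feeds", copy.getName() -> "parser_lib"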
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index ce72322..2fa8e8d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -62,7 +62,7 @@
}
@Override
- public NodeGroup getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
byte[] serRecord = frameTuple.getFieldData(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 9e276cc..5ddcc5b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -56,7 +56,7 @@
}
@Override
- public Node getMetadataEntytiFromTuple(ITupleReference frameTuple) throws IOException {
+ public Node getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
throw new NotImplementedException();
// TODO: Implement this.
// try {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractDatasourceAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractDatasourceAdapter.java
new file mode 100644
index 0000000..9e8e5f7
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractDatasourceAdapter.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+/**
+ * The base class that every implementation of the
+ * IDatasourceAdapter interface must extend.
+ */
+public abstract class AbstractDatasourceAdapter implements IDatasourceAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ protected Map<String, Object> configuration;
+ protected transient AlgebricksPartitionConstraint partitionConstraint;
+ protected IAType atype;
+ protected IHyracksTaskContext ctx;
+
+ protected static final Map<String, Object> formatToParserFactoryMap = initializeFormatParserFactoryMap();
+
+ public static final String KEY_FORMAT = "format";
+ public static final String KEY_PARSER_FACTORY = "parser";
+ public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
+ public static final String FORMAT_ADM = "adm";
+
+ private static Map<String, Object> initializeFormatParserFactoryMap() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put(FORMAT_DELIMITED_TEXT, "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
+ map.put(FORMAT_ADM, "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+ return map;
+ }
+
+}
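[Editorial sketch — not part of the patch] The format-to-parser-factory map above is keyed by the value of the "format" configuration entry. A sketch of how a subclass might resolve the parser factory class name (the lookup itself is an assumption; concrete adapters elsewhere in this patch perform the instantiation):

    // Sketch only, inside a subclass of AbstractDatasourceAdapter:
    String format = (String) configuration.get(KEY_FORMAT); // "delimited-text" or "adm"
    String parserFactoryClassName = (String) formatToParserFactoryMap.get(format);
    // "delimited-text" -> ...NtDelimitedDataTupleParserFactory
    // "adm"            -> ...AdmSchemafullRecordParserFactory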
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterIdentifier.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterIdentifier.java
new file mode 100644
index 0000000..897faae
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterIdentifier.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.Serializable;
+
+/**
+ * A unique identifier for a datasource adapter.
+ */
+public class AdapterIdentifier implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String namespace;
+ private final String adapterName;
+
+ public AdapterIdentifier(String namespace, String adapterName) {
+ this.namespace = namespace;
+ this.adapterName = adapterName;
+ }
+
+ public String getNamespace() {
+ return namespace;
+ }
+
+ public String getAdapterName() {
+ return adapterName;
+ }
+
+ @Override
+ public int hashCode() {
+ return (namespace + "@" + adapterName).hashCode();
+
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof AdapterIdentifier)) {
+ return false;
+ }
+ return namespace.equals(((AdapterIdentifier) o).getNamespace())
+ && adapterName.equals(((AdapterIdentifier) o).getAdapterName());
+ }
+}
\ No newline at end of file
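[Editorial sketch — not part of the patch] With the equals fix above (the original compared the namespace twice and ignored the adapter name), identity is the full (namespace, adapterName) pair, consistent with hashCode. The adapter names below are illustrative:

    // Sketch only: two identifiers are equal iff both components match.
    AdapterIdentifier a = new AdapterIdentifier("edu.uci.ics.asterix", "pull_twitter");
    AdapterIdentifier b = new AdapterIdentifier("edu.uci.ics.asterix", "pull_twitter");
    AdapterIdentifier c = new AdapterIdentifier("edu.uci.ics.asterix", "cnn_feed");
    // a.equals(b) -> true, and a.hashCode() == b.hashCode()
    // a.equals(c) -> false (same namespace, different adapter name)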
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java
new file mode 100644
index 0000000..b9a5e73
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.metadata.feeds.FeedFrameWriter.Mode;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class AdapterRuntimeManager implements IAdapterExecutor {
+
+ private static final Logger LOGGER = Logger.getLogger(AdapterRuntimeManager.class.getName());
+
+ private final FeedConnectionId feedId;
+
+ private IFeedAdapter feedAdapter;
+
+ private AdapterExecutor adapterExecutor;
+
+ private State state;
+
+ private int partition;
+
+ private IngestionRuntime ingestionRuntime;
+
+ private final IFeedManager feedManager;
+
+ public enum State {
+ /*
+ * Indicates that data from the external source will be pushed downstream for storage
+ */
+ ACTIVE_INGESTION,
+ /*
+ * Indicates that data from the external source will be buffered and not pushed downstream
+ */
+ INACTIVE_INGESTION,
+ /*
+ * Indicates that feed ingestion activity has finished
+ */
+ FINISHED_INGESTION
+ }
+
+ public AdapterRuntimeManager(FeedConnectionId feedId, IFeedAdapter feedAdapter, FeedFrameWriter writer,
+ int partition, LinkedBlockingQueue<IFeedMessage> inbox, IFeedManager feedManager) {
+ this.feedId = feedId;
+ this.feedAdapter = feedAdapter;
+ this.partition = partition;
+ this.feedManager = feedManager;
+ this.adapterExecutor = new AdapterExecutor(partition, writer, feedAdapter, this);
+ }
+
+ @Override
+ public void start() throws Exception {
+ state = State.ACTIVE_INGESTION;
+ ingestionRuntime = new IngestionRuntime(feedId, partition, FeedRuntimeType.INGESTION, this);
+ feedManager.registerFeedRuntime(ingestionRuntime);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered feed runtime manager for " + this.getFeedId());
+ }
+ ExecutorService executorService = feedManager.getFeedExecutorService(feedId);
+ executorService.execute(adapterExecutor);
+ }
+
+ @Override
+ public void stop() {
+ try {
+ feedAdapter.stop();
+ state = State.FINISHED_INGESTION;
+ synchronized (this) {
+ notifyAll();
+ }
+ } catch (Exception exception) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unable to stop adapter " + feedAdapter + ", encountered exception " + exception);
+ }
+ }
+ }
+
+ @Override
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+ @Override
+ public String toString() {
+ return feedId + "[" + partition + "]";
+ }
+
+ public IFeedAdapter getFeedAdapter() {
+ return feedAdapter;
+ }
+
+ public void setFeedAdapter(IFeedAdapter feedAdapter) {
+ this.feedAdapter = feedAdapter;
+ }
+
+ public static class AdapterExecutor implements Runnable {
+
+ private FeedFrameWriter writer;
+
+ private IFeedAdapter adapter;
+
+ private AdapterRuntimeManager runtimeManager;
+
+ public AdapterExecutor(int partition, FeedFrameWriter writer, IFeedAdapter adapter,
+ AdapterRuntimeManager adapterRuntimeMgr) {
+ this.writer = writer;
+ this.adapter = adapter;
+ this.runtimeManager = adapterRuntimeMgr;
+ }
+
+ @Override
+ public void run() {
+ try {
+ int partition = runtimeManager.getPartition();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting ingestion for partition:" + partition);
+ }
+ adapter.start(partition, writer);
+ runtimeManager.setState(State.FINISHED_INGESTION);
+ } catch (Exception e) {
+ e.printStackTrace();
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Exception during feed ingestion " + e.getMessage());
+ }
+ } finally {
+ synchronized (runtimeManager) {
+ runtimeManager.notifyAll();
+ }
+ }
+ }
+
+ public FeedFrameWriter getWriter() {
+ return writer;
+ }
+
+ public void setWriter(IFrameWriter writer) {
+ if (this.writer != null) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Switching writer to:" + writer + " from " + this.writer);
+ }
+ this.writer.setWriter(writer);
+ }
+ }
+
+ }
+
+ public synchronized State getState() {
+ return state;
+ }
+
+ @SuppressWarnings("incomplete-switch")
+ public synchronized void setState(State state) throws HyracksDataException {
+ if (this.state.equals(state)) {
+ return;
+ }
+ switch (state) {
+ case INACTIVE_INGESTION:
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Set " + Mode.STORE + " mode");
+ }
+ adapterExecutor.getWriter().setMode(Mode.STORE);
+ break;
+ case ACTIVE_INGESTION:
+ adapterExecutor.getWriter().setMode(Mode.FORWARD);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Set " + Mode.FORWARD + " mode");
+ }
+ break;
+ }
+ this.state = state;
+ }
+
+ public AdapterExecutor getAdapterExecutor() {
+ return adapterExecutor;
+ }
+
+ public int getPartition() {
+ return partition;
+ }
+
+ public IngestionRuntime getIngestionRuntime() {
+ return ingestionRuntime;
+ }
+
+}
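[Editorial sketch — not part of the patch] A sketch of the intended lifecycle, assuming a manager constructed with the arguments listed in the constructor above:

    // Sketch only: the ingestion state machine driven by start/setState/stop.
    manager.start();                             // registers IngestionRuntime, runs AdapterExecutor
                                                 // -> State.ACTIVE_INGESTION
    manager.setState(State.INACTIVE_INGESTION);  // frame writer switches to Mode.STORE (buffering)
    manager.setState(State.ACTIVE_INGESTION);    // frame writer switches back to Mode.FORWARD
    manager.stop();                              // adapter stopped -> State.FINISHED_INGESTION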
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java
new file mode 100644
index 0000000..3bd73a3
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+
+public class BuiltinFeedPolicies {
+
+ public static final FeedPolicy BRITTLE = initializeBrittlePolicy();
+
+ public static final FeedPolicy BASIC = initializeBasicPolicy();
+
+ public static final FeedPolicy BASIC_MONITORED = initializeBasicMonitoredPolicy();
+
+ public static final FeedPolicy FAULT_TOLERANT_BASIC_MONITORED = initializeFaultTolerantBasicMonitoredPolicy();
+
+ public static final FeedPolicy ELASTIC = initializeFaultTolerantBasicMonitoredElasticPolicy();
+
+ public static final FeedPolicy[] policies = new FeedPolicy[] { BRITTLE, BASIC, BASIC_MONITORED,
+ FAULT_TOLERANT_BASIC_MONITORED, ELASTIC };
+
+ public static final FeedPolicy DEFAULT_POLICY = BASIC;
+
+ public static final String CONFIG_FEED_POLICY_KEY = "policy";
+
+ public static FeedPolicy getFeedPolicy(String policyName) {
+ for (FeedPolicy policy : policies) {
+ if (policy.getPolicyName().equalsIgnoreCase(policyName)) {
+ return policy;
+ }
+ }
+ return null;
+ }
+
+ // BMFE
+ private static FeedPolicy initializeFaultTolerantBasicMonitoredElasticPolicy() {
+ Map<String, String> policyParams = new HashMap<String, String>();
+ policyParams.put(FeedPolicyAccessor.FAILURE_LOG_ERROR, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_LOG_DATA, "true");
+ policyParams.put(FeedPolicyAccessor.HARDWARE_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.CLUSTER_REBOOT_AUTO_RESTART, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD, "60");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD_UNIT, FeedPolicyAccessor.TimeUnit.SEC.name());
+ policyParams.put(FeedPolicyAccessor.ELASTIC, "true");
+ String description = "Basic Monitored Fault-Tolerant Elastic";
+ return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "BMFE", description, policyParams);
+ }
+
+ //BMF
+ private static FeedPolicy initializeFaultTolerantBasicMonitoredPolicy() {
+ Map<String, String> policyParams = new HashMap<String, String>();
+ policyParams.put(FeedPolicyAccessor.FAILURE_LOG_ERROR, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_LOG_DATA, "true");
+ policyParams.put(FeedPolicyAccessor.HARDWARE_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.CLUSTER_REBOOT_AUTO_RESTART, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD, "60");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD_UNIT, FeedPolicyAccessor.TimeUnit.SEC.name());
+ policyParams.put(FeedPolicyAccessor.ELASTIC, "false");
+ String description = "Basic Monitored Fault-Tolerant";
+ return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "BMF", description, policyParams);
+ }
+
+ //BM
+ private static FeedPolicy initializeBasicMonitoredPolicy() {
+ Map<String, String> policyParams = new HashMap<String, String>();
+ policyParams.put(FeedPolicyAccessor.FAILURE_LOG_ERROR, "false");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_LOG_DATA, "true");
+ policyParams.put(FeedPolicyAccessor.HARDWARE_FAILURE_CONTINUE, "false");
+ policyParams.put(FeedPolicyAccessor.CLUSTER_REBOOT_AUTO_RESTART, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD, "60");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD_UNIT, FeedPolicyAccessor.TimeUnit.SEC.name());
+ policyParams.put(FeedPolicyAccessor.ELASTIC, "false");
+ String description = "Basic Monitored Fault-Tolerant";
+ return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "BM", description, policyParams);
+ }
+
+ //B
+ private static FeedPolicy initializeBasicPolicy() {
+ Map<String, String> policyParams = new HashMap<String, String>();
+ policyParams.put(FeedPolicyAccessor.FAILURE_LOG_ERROR, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "true");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_LOG_DATA, "false");
+ policyParams.put(FeedPolicyAccessor.CLUSTER_REBOOT_AUTO_RESTART, "true");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS, "false");
+ policyParams.put(FeedPolicyAccessor.ELASTIC, "false");
+ String description = "Basic";
+ return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "B", description, policyParams);
+ }
+
+ //Br
+ private static FeedPolicy initializeBrittlePolicy() {
+ Map<String, String> policyParams = new HashMap<String, String>();
+ policyParams.put(FeedPolicyAccessor.FAILURE_LOG_ERROR, "false");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "false");
+ policyParams.put(FeedPolicyAccessor.APPLICATION_FAILURE_LOG_DATA, "false");
+ policyParams.put(FeedPolicyAccessor.HARDWARE_FAILURE_CONTINUE, "false");
+ policyParams.put(FeedPolicyAccessor.CLUSTER_REBOOT_AUTO_RESTART, "false");
+ policyParams.put(FeedPolicyAccessor.COLLECT_STATISTICS, "false");
+ policyParams.put(FeedPolicyAccessor.ELASTIC, "false");
+ String description = "Brittle";
+ return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "Br", description, policyParams);
+ }
+
+}
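[Editorial sketch — not part of the patch] A short sketch of resolving a policy by name, falling back to the default when the name is unknown:

    // Sketch only: lookup is case-insensitive (see getFeedPolicy above).
    FeedPolicy policy = BuiltinFeedPolicies.getFeedPolicy("bmfe"); // matches "BMFE"
    if (policy == null) {
        policy = BuiltinFeedPolicies.DEFAULT_POLICY; // the Basic ("B") policy
    }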
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ConditionalPushTupleParserFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ConditionalPushTupleParserFactory.java
new file mode 100644
index 0000000..62893e2
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ConditionalPushTupleParserFactory.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.logging.Level;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
+import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+public class ConditionalPushTupleParserFactory implements ITupleParserFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ITupleParser createTupleParser(IHyracksTaskContext ctx) throws HyracksDataException {
+ IDataParser dataParser = null;
+ switch (parserType) {
+ case ADM:
+ dataParser = new ADMDataParser();
+ break;
+ case DELIMITED_DATA:
+ dataParser = new DelimitedDataParser(recordType, valueParserFactories, delimiter);
+ break;
+ }
+ return new ConditionalPushTupleParser(ctx, recordType, dataParser, configuration);
+ }
+
+ private final ARecordType recordType;
+ private final Map<String, String> configuration;
+ private IValueParserFactory[] valueParserFactories;
+ private char delimiter;
+ private final ParserType parserType;
+
+ public enum ParserType {
+ ADM,
+ DELIMITED_DATA
+ }
+
+ public ConditionalPushTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
+ char fieldDelimiter, Map<String, String> configuration) {
+ this.recordType = recordType;
+ this.valueParserFactories = valueParserFactories;
+ this.delimiter = fieldDelimiter;
+ this.configuration = configuration;
+ this.parserType = ParserType.DELIMITED_DATA;
+
+ }
+
+ public ConditionalPushTupleParserFactory(ARecordType recordType, Map<String, String> configuration) {
+ this.recordType = recordType;
+ this.configuration = configuration;
+ this.parserType = ParserType.ADM;
+ }
+
+}
+
+class ConditionalPushTupleParser extends AbstractTupleParser {
+
+ private final IDataParser dataParser;
+ private int batchSize;
+ private long batchInterval;
+ private boolean continueIngestion = true;
+ private int tuplesInFrame = 0;
+ private TimeBasedFlushTask flushTask;
+ private Timer timer = new Timer();
+ private Object lock = new Object();
+ private boolean activeTimer = false;
+
+ public static final String BATCH_SIZE = "batch-size";
+ public static final String BATCH_INTERVAL = "batch-interval";
+
+ public ConditionalPushTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
+ Map<String, String> configuration) throws HyracksDataException {
+ super(ctx, recType);
+ this.dataParser = dataParser;
+ String propValue = configuration.get(BATCH_SIZE);
+ batchSize = propValue != null ? Integer.parseInt(propValue) : Integer.MAX_VALUE;
+ propValue = configuration.get(BATCH_INTERVAL);
+ batchInterval = propValue != null ? Long.parseLong(propValue) : -1;
+ activeTimer = batchInterval > 0;
+ }
+
+ public void stop() {
+ continueIngestion = false;
+ }
+
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
+
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+ flushTask = new TimeBasedFlushTask(writer, lock);
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ if (activeTimer) {
+ timer.schedule(flushTask, 0, batchInterval);
+ }
+ while (continueIngestion) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ addTuple(writer);
+ }
+ if (appender.getTupleCount() > 0) {
+ if (activeTimer) {
+ synchronized (lock) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } else {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ } finally {
+ if (activeTimer) {
+ timer.cancel();
+ }
+ }
+ }
+
+ protected void addTuple(IFrameWriter writer) throws HyracksDataException {
+ if (activeTimer) {
+ synchronized (lock) {
+ addTupleToFrame(writer);
+ }
+ } else {
+ addTupleToFrame(writer);
+ }
+ }
+
+ protected void addTupleToFrame(IFrameWriter writer) throws HyracksDataException {
+ if (tuplesInFrame == batchSize || !appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ if (tuplesInFrame == batchSize) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Batch size exceeded! flushing frame " + "(" + tuplesInFrame + ")");
+ }
+ }
+ tuplesInFrame = 0;
+ }
+ tuplesInFrame++;
+ }
+
+ private class TimeBasedFlushTask extends TimerTask {
+
+ private IFrameWriter writer;
+ private final Object lock;
+
+ public TimeBasedFlushTask(IFrameWriter writer, Object lock) {
+ this.writer = writer;
+ this.lock = lock;
+ }
+
+ @Override
+ public void run() {
+ try {
+ if (tuplesInFrame > 0) {
+ synchronized (lock) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("TTL expired flushing frame (" + tuplesInFrame + ")");
+ }
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ tuplesInFrame = 0;
+ }
+ }
+ } catch (HyracksDataException e) {
+ e.printStackTrace();
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
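[Editorial sketch — not part of the patch] The parser above flushes a frame when either the tuple count reaches batch-size or batch-interval milliseconds elapse. A configuration sketch; the record type is assumed to be defined elsewhere, and the knob values are illustrative:

    // Sketch only: constructing an ADM-parsing factory with both batching knobs.
    Map<String, String> configuration = new HashMap<String, String>();
    configuration.put("batch-size", "500");      // ConditionalPushTupleParser.BATCH_SIZE
    configuration.put("batch-interval", "2000"); // ConditionalPushTupleParser.BATCH_INTERVAL (ms)
    ITupleParserFactory factory = new ConditionalPushTupleParserFactory(recordType, configuration);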
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/EndFeedMessage.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/EndFeedMessage.java
new file mode 100644
index 0000000..2d56658
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/EndFeedMessage.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+
+/**
+ * A feed control message indicating the need to end the feed. This message is dispatched
+ * to all locations that host an operator involved in the feed pipeline.
+ */
+public class EndFeedMessage extends FeedMessage {
+
+ private static final long serialVersionUID = 1L;
+
+ private final FeedConnectionId feedId;
+
+ public EndFeedMessage(FeedConnectionId feedId) {
+ super(MessageType.END, feedId);
+ this.feedId = feedId;
+ }
+
+ @Override
+ public String toString() {
+ return MessageType.END.name() + feedId;
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java
new file mode 100644
index 0000000..48c9a2c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/*
+ * A single-activity operator that scans external data using an
+ * instance of the configured adapter.
+ */
+public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private IAdapterFactory adapterFactory;
+
+ public ExternalDataScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc,
+ IAdapterFactory dataSourceAdapterFactory) {
+ super(spec, 0, 1);
+ recordDescriptors[0] = rDesc;
+ this.adapterFactory = dataSourceAdapterFactory;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+ throws HyracksDataException {
+
+ return new AbstractUnaryOutputSourceOperatorNodePushable() {
+ @Override
+ public void initialize() throws HyracksDataException {
+ writer.open();
+ IDatasourceAdapter adapter = null;
+ try {
+ adapter = adapterFactory.createAdapter(ctx, partition);
+ adapter.start(partition, writer);
+ } catch (Exception e) {
+ throw new HyracksDataException("exception during reading from external data source", e);
+ } finally {
+ writer.close();
+ }
+ }
+ };
+ }
+
+}
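[Editorial sketch — not part of the patch] A wiring sketch, assuming an adapter factory and output record descriptor configured elsewhere (the no-argument JobSpecification constructor is also an assumption):

    // Sketch only: an external-data scan as the source operator of a Hyracks job.
    JobSpecification spec = new JobSpecification();
    ExternalDataScanOperatorDescriptor scan =
            new ExternalDataScanOperatorDescriptor(spec, outputRecordDescriptor, adapterFactory);
    // downstream operators and connectors are then attached to 'scan' as usual.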
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedActivityIdFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedActivityIdFactory.java
new file mode 100644
index 0000000..2109425
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedActivityIdFactory.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class FeedActivityIdFactory {
+ private static AtomicInteger id = new AtomicInteger();
+ private static boolean isInitialized = false;
+
+ public static boolean isInitialized() {
+ return isInitialized;
+ }
+
+ public static void initialize(int initialId) {
+ id.set(initialId);
+ isInitialized = true;
+ }
+
+ public static int generateFeedActivityId() {
+ return id.incrementAndGet();
+ }
+
+}
\ No newline at end of file
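[Editorial sketch — not part of the patch] A usage sketch; the factory is expected to be seeded once (for example, from the maximum persisted activity id) before ids are handed out:

    // Sketch only: seed once, then draw monotonically increasing ids.
    if (!FeedActivityIdFactory.isInitialized()) {
        FeedActivityIdFactory.initialize(0); // seed value is illustrative
    }
    int activityId = FeedActivityIdFactory.generateFeedActivityId(); // 1, 2, 3, ...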
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java
new file mode 100644
index 0000000..899da77
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java
@@ -0,0 +1,385 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedMessageService;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager.FeedReportMessageType;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+/**
+ * A wrapper around the standard frame writer provided to an operator node pushable.
+ * The wrapper monitors the flow of data from this operator to a downstream operator
+ * over a connector. It collects statistics if required by the feed ingestion policy
+ * and reports them to the Super Feed Manager chosen for the feed. In addition, any
+ * congestion experienced by the operator is also reported.
+ */
+public class FeedFrameWriter implements IFrameWriter {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedFrameWriter.class.getName());
+
+ /** The threshold for the time required in pushing a frame to the network. **/
+ public static final long FLUSH_THRESHOLD_TIME = 5000; // 5 seconds
+
+ /** Actual frame writer provided to an operator. **/
+ private IFrameWriter writer;
+
+ /** The node pushable associated with the operator **/
+ private IOperatorNodePushable nodePushable;
+
+ /** Set to true if the operator's health needs to be monitored. **/
+ private final boolean reportHealth;
+
+ /** A buffer for keeping frames that are waiting to be processed **/
+ private List<ByteBuffer> frames = new ArrayList<ByteBuffer>();
+
+ /**
+ * Mode associated with the frame writer
+ * Possible values: FORWARD, STORE
+ *
+ * @see Mode
+ */
+ private Mode mode;
+
+ /**
+ * Detects if the operator is unable to push a frame downstream
+ * within a threshold period of time. In addition, it measures the
+ * throughput as observed on the output channel of the associated operator.
+ */
+ private HealthMonitor healthMonitor;
+
+ /**
+ * A Timer instance for managing scheduling of tasks.
+ */
+ private Timer timer;
+
+ /**
+ * Provides access to the tuples in a frame. Used in collecting statistics
+ */
+ private FrameTupleAccessor fta;
+
+ public enum Mode {
+ /**
+ * Normal mode of operation for an operator when
+ * frames are pushed to the downstream operator.
+ */
+ FORWARD,
+
+ /**
+ * Failure mode of operation for an operator when
+ * input frames are not pushed to the downstream operator but
+ * are buffered for future retrieval. This mode is adopted
+ * during failure recovery.
+ */
+ STORE
+ }
+
+ public FeedFrameWriter(IFrameWriter writer, IOperatorNodePushable nodePushable, FeedConnectionId feedId,
+ FeedPolicyEnforcer policyEnforcer, String nodeId, FeedRuntimeType feedRuntimeType, int partition,
+ FrameTupleAccessor fta, IFeedManager feedManager) {
+ this.writer = writer;
+ this.mode = Mode.FORWARD;
+ this.nodePushable = nodePushable;
+ this.reportHealth = policyEnforcer.getFeedPolicyAccessor().collectStatistics();
+ if (reportHealth) {
+ timer = new Timer();
+ healthMonitor = new HealthMonitor(feedId, nodeId, feedRuntimeType, partition, timer, fta, feedManager);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Statistics collection enabled for the feed " + feedId + " " + feedRuntimeType + " ["
+ + partition + "]");
+ }
+ timer.scheduleAtFixedRate(healthMonitor, 0, FLUSH_THRESHOLD_TIME);
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Statistics collection *not* enabled for the feed " + feedId + " " + feedRuntimeType + " ["
+ + partition + "]");
+ }
+ }
+ this.fta = fta;
+ }
+
+ public Mode getMode() {
+ return mode;
+ }
+
+ public void setMode(Mode newMode) throws HyracksDataException {
+ if (this.mode.equals(newMode)) {
+ return;
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Switching to :" + newMode + " from " + this.mode);
+ }
+ this.mode = newMode;
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ switch (mode) {
+ case FORWARD:
+ try {
+ if (reportHealth) {
+ fta.reset(buffer);
+ healthMonitor.notifyStartFrameFlushActivity();
+ writer.nextFrame(buffer);
+ healthMonitor.notifyFinishFrameFlushActivity();
+ } else {
+ writer.nextFrame(buffer);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unable to write frame " + " on behalf of " + nodePushable.getDisplayName()
+ + ":\n" + e);
+ }
+ }
+ if (frames.size() > 0) {
+ for (ByteBuffer buf : frames) {
+ writer.nextFrame(buf);
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Flushed old frame (from previous failed execution) : " + buf
+ + " on behalf of " + nodePushable.getDisplayName());
+ }
+ }
+ frames.clear();
+ }
+ break;
+ case STORE:
+
+ /* TODO:
+ * Limit the in-memory space utilized during the STORE mode. The limit (expressed in bytes)
+ * is a parameter specified as part of the feed ingestion policy. Below is a basic implementation
+ * that allocates a buffer on demand.
+ * */
+
+ ByteBuffer storageBuffer = ByteBuffer.allocate(buffer.capacity());
+ storageBuffer.put(buffer);
+ frames.add(storageBuffer);
+ storageBuffer.flip();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Stored frame for " + nodePushable.getDisplayName());
+ }
+ break;
+ }
+ }
+
+ /**
+ * Detects if the operator is unable to push a frame downstream
+ * within a threshold period of time. In addition, it measures the
+ * throughput as observed on the output channel of the associated operator.
+ */
+ private static class HealthMonitor extends TimerTask {
+
+ private static final String EOL = "\n";
+
+ private long startTime = -1;
+ private FramePushState state;
+ private AtomicLong numTuplesInInterval = new AtomicLong(0);
+ private boolean collectThroughput;
+ private FeedMessageService mesgService;
+
+ private final FeedConnectionId feedId;
+ private final String nodeId;
+ private final FeedRuntimeType feedRuntimeType;
+ private final int partition;
+ private final long period;
+ private final FrameTupleAccessor fta;
+ private final IFeedManager feedManager;
+
+ public HealthMonitor(FeedConnectionId feedId, String nodeId, FeedRuntimeType feedRuntimeType, int partition,
+ Timer timer, FrameTupleAccessor fta, IFeedManager feedManager) {
+ this.state = FramePushState.INITIALIZED;
+ this.feedId = feedId;
+ this.nodeId = nodeId;
+ this.feedRuntimeType = feedRuntimeType;
+ this.partition = partition;
+ this.period = FLUSH_THRESHOLD_TIME;
+ this.collectThroughput = feedRuntimeType.equals(FeedRuntimeType.INGESTION);
+ this.fta = fta;
+ this.feedManager = feedManager;
+ }
+
+ public void notifyStartFrameFlushActivity() {
+ startTime = System.currentTimeMillis();
+ state = FramePushState.WAITING_FOR_FLUSH_COMPLETION;
+ }
+
+ /**
+ * Reset method is invoked when a live instance of operator needs to take
+ * over from the zombie instance from the previously failed execution
+ */
+ public void reset() {
+ mesgService = null;
+ collectThroughput = feedRuntimeType.equals(FeedRuntimeType.INGESTION);
+ }
+
+ public void notifyFinishFrameFlushActivity() {
+ state = FramePushState.WAITING_FOR_NEXT_FRAME;
+ numTuplesInInterval.set(numTuplesInInterval.get() + fta.getTupleCount());
+ }
+
+ @Override
+ public void run() {
+ if (state.equals(FramePushState.WAITING_FOR_FLUSH_COMPLETION)) {
+ long currentTime = System.currentTimeMillis();
+ if (currentTime - startTime > FLUSH_THRESHOLD_TIME) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Congestion reported by " + feedRuntimeType + " [" + partition + "]");
+ }
+ sendReportToSuperFeedManager(currentTime - startTime, FeedReportMessageType.CONGESTION,
+ System.currentTimeMillis());
+ }
+ }
+ if (collectThroughput) {
+ int instantTput = (int) Math.ceil((((double) numTuplesInInterval.get() * 1000) / period));
+ sendReportToSuperFeedManager(instantTput, FeedReportMessageType.THROUGHPUT, System.currentTimeMillis());
+ }
+ numTuplesInInterval.set(0);
+ }
+
+ private void sendReportToSuperFeedManager(long value, SuperFeedManager.FeedReportMessageType mesgType,
+ long timestamp) {
+ if (mesgService == null) {
+ waitTillMessageServiceIsUp();
+ }
+ String feedRep = feedId.getDataverse() + ":" + feedId.getFeedName() + ":" + feedId.getDatasetName();
+ String message = mesgType.name().toLowerCase() + FeedMessageService.MessageSeparator + feedRep
+ + FeedMessageService.MessageSeparator + feedRuntimeType + FeedMessageService.MessageSeparator
+ + partition + FeedMessageService.MessageSeparator + value + FeedMessageService.MessageSeparator
+ + nodeId + FeedMessageService.MessageSeparator + timestamp + FeedMessageService.MessageSeparator
+ + EOL;
+ try {
+ mesgService.sendMessage(message);
+ } catch (IOException ioe) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to send feed report to Super Feed Manager for feed " + feedId + " "
+ + feedRuntimeType + "[" + partition + "]");
+ }
+ }
+ }
+
+ private void waitTillMessageServiceIsUp() {
+ while (mesgService == null) {
+ mesgService = feedManager.getFeedMessageService(feedId);
+ if (mesgService == null) {
+ try {
+ /**
+ * wait for the message service to be available
+ */
+ Thread.sleep(2000);
+ } catch (InterruptedException e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Encountered an interrupted exception " + " Exception " + e);
+ }
+ }
+ }
+ }
+ }
+
+ public void deactivate() {
+ // cancel the timer task to avoid future execution.
+ cancel();
+ collectThroughput = false;
+ }
+
+ private enum FramePushState {
+ /**
+ * Frame writer has been initialized
+ */
+ INITIALIZED,
+
+ /**
+ * Frame writer is waiting for a pending flush to finish.
+ */
+ WAITING_FOR_FLUSH_COMPLETION,
+
+ /**
+ * Frame writer is waiting to be given the next frame.
+ */
+ WAITING_FOR_NEXT_FRAME
+ }
+
+ }
+
+ @Override
+ public void fail() throws HyracksDataException {
+ writer.fail();
+ if (healthMonitor != null) {
+ if (!healthMonitor.feedRuntimeType.equals(FeedRuntimeType.INGESTION)) {
+ healthMonitor.deactivate();
+ } else {
+ healthMonitor.reset();
+ }
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ if (healthMonitor != null) {
+ healthMonitor.deactivate();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Closing frame statistics collection activity" + healthMonitor);
+ }
+ }
+ writer.close();
+ }
+
+ public IFrameWriter getWriter() {
+ return writer;
+ }
+
+ public void setWriter(IFrameWriter writer) {
+ this.writer = writer;
+ }
+
+ @Override
+ public String toString() {
+ return "MaterializingFrameWriter using " + writer;
+ }
+
+ public List<ByteBuffer> getStoredFrames() {
+ return frames;
+ }
+
+ public void clear() {
+ frames.clear();
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ writer.open();
+ }
+
+ public void reset() {
+ healthMonitor.reset();
+ }
+
+}
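[Editorial sketch — not part of the patch] A sketch of the store-and-replay behavior the wrapper implements; 'feedWriter' stands for a constructed FeedFrameWriter:

    // Sketch only: toggling between the two modes during failure recovery.
    feedWriter.setMode(FeedFrameWriter.Mode.STORE);   // nextFrame(..) now buffers frame copies
    // ... the zombie instance keeps pushing frames; they accumulate in 'frames' ...
    feedWriter.setMode(FeedFrameWriter.Mode.FORWARD); // buffered frames are replayed on the next
                                                      // nextFrame(..) call, then forwarding resumes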
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java
new file mode 100644
index 0000000..b732191
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * FeedIntakeOperatorDescriptor is responsible for ingesting data from an external source. This
+ * operator uses a user-specified or built-in adaptor to retrieve data from the external
+ * data source.
+ */
+public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorDescriptor.class.getName());
+
+ /** The type associated with the ADM data output from the feed adaptor */
+ private final IAType outputType;
+
+ /** unique identifier for a feed instance. */
+ private final FeedConnectionId feedId;
+
+ /** Map representation of policy parameters */
+ private final Map<String, String> feedPolicy;
+
+ /** The adaptor factory that is used to create an instance of the feed adaptor **/
+ private IAdapterFactory adapterFactory;
+
+ /** The (singleton) instance of IFeedManager **/
+ private IFeedManager feedManager;
+
+ /** The library that contains the adapter in use. **/
+ private String adapterLibraryName;
+
+ /**
+ * The adapter factory class that is used to create an instance of the feed adapter.
+ * This value is used only in the case of external adapters.
+ **/
+ private String adapterFactoryClassName;
+
+ /** The configuration parameters associated with the adapter. **/
+ private Map<String, String> adapterConfiguration;
+
+ private ARecordType adapterOutputType;
+
+ public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedConnectionId feedId, IAdapterFactory adapterFactory,
+ ARecordType atype, RecordDescriptor rDesc, Map<String, String> feedPolicy) {
+ super(spec, 0, 1);
+ recordDescriptors[0] = rDesc;
+ this.adapterFactory = adapterFactory;
+ this.outputType = atype;
+ this.feedId = feedId;
+ this.feedPolicy = feedPolicy;
+ }
+
+ public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedConnectionId feedId, String adapterLibraryName,
+ String adapterFactoryClassName, Map<String, String> configuration, ARecordType atype,
+ RecordDescriptor rDesc, Map<String, String> feedPolicy) {
+ super(spec, 0, 1);
+ recordDescriptors[0] = rDesc;
+ this.adapterFactoryClassName = adapterFactoryClassName;
+ this.adapterConfiguration = configuration;
+ this.adapterLibraryName = adapterLibraryName;
+ this.outputType = atype;
+ this.feedId = feedId;
+ this.feedPolicy = feedPolicy;
+ }
+
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+ throws HyracksDataException {
+ IFeedAdapter adapter = null;
+ FeedRuntimeId feedRuntimeId = new FeedRuntimeId(FeedRuntimeType.INGESTION, feedId, partition);
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
+ this.feedManager = runtimeCtx.getFeedManager();
+ IngestionRuntime ingestionRuntime = (IngestionRuntime) feedManager.getFeedRuntime(feedRuntimeId);
+ try {
+ if (ingestionRuntime == null) {
+ // create an instance of a feed adaptor to ingest data.
+ adapter = createAdapter(ctx, partition);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Beginning new feed:" + feedId);
+ }
+ } else {
+ // retrieve the instance of the feed adaptor used in previous failed execution.
+ adapter = ingestionRuntime.getAdapterRuntimeManager().getFeedAdapter();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Resuming old feed:" + feedId);
+ }
+ }
+ } catch (Exception exception) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Initialization of the feed adaptor failed with exception " + exception);
+ }
+ throw new HyracksDataException("Initialization of the feed adapter failed", exception);
+ }
+ return new FeedIntakeOperatorNodePushable(ctx, feedId, adapter, feedPolicy, partition, ingestionRuntime);
+ }
+
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+ public Map<String, String> getFeedPolicy() {
+ return feedPolicy;
+ }
+
+ public IAdapterFactory getAdapterFactory() {
+ return adapterFactory;
+ }
+
+ public IAType getOutputType() {
+ return outputType;
+ }
+
+ public RecordDescriptor getRecordDescriptor() {
+ return recordDescriptors[0];
+ }
+
+ private IFeedAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ IFeedAdapter feedAdapter = null;
+ if (adapterFactory != null) {
+ feedAdapter = (IFeedAdapter) adapterFactory.createAdapter(ctx, partition);
+ } else {
+ ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(feedId.getDataverse(),
+ adapterLibraryName);
+ if (classLoader != null) {
+ IAdapterFactory adapterFactory = ((IAdapterFactory) (classLoader.loadClass(adapterFactoryClassName)
+ .newInstance()));
+
+ switch (adapterFactory.getAdapterType()) {
+ case TYPED: {
+ ((ITypedAdapterFactory) adapterFactory).configure(adapterConfiguration);
+ feedAdapter = (IFeedAdapter) ((ITypedAdapterFactory) adapterFactory).createAdapter(ctx, partition);
+ }
+ break;
+ case GENERIC: {
+ String outputTypeName = adapterConfiguration.get(IGenericAdapterFactory.KEY_TYPE_NAME);
+ if (outputTypeName == null) {
+ throw new IllegalArgumentException(
+ "You must specify the datatype associated with the incoming data. Datatype is specified by the "
+ + IGenericAdapterFactory.KEY_TYPE_NAME + " configuration parameter");
+ }
+ ((IGenericAdapterFactory) adapterFactory).configure(adapterConfiguration, adapterOutputType);
+ // assign the created adapter so that it is the instance returned to the caller
+ feedAdapter = (IFeedAdapter) ((IGenericAdapterFactory) adapterFactory).createAdapter(ctx, partition);
+ }
+ break;
+ }
+ } else {
+ String message = "Unable to create adapter as class loader not configured for library "
+ + adapterLibraryName + " in dataverse " + feedId.getDataverse();
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe(message);
+ }
+ throw new IllegalArgumentException(message);
+
+ }
+ }
+ return feedAdapter;
+ }
+
+ public String getAdapterLibraryName() {
+ return adapterLibraryName;
+ }
+
+ public String getAdapterFactoryClassName() {
+ return adapterFactoryClassName;
+ }
+
+ public Map<String, String> getAdapterConfiguration() {
+ return adapterConfiguration;
+ }
+}
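A minimal wiring sketch for the two constructors above (hypothetical helper; the factory, types, and policy are assumed to be supplied by the surrounding job-construction code):

    // Built-in adaptors ship a ready factory instance; external adaptors are
    // resolved later on the NC from the library class loader.
    static FeedIntakeOperatorDescriptor newIntakeOp(JobSpecification spec, FeedConnectionId feedId,
            IAdapterFactory builtInFactory, String libraryName, String factoryClassName,
            Map<String, String> config, ARecordType outputType, RecordDescriptor rDesc,
            Map<String, String> policy) {
        return builtInFactory != null
                ? new FeedIntakeOperatorDescriptor(spec, feedId, builtInFactory, outputType, rDesc, policy)
                : new FeedIntakeOperatorDescriptor(spec, feedId, libraryName, factoryClassName, config,
                        outputType, rDesc, policy);
    }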
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
new file mode 100644
index 0000000..8f9adeb
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.FeedRuntimeManager;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.metadata.feeds.AdapterRuntimeManager.State;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter.DataExchangeMode;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * The runtime for @see{FeedIntakeOperationDescriptor}
+ */
+public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorNodePushable.class.getName());
+
+ private final int partition;
+ private final FeedConnectionId feedId;
+ private final LinkedBlockingQueue<IFeedMessage> inbox;
+ private final Map<String, String> feedPolicy;
+ private final FeedPolicyEnforcer policyEnforcer;
+ private final String nodeId;
+ private final FrameTupleAccessor fta;
+ private final IFeedManager feedManager;
+
+ private FeedRuntime ingestionRuntime;
+ private IFeedAdapter adapter;
+ private FeedFrameWriter feedFrameWriter;
+
+ public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, FeedConnectionId feedId, IFeedAdapter adapter,
+ Map<String, String> feedPolicy, int partition, IngestionRuntime ingestionRuntime) {
+ this.adapter = adapter;
+ this.partition = partition;
+ this.feedId = feedId;
+ this.ingestionRuntime = ingestionRuntime;
+ inbox = new LinkedBlockingQueue<IFeedMessage>();
+ this.feedPolicy = feedPolicy;
+ policyEnforcer = new FeedPolicyEnforcer(feedId, feedPolicy);
+ nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
+ fta = new FrameTupleAccessor(ctx.getFrameSize(), recordDesc);
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
+ this.feedManager = runtimeCtx.getFeedManager();
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+
+ AdapterRuntimeManager adapterRuntimeMgr = null;
+ try {
+ if (ingestionRuntime == null) {
+ feedFrameWriter = new FeedFrameWriter(writer, this, feedId, policyEnforcer, nodeId,
+ FeedRuntimeType.INGESTION, partition, fta, feedManager);
+ adapterRuntimeMgr = new AdapterRuntimeManager(feedId, adapter, feedFrameWriter, partition, inbox,
+ feedManager);
+
+ if (adapter.getDataExchangeMode().equals(DataExchangeMode.PULL) && adapter instanceof IPullBasedFeedAdapter) {
+ ((IPullBasedFeedAdapter) adapter).setFeedPolicyEnforcer(policyEnforcer);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Beginning new feed:" + feedId);
+ }
+ feedFrameWriter.open();
+ adapterRuntimeMgr.start();
+ } else {
+ adapterRuntimeMgr = ((IngestionRuntime) ingestionRuntime).getAdapterRuntimeManager();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Resuming old feed:" + feedId);
+ }
+ adapter = adapterRuntimeMgr.getFeedAdapter();
+ writer.open();
+ adapterRuntimeMgr.getAdapterExecutor().setWriter(writer);
+ adapterRuntimeMgr.getAdapterExecutor().getWriter().reset();
+ adapterRuntimeMgr.setState(State.ACTIVE_INGESTION);
+ feedFrameWriter = adapterRuntimeMgr.getAdapterExecutor().getWriter();
+ }
+
+ ingestionRuntime = adapterRuntimeMgr.getIngestionRuntime();
+ synchronized (adapterRuntimeMgr) {
+ while (!adapterRuntimeMgr.getState().equals(State.FINISHED_INGESTION)) {
+ adapterRuntimeMgr.wait();
+ }
+ }
+ feedManager.deRegisterFeedRuntime(ingestionRuntime.getFeedRuntimeId());
+ feedFrameWriter.close();
+ } catch (InterruptedException ie) {
+ if (policyEnforcer.getFeedPolicyAccessor().continueOnHardwareFailure()) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Continuing on failure as per feed policy, switching to INACTIVE INGESTION temporarily");
+ }
+ adapterRuntimeMgr.setState(State.INACTIVE_INGESTION);
+ FeedRuntimeManager runtimeMgr = feedManager.getFeedRuntimeManager(feedId);
+ try {
+ runtimeMgr.close(false);
+ } catch (IOException ioe) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to close Feed Runtime Manager " + ioe.getMessage());
+ }
+ }
+ feedFrameWriter.fail();
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Interrupted Exception, something went wrong");
+ }
+
+ feedManager.deRegisterFeedRuntime(ingestionRuntime.getFeedRuntimeId());
+ feedFrameWriter.close();
+ throw new HyracksDataException(ie);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Feed ingestion failed: " + e.getMessage());
+ }
+ throw new HyracksDataException(e);
+ }
+ }
+
+ public Map<String, String> getFeedPolicy() {
+ return feedPolicy;
+ }
+}
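The initialize() method above parks the task on the runtime manager's monitor until ingestion finishes. A standalone sketch of that guarded-wait idiom (the State enum and the manager below are simplified stand-ins, not the AsterixDB classes):

    enum State { ACTIVE_INGESTION, FINISHED_INGESTION }

    class ManagerSketch {
        private State state = State.ACTIVE_INGESTION;

        synchronized void setState(State s) {
            state = s;
            notifyAll(); // wake any thread blocked in awaitFinish()
        }

        synchronized void awaitFinish() throws InterruptedException {
            while (state != State.FINISHED_INGESTION) {
                wait(); // releases the monitor until setState() notifies
            }
        }
    }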
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManager.java
new file mode 100644
index 0000000..8b92994
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManager.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedMessageService;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntimeManager;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager;
+
+/**
+ * An implementation of the IFeedManager interface.
+ * Provides a central repository for registering and retrieving
+ * artifacts/services associated with a feed.
+ */
+public class FeedManager implements IFeedManager {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedManager.class.getName());
+
+ private Map<FeedConnectionId, FeedRuntimeManager> feedRuntimeManagers = new HashMap<FeedConnectionId, FeedRuntimeManager>();
+ private final String nodeId;
+
+ public FeedManager(String nodeId) {
+ this.nodeId = nodeId;
+ }
+
+ public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedId) {
+ return feedRuntimeManagers.get(feedId);
+ }
+
+ public ExecutorService getFeedExecutorService(FeedConnectionId feedId) {
+ FeedRuntimeManager mgr = feedRuntimeManagers.get(feedId);
+ return mgr == null ? null : mgr.getExecutorService();
+ }
+
+ @Override
+ public FeedMessageService getFeedMessageService(FeedConnectionId feedId) {
+ FeedRuntimeManager mgr = feedRuntimeManagers.get(feedId);
+ return mgr == null ? null : mgr.getMessageService();
+ }
+
+ @Override
+ public void deregisterFeed(FeedConnectionId feedId) {
+ try {
+ FeedRuntimeManager mgr = feedRuntimeManagers.get(feedId);
+ if (mgr == null) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unknown feed id: " + feedId);
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Closing feed runtime manager: " + mgr);
+ }
+ mgr.close(true);
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Exception in closing feed runtime" + e.getMessage());
+ }
+ e.printStackTrace();
+ }
+
+ feedRuntimeManagers.remove(feedId);
+ }
+
+ @Override
+ public void registerFeedRuntime(FeedRuntime feedRuntime) throws Exception {
+ FeedConnectionId feedId = feedRuntime.getFeedRuntimeId().getFeedId();
+ FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(feedId);
+ if (runtimeMgr == null) {
+ synchronized (feedRuntimeManagers) {
+ // re-check under the lock; the earlier unsynchronized get() may be stale
+ runtimeMgr = feedRuntimeManagers.get(feedId);
+ if (runtimeMgr == null) {
+ runtimeMgr = new FeedRuntimeManager(feedId, this);
+ feedRuntimeManagers.put(feedId, runtimeMgr);
+ }
+ }
+ }
+
+ runtimeMgr.registerFeedRuntime(feedRuntime.getFeedRuntimeId(), feedRuntime);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered runtime " + feedRuntime + " for feed " + feedId);
+ }
+ }
+
+ @Override
+ public void deRegisterFeedRuntime(FeedRuntimeId feedRuntimeId) {
+ FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(feedRuntimeId.getFeedId());
+ if (runtimeMgr != null) {
+ runtimeMgr.deregisterFeedRuntime(feedRuntimeId);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deregistered Feed Runtime " + feedRuntimeId);
+ }
+ }
+ }
+
+ @Override
+ public FeedRuntime getFeedRuntime(FeedRuntimeId feedRuntimeId) {
+ FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(feedRuntimeId.getFeedId());
+ return runtimeMgr != null ? runtimeMgr.getFeedRuntime(feedRuntimeId) : null;
+ }
+
+ @Override
+ public void registerSuperFeedManager(FeedConnectionId feedId, SuperFeedManager sfm) throws Exception {
+ FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(feedId);
+ if (runtimeMgr != null) {
+ runtimeMgr.setSuperFeedManager(sfm);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered Super Feed Manager " + sfm);
+ }
+ }
+ }
+
+ @Override
+ public SuperFeedManager getSuperFeedManager(FeedConnectionId feedId) {
+ FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(feedId);
+ return runtimeMgr != null ? runtimeMgr.getSuperFeedManager() : null;
+ }
+
+ @Override
+ public String toString() {
+ return "FeedManager " + "[" + nodeId + "]";
+ }
+}
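The register path above re-checks the map under the lock. On Java 8+ the same single-manager-per-feed guarantee could be had without explicit locking; a sketch under that assumption (KeyT/ValT are hypothetical stand-ins for FeedConnectionId and FeedRuntimeManager):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    class RegistrySketch<KeyT, ValT> {
        private final ConcurrentHashMap<KeyT, ValT> managers = new ConcurrentHashMap<>();

        ValT getOrCreate(KeyT key, Function<KeyT, ValT> factory) {
            // computeIfAbsent creates at most one value per key under contention
            return managers.computeIfAbsent(key, factory);
        }
    }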
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManagerElectMessage.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManagerElectMessage.java
new file mode 100644
index 0000000..f3cc6a8
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedManagerElectMessage.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+
+/**
+ * A feed control message announcing the election of a super feed manager.
+ * This message is dispatched to all runtime locations of the feed so that
+ * each node can register the elected super feed manager.
+ */
+public class FeedManagerElectMessage extends FeedMessage {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String host;
+ private final String nodeId;
+ private final int port;
+
+ public FeedManagerElectMessage(String host, String nodeId, int port, FeedConnectionId feedId) {
+ super(MessageType.SUPER_FEED_MANAGER_ELECT, feedId);
+ this.host = host;
+ this.port = port;
+ this.nodeId = nodeId;
+ }
+
+ @Override
+ public MessageType getMessageType() {
+ return MessageType.SUPER_FEED_MANAGER_ELECT;
+ }
+
+ @Override
+ public String toString() {
+ return MessageType.SUPER_FEED_MANAGER_ELECT.name() + " " + host + "_" + nodeId + "[" + port + "]";
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessage.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessage.java
new file mode 100644
index 0000000..019f21c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessage.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+
+/**
+ * A control message that can be sent to the runtime instance of a
+ * feed's adapter.
+ */
+public class FeedMessage implements IFeedMessage {
+
+ private static final long serialVersionUID = 1L;
+
+ protected final MessageType messageType;
+ protected final FeedConnectionId feedId;
+
+ public FeedMessage(MessageType messageType, FeedConnectionId feedId) {
+ this.messageType = messageType;
+ this.feedId = feedId;
+ }
+
+ public MessageType getMessageType() {
+ return messageType;
+ }
+
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java
new file mode 100644
index 0000000..9b00322
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Sends a control message to the registered message queue for the feed specified by its feedId.
+ */
+public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final FeedConnectionId feedId;
+ private final IFeedMessage feedMessage;
+
+ public FeedMessageOperatorDescriptor(JobSpecification spec, String dataverse, String feedName, String dataset,
+ IFeedMessage feedMessage) {
+ super(spec, 0, 1);
+ this.feedId = new FeedConnectionId(dataverse, feedName, dataset);
+ this.feedMessage = feedMessage;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessage, partition, nPartitions);
+ }
+
+}
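A hypothetical dispatch sketch (the dataverse/feed/dataset names and endpoint are illustrative only): build an elect message and wrap it in the message operator so it reaches the feed's runtime locations:

    static FeedMessageOperatorDescriptor electSuperFeedManager(JobSpecification spec) {
        FeedConnectionId feedId = new FeedConnectionId("Social", "TwitterFeed", "Tweets");
        FeedManagerElectMessage elect = new FeedManagerElectMessage("10.0.0.5", "nc1", 9090, feedId);
        return new FeedMessageOperatorDescriptor(spec, "Social", "TwitterFeed", "Tweets", elect);
    }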
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java
new file mode 100644
index 0000000..47b00dd
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager;
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Runtime for the @see{FeedMessageOperatorDescriptor}
+ */
+public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedMessageOperatorNodePushable.class.getName());
+
+ private final FeedConnectionId feedId;
+ private final IFeedMessage feedMessage;
+ private final int partition;
+ private final IHyracksTaskContext ctx;
+ private final IFeedManager feedManager;
+
+ public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedConnectionId feedId, IFeedMessage feedMessage,
+ int partition, int nPartitions) {
+ this.feedId = feedId;
+ this.feedMessage = feedMessage;
+ this.partition = partition;
+ this.ctx = ctx;
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
+ this.feedManager = runtimeCtx.getFeedManager();
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+ writer.open();
+ FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.INGESTION, feedId, partition);
+ FeedRuntime feedRuntime = feedManager.getFeedRuntime(runtimeId);
+ boolean ingestionLocation = feedRuntime != null;
+
+ switch (feedMessage.getMessageType()) {
+ case END:
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ending feed:" + feedId);
+ }
+
+ if (ingestionLocation) {
+ AdapterRuntimeManager adapterRuntimeMgr = ((IngestionRuntime) feedRuntime)
+ .getAdapterRuntimeManager();
+ adapterRuntimeMgr.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Terminating ingestion for :" + feedId);
+ }
+ }
+ break;
+
+ case SUPER_FEED_MANAGER_ELECT:
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registering Supers Feed Manager for :" + feedId);
+ }
+ FeedManagerElectMessage mesg = ((FeedManagerElectMessage) feedMessage);
+ SuperFeedManager sfm = new SuperFeedManager(mesg.getFeedId(), mesg.getHost(), mesg.getNodeId(),
+ mesg.getPort(), feedManager);
+ synchronized (feedManager) {
+ INCApplicationContext ncCtx = ctx.getJobletContext().getApplicationContext();
+ String nodeId = ncCtx.getNodeId();
+ if (sfm.getNodeId().equals(nodeId)) {
+ sfm.setLocal(true);
+ } else {
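+ // not the elected node: pause briefly, presumably to let the remote super feed manager start before registering it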
+ Thread.sleep(5000);
+ }
+ feedManager.registerSuperFeedManager(feedId, sfm);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Registered super feed mgr " + sfm + " for feed " + feedId);
+ }
+ }
+ break;
+ }
+
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ } finally {
+ writer.close();
+ }
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java
new file mode 100644
index 0000000..1a8a460
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java
@@ -0,0 +1,253 @@
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeState;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.IFeedManager;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IActivity;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+/**
+ * FeedMetaOperatorDescriptor is a wrapper operator that provides a sandbox-like
+ * environment for a Hyracks operator that is part of a feed ingestion pipeline.
+ * The MetaFeed operator provides an interface identical to that offered by the
+ * underlying wrapped operator, hereafter referred to as the core operator.
+ * As seen by Hyracks, the altered pipeline is identical to the earlier version formed
+ * from core operators. The MetaFeed operator enhances each core operator by providing
+ * functionality for handling runtime exceptions, saving any state for future retrieval,
+ * and measuring/reporting of performance characteristics. The added functionality
+ * contributes to fault tolerance during feed ingestion.
+ */
+public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger LOGGER = Logger.getLogger(FeedMetaOperatorDescriptor.class.getName());
+
+ /** The actual (Hyracks) operator that is wrapped around by the Metafeed Adaptor **/
+ private IOperatorDescriptor coreOperator;
+
+ /**
+ * A unique identifier for the feed instance. A feed instance represents the flow of data
+ * from a feed to a dataset.
+ **/
+ private final FeedConnectionId feedConnectionId;
+
+ /**
+ * The policy associated with the feed instance.
+ */
+ private final FeedPolicy feedPolicy;
+
+ /**
+ * The type of the feed runtime associated with the operator.
+ * Possible values: INGESTION, COMPUTE, STORAGE, COMMIT
+ */
+ private final FeedRuntimeType runtimeType;
+
+ public FeedMetaOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId,
+ IOperatorDescriptor coreOperatorDescriptor, FeedPolicy feedPolicy, FeedRuntimeType runtimeType) {
+ super(spec, coreOperatorDescriptor.getInputArity(), coreOperatorDescriptor.getOutputArity());
+ this.feedConnectionId = feedConnectionId;
+ this.feedPolicy = feedPolicy;
+ if (coreOperatorDescriptor.getOutputRecordDescriptors().length == 1) {
+ recordDescriptors[0] = coreOperatorDescriptor.getOutputRecordDescriptors()[0];
+ }
+ this.coreOperator = coreOperatorDescriptor;
+ this.runtimeType = runtimeType;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new FeedMetaNodePushable(ctx, recordDescProvider, partition, nPartitions, coreOperator,
+ feedConnectionId, feedPolicy, runtimeType);
+ }
+
+ @Override
+ public String toString() {
+ return "FeedMeta [" + coreOperator + " ]";
+ }
+
+ private static class FeedMetaNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+ /** Runtime node pushable corresponding to the core feed operator **/
+ private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperatorNodePushable;
+
+ /**
+ * A policy enforcer that ensures dynamic decisions for a feed are taken in accordance
+ * with the associated ingestion policy
+ **/
+ private FeedPolicyEnforcer policyEnforcer;
+
+ /**
+ * The Feed Runtime instance associated with the operator. Feed Runtime captures the state of the operator while
+ * the feed is active.
+ */
+ private FeedRuntime feedRuntime;
+
+ /**
+ * A unique identifier for the feed instance. A feed instance represents the flow of data
+ * from a feed to a dataset.
+ **/
+ private FeedConnectionId feedId;
+
+ /** Denotes the i'th operator instance in a setting where K operator instances are scheduled to run in parallel **/
+ private int partition;
+
+ /** A buffer that is used to hold the current frame that is being processed **/
+ private ByteBuffer currentBuffer;
+
+ /** Type associated with the core feed operator **/
+ private final FeedRuntimeType runtimeType;
+
+ /** True if the feed is recovering from a previous failed execution **/
+ private boolean resumeOldState;
+
+ /** The Node Controller ID for the host NC **/
+ private String nodeId;
+
+ /** Allows to iterate over the tuples in a frame **/
+ private FrameTupleAccessor fta;
+
+ /** The (singleton) instance of IFeedManager **/
+ private IFeedManager feedManager;
+
+ public FeedMetaNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
+ int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
+ FeedPolicy feedPolicy, FeedRuntimeType runtimeType) throws HyracksDataException {
+ this.coreOperatorNodePushable = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
+ .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+ this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicy.getProperties());
+ this.partition = partition;
+ this.runtimeType = runtimeType;
+ this.feedId = feedConnectionId;
+ this.nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
+ fta = new FrameTupleAccessor(ctx.getFrameSize(), recordDesc);
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
+ this.feedManager = runtimeCtx.getFeedManager();
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, feedId, partition);
+ try {
+ feedRuntime = feedManager.getFeedRuntime(runtimeId);
+ if (feedRuntime == null) {
+ feedRuntime = new FeedRuntime(feedId, partition, runtimeType);
+ feedManager.registerFeedRuntime(feedRuntime);
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Did not find a saved state from a previous zombie, starting a new instance for "
+ + runtimeType + " node.");
+ }
+ resumeOldState = false;
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Retreived state from the zombie instance from previous execution for "
+ + runtimeType + " node.");
+ }
+ resumeOldState = true;
+ }
+ FeedFrameWriter mWriter = new FeedFrameWriter(writer, this, feedId, policyEnforcer, nodeId,
+ runtimeType, partition, fta, feedManager);
+ coreOperatorNodePushable.setOutputFrameWriter(0, mWriter, recordDesc);
+ coreOperatorNodePushable.open();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unable to initialize feed operator " + feedRuntime + " [" + partition + "]");
+ }
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ try {
+ if (resumeOldState) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("State from previous zombie instance "
+ + feedRuntime.getRuntimeState().getFrame());
+ }
+ coreOperatorNodePushable.nextFrame(feedRuntime.getRuntimeState().getFrame());
+ feedRuntime.setRuntimeState(null);
+ resumeOldState = false;
+ }
+ currentBuffer = buffer;
+ coreOperatorNodePushable.nextFrame(buffer);
+ currentBuffer = null;
+ } catch (HyracksDataException e) {
+ if (policyEnforcer.getFeedPolicyAccessor().continueOnApplicationFailure()) {
+ boolean isExceptionHarmful = e.getCause() instanceof TreeIndexException && !resumeOldState;
+ if (isExceptionHarmful) {
+ // TODO: log the tuple
+ FeedRuntimeState runtimeState = new FeedRuntimeState(buffer, writer, e);
+ feedRuntime.setRuntimeState(runtimeState);
+ } else {
+ // ignore the frame (exception is expected)
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Ignoring exception " + e);
+ }
+ }
+ } else {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Feed policy does not require feed to survive soft failure");
+ }
+ throw e;
+ }
+ }
+ }
+
+ @Override
+ public void fail() throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.info("Core Op:" + coreOperatorNodePushable.getDisplayName() + " fail ");
+ }
+ if (policyEnforcer.getFeedPolicyAccessor().continueOnHardwareFailure()) {
+ if (currentBuffer != null) {
+ FeedRuntimeState runtimeState = new FeedRuntimeState(currentBuffer, writer, null);
+ feedRuntime.setRuntimeState(runtimeState);
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Saved feed compute runtime for revivals" + feedRuntime.getFeedRuntimeId());
+ }
+ } else {
+ feedManager.deRegisterFeedRuntime(feedRuntime.getFeedRuntimeId());
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("No state to save, de-registered feed runtime " + feedRuntime.getFeedRuntimeId());
+ }
+ }
+ }
+ coreOperatorNodePushable.fail();
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ coreOperatorNodePushable.close();
+ feedManager.deRegisterFeedRuntime(feedRuntime.getFeedRuntimeId());
+ }
+
+ }
+
+ public IOperatorDescriptor getCoreOperator() {
+ return coreOperator;
+ }
+
+}
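The recovery behavior in nextFrame() replays a frame saved by a failed (zombie) instance exactly once before processing live frames. A simplified standalone sketch (ByteBuffer stands in for a Hyracks frame; Sink is a hypothetical downstream consumer):

    import java.nio.ByteBuffer;

    class ReplaySketch {
        interface Sink { void nextFrame(ByteBuffer frame); }

        private ByteBuffer savedFrame; // state left behind by the zombie instance
        private final Sink core;

        ReplaySketch(Sink core, ByteBuffer savedFrame) {
            this.core = core;
            this.savedFrame = savedFrame;
        }

        void nextFrame(ByteBuffer live) {
            if (savedFrame != null) {
                core.nextFrame(savedFrame); // replay old state exactly once
                savedFrame = null;
            }
            core.nextFrame(live);
        }
    }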
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyAccessor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyAccessor.java
new file mode 100644
index 0000000..fd9716c
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyAccessor.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.Map;
+
+public class FeedPolicyAccessor {
+ public static final String FAILURE_LOG_ERROR = "failure.log.error";
+ public static final String APPLICATION_FAILURE_LOG_DATA = "application.failure.log.data";
+ public static final String APPLICATION_FAILURE_CONTINUE = "application.failure.continue";
+ public static final String HARDWARE_FAILURE_CONTINUE = "hardware.failure.continue";
+ public static final String CLUSTER_REBOOT_AUTO_RESTART = "cluster.reboot.auto.restart";
+ public static final String COLLECT_STATISTICS = "collect.statistics";
+ public static final String COLLECT_STATISTICS_PERIOD = "collect.statistics.period";
+ public static final String COLLECT_STATISTICS_PERIOD_UNIT = "collect.statistics.period.unit";
+ public static final String ELASTIC = "elastic";
+
+ public enum TimeUnit {
+ SEC,
+ MIN,
+ HRS,
+ DAYS
+ }
+
+ private Map<String, String> feedPolicy;
+
+ public FeedPolicyAccessor(Map<String, String> feedPolicy) {
+ this.feedPolicy = feedPolicy;
+ }
+
+ public boolean logErrorOnFailure() {
+ return getBooleanPropertyValue(FAILURE_LOG_ERROR);
+ }
+
+ public boolean logDataOnApplicationFailure() {
+ return getBooleanPropertyValue(APPLICATION_FAILURE_LOG_DATA);
+ }
+
+ public boolean continueOnApplicationFailure() {
+ return getBooleanPropertyValue(APPLICATION_FAILURE_CONTINUE);
+ }
+
+ public boolean continueOnHardwareFailure() {
+ return getBooleanPropertyValue(HARDWARE_FAILURE_CONTINUE);
+ }
+
+ public boolean autoRestartOnClusterReboot() {
+ return getBooleanPropertyValue(CLUSTER_REBOOT_AUTO_RESTART);
+ }
+
+ public boolean collectStatistics() {
+ return getBooleanPropertyValue(COLLECT_STATISTICS);
+ }
+
+ public long getStatisicsCollectionPeriodInSecs() {
+ return getIntegerPropertyValue(COLLECT_STATISTICS_PERIOD) * getTimeUnitFactor();
+ }
+
+ public boolean isElastic() {
+ return getBooleanPropertyValue(ELASTIC);
+ }
+
+ private int getTimeUnitFactor() {
+ String v = feedPolicy.get(COLLECT_STATISTICS_PERIOD_UNIT);
+ int factor = 1;
+ switch (TimeUnit.valueOf(v)) {
+ case SEC:
+ factor = 1;
+ break;
+ case MIN:
+ factor = 60;
+ break;
+ case HRS:
+ factor = 3600;
+ break;
+ case DAYS:
+ factor = 86400; // 24 * 3600 seconds in a day
+ break;
+
+ }
+ return factor;
+ }
+
+ private boolean getBooleanPropertyValue(String key) {
+ String v = feedPolicy.get(key);
+ return v == null ? false : Boolean.valueOf(v);
+ }
+
+ private int getIntegerPropertyValue(String key) {
+ String v = feedPolicy.get(key);
+ return Integer.parseInt(v);
+ }
+}
\ No newline at end of file
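A usage sketch with illustrative values (the keys are the constants declared above; the accessor method name is spelled as declared):

    Map<String, String> policy = new HashMap<String, String>();
    policy.put(FeedPolicyAccessor.APPLICATION_FAILURE_CONTINUE, "true");
    policy.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD, "5");
    policy.put(FeedPolicyAccessor.COLLECT_STATISTICS_PERIOD_UNIT, "MIN");

    FeedPolicyAccessor fpa = new FeedPolicyAccessor(policy);
    fpa.continueOnApplicationFailure();       // true
    fpa.getStatisicsCollectionPeriodInSecs(); // 5 * 60 = 300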
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyEnforcer.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyEnforcer.java
new file mode 100644
index 0000000..44487ec
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedPolicyEnforcer.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.rmi.RemoteException;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+
+public class FeedPolicyEnforcer {
+
+ private final FeedConnectionId feedId;
+ private final FeedPolicyAccessor feedPolicyAccessor;
+ private final FeedActivity feedActivity;
+
+ public FeedPolicyEnforcer(FeedConnectionId feedId, Map<String, String> feedPolicy) {
+ this.feedId = feedId;
+ this.feedPolicyAccessor = new FeedPolicyAccessor(feedPolicy);
+ this.feedActivity = new FeedActivity(feedId.getDataverse(), feedId.getFeedName(), feedId.getDatasetName(),
+ null, new HashMap<String, String>());
+ }
+
+ public boolean continueIngestionPostSoftwareFailure(Exception e) throws RemoteException, ACIDException {
+ boolean continueIngestion = feedPolicyAccessor.continueOnApplicationFailure();
+ if (feedPolicyAccessor.logErrorOnFailure()) {
+ persistExceptionDetails(e);
+ }
+ return continueIngestion;
+ }
+
+ private synchronized void persistExceptionDetails(Exception e) throws RemoteException, ACIDException {
+ MetadataManager.INSTANCE.acquireWriteLatch();
+ MetadataTransactionContext ctx = null;
+ try {
+ ctx = MetadataManager.INSTANCE.beginTransaction();
+ feedActivity.setActivityType(FeedActivityType.FEED_FAILURE);
+ feedActivity.getFeedActivityDetails().put(FeedActivity.FeedActivityDetails.EXCEPTION_MESSAGE,
+ e.getMessage());
+ MetadataManager.INSTANCE.registerFeedActivity(ctx, feedId, feedActivity);
+ MetadataManager.INSTANCE.commitTransaction(ctx);
+ } catch (Exception e2) {
+ MetadataManager.INSTANCE.abortTransaction(ctx);
+ } finally {
+ MetadataManager.INSTANCE.releaseWriteLatch();
+ }
+ }
+
+ public FeedPolicyAccessor getFeedPolicyAccessor() {
+ return feedPolicyAccessor;
+ }
+
+ public FeedConnectionId getFeedId() {
+ return feedId;
+ }
+
+}
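A hypothetical call site for the enforcer; on a soft failure it both consults the policy and, if failure logging is enabled, persists the details as feed activity:

    void handleSoftFailure(FeedPolicyEnforcer enforcer, Exception e) throws Exception {
        if (!enforcer.continueIngestionPostSoftwareFailure(e)) {
            throw e; // the policy does not allow the feed to survive this failure
        }
        // otherwise: discard the offending input and keep ingesting
    }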
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedReport.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedReport.java
new file mode 100644
index 0000000..d3225a2
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedReport.java
@@ -0,0 +1,117 @@
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.SuperFeedManager.FeedReportMessageType;
+
+public class FeedReport implements Comparable {
+
+ private FeedConnectionId feedId;
+ private FeedReportMessageType reportType;
+ private int partition = -1;
+ private FeedRuntimeType runtimeType;
+ private long value = -1;
+ private String[] representation;
+
+ public FeedReport() {
+ }
+
+ public FeedReport(String message) {
+ representation = message.split("\\|");
+ }
+
+ public void reset(String message) {
+ representation = message.split("\\|");
+ reportType = null;
+ feedId = null;
+ runtimeType = null;
+ partition = -1;
+ value = -1;
+ }
+
+ @Override
+ public String toString() {
+ return getFeedId() + " " + getReportType() + " " + getPartition() + " " + getRuntimeType() + " " + getValue();
+ }
+
+ public FeedConnectionId getFeedId() {
+ if (feedId == null) {
+ String feedIdRep = representation[1];
+ String[] feedIdComp = feedIdRep.split(":");
+ feedId = new FeedConnectionId(feedIdComp[0], feedIdComp[1], feedIdComp[2]);
+ }
+ return feedId;
+ }
+
+ public FeedReportMessageType getReportType() {
+ if (reportType == null) {
+ reportType = FeedReportMessageType.valueOf(representation[0].toUpperCase());
+ }
+ return reportType;
+ }
+
+ public int getPartition() {
+ if (partition < 0) {
+ partition = Integer.parseInt(representation[3]);
+ }
+ return partition;
+ }
+
+ public FeedRuntimeType getRuntimeType() {
+ if (runtimeType == null) {
+ runtimeType = FeedRuntimeType.valueOf(representation[2].toUpperCase());
+ }
+ return runtimeType;
+ }
+
+ public long getValue() {
+ if (value < 0) {
+ value = Long.parseLong(representation[4]);
+ }
+ return value;
+ }
+
+ public String[] getRepresentation() {
+ return representation;
+ }
+
+ @Override
+ public int compareTo(Object o) {
+ if (!(o instanceof FeedReport)) {
+ throw new IllegalArgumentException("Incorrect operand type " + o);
+ }
+
+ FeedReport other = (FeedReport) o;
+ if (!other.getReportType().equals(getReportType())) {
+ throw new IllegalArgumentException("Incorrect operand type " + o);
+ }
+
+ int returnValue = 0;
+
+ switch (getReportType()) {
+ case CONGESTION:
+ returnValue = ranking.get(getRuntimeType()) - ranking.get(other.getRuntimeType());
+ break;
+
+ case THROUGHPUT:
+ returnValue = (int) (other.getValue() - getValue());
+ break;
+ }
+
+ return returnValue;
+ }
+
+ private static final Map<FeedRuntimeType, Integer> ranking = populateRanking();
+
+ private static Map<FeedRuntimeType, Integer> populateRanking() {
+ Map<FeedRuntimeType, Integer> ranking = new HashMap<FeedRuntimeType, Integer>();
+ ranking.put(FeedRuntimeType.INGESTION, 1);
+ ranking.put(FeedRuntimeType.COMPUTE, 2);
+ ranking.put(FeedRuntimeType.STORAGE, 3);
+ ranking.put(FeedRuntimeType.COMMIT, 4);
+ return ranking;
+ }
+}
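The report wire format implied by the getters is pipe-delimited: reportType|dataverse:feed:dataset|runtimeType|partition|value. A parsing sketch with an illustrative message:

    FeedReport report = new FeedReport("THROUGHPUT|Social:TwitterFeed:Tweets|INGESTION|0|1500");
    report.getReportType(); // FeedReportMessageType.THROUGHPUT
    report.getFeedId();     // FeedConnectionId for Social:TwitterFeed:Tweets
    report.getPartition();  // 0
    report.getValue();      // 1500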
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java
new file mode 100644
index 0000000..6e0ed83
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+import edu.uci.ics.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity;
+import edu.uci.ics.asterix.metadata.entities.FeedActivity.FeedActivityType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory;
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * A utility class for providing helper functions for feeds
+ */
+public class FeedUtil {
+
+ private static final Logger LOGGER = Logger.getLogger(FeedUtil.class.getName());
+
+ public static boolean isFeedActive(FeedActivity feedActivity) {
+ if (feedActivity == null) {
+ return false;
+ }
+ FeedActivityType activityType = feedActivity.getActivityType();
+ return !(activityType.equals(FeedActivityType.FEED_FAILURE) || activityType
+ .equals(FeedActivityType.FEED_END));
+ }
+
+ private static class LocationConstraint {
+ int partition;
+ String location;
+ }
+
+ public static JobSpecification alterJobSpecificationForFeed(JobSpecification spec,
+ FeedConnectionId feedConnectionId, FeedPolicy feedPolicy) {
+
+ FeedPolicyAccessor fpa = new FeedPolicyAccessor(feedPolicy.getProperties());
+ boolean alterationRequired = (fpa.collectStatistics() || fpa.continueOnApplicationFailure()
+ || fpa.continueOnHardwareFailure() || fpa.isElastic());
+ if (!alterationRequired) {
+ return spec;
+ }
+
+ JobSpecification altered = new JobSpecification();
+ Map<OperatorDescriptorId, IOperatorDescriptor> operatorMap = spec.getOperatorMap();
+
+ // copy operators
+ Map<OperatorDescriptorId, OperatorDescriptorId> oldNewOID = new HashMap<OperatorDescriptorId, OperatorDescriptorId>();
+ for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorMap.entrySet()) {
+ IOperatorDescriptor opDesc = entry.getValue();
+ if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+ FeedIntakeOperatorDescriptor orig = (FeedIntakeOperatorDescriptor) opDesc;
+ FeedIntakeOperatorDescriptor fiop;
+ if (orig.getAdapterFactory() != null) {
+ fiop = new FeedIntakeOperatorDescriptor(altered, orig.getFeedId(), orig.getAdapterFactory(),
+ (ARecordType) orig.getOutputType(), orig.getRecordDescriptor(), orig.getFeedPolicy());
+ } else {
+ fiop = new FeedIntakeOperatorDescriptor(altered, orig.getFeedId(), orig.getAdapterLibraryName(),
+ orig.getAdapterFactoryClassName(), orig.getAdapterConfiguration(),
+ (ARecordType) orig.getOutputType(), orig.getRecordDescriptor(), orig.getFeedPolicy());
+ }
+ oldNewOID.put(opDesc.getOperatorId(), fiop.getOperatorId());
+ } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor) {
+ FeedMetaOperatorDescriptor metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc,
+ feedPolicy, FeedRuntimeType.STORAGE);
+ oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
+ } else {
+ FeedRuntimeType runtimeType = null;
+ if (opDesc instanceof AlgebricksMetaOperatorDescriptor) {
+ IPushRuntimeFactory runtimeFactory = ((AlgebricksMetaOperatorDescriptor) opDesc).getPipeline()
+ .getRuntimeFactories()[0];
+ if (runtimeFactory instanceof AssignRuntimeFactory) {
+ runtimeType = FeedRuntimeType.COMPUTE;
+ } else if (runtimeFactory instanceof StreamProjectRuntimeFactory) {
+ runtimeType = FeedRuntimeType.COMMIT;
+ }
+ }
+ FeedMetaOperatorDescriptor metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc,
+ feedPolicy, runtimeType);
+
+ oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
+ }
+ }
+
+ // copy connectors
+ Map<ConnectorDescriptorId, ConnectorDescriptorId> connectorMapping = new HashMap<ConnectorDescriptorId, ConnectorDescriptorId>();
+ for (Entry<ConnectorDescriptorId, IConnectorDescriptor> entry : spec.getConnectorMap().entrySet()) {
+ IConnectorDescriptor connDesc = entry.getValue();
+ ConnectorDescriptorId newConnId = altered.createConnectorDescriptor(connDesc);
+ connectorMapping.put(entry.getKey(), newConnId);
+ }
+
+ // make connections between operators
+ for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : spec
+ .getConnectorOperatorMap().entrySet()) {
+ IConnectorDescriptor connDesc = altered.getConnectorMap().get(connectorMapping.get(entry.getKey()));
+ Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
+ Pair<IOperatorDescriptor, Integer> rightOp = entry.getValue().getRight();
+
+ IOperatorDescriptor leftOpDesc = altered.getOperatorMap().get(
+ oldNewOID.get(leftOp.getLeft().getOperatorId()));
+ IOperatorDescriptor rightOpDesc = altered.getOperatorMap().get(
+ oldNewOID.get(rightOp.getLeft().getOperatorId()));
+
+ altered.connect(connDesc, leftOpDesc, leftOp.getRight(), rightOpDesc, rightOp.getRight());
+ }
+
+ // prepare for setting partition constraints
+ Map<OperatorDescriptorId, List<LocationConstraint>> operatorLocations = new HashMap<OperatorDescriptorId, List<LocationConstraint>>();
+ Map<OperatorDescriptorId, Integer> operatorCounts = new HashMap<OperatorDescriptorId, Integer>();
+
+ for (Constraint constraint : spec.getUserConstraints()) {
+ LValueConstraintExpression lexpr = constraint.getLValue();
+ ConstraintExpression cexpr = constraint.getRValue();
+ OperatorDescriptorId opId;
+ switch (lexpr.getTag()) {
+ case PARTITION_COUNT:
+ opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+ operatorCounts.put(opId, (int) ((ConstantExpression) cexpr).getValue());
+ break;
+ case PARTITION_LOCATION:
+ opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+
+ IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(opId));
+ List<LocationConstraint> locations = operatorLocations.get(opDesc.getOperatorId());
+ if (locations == null) {
+ locations = new ArrayList<>();
+ operatorLocations.put(opDesc.getOperatorId(), locations);
+ }
+ String location = (String) ((ConstantExpression) cexpr).getValue();
+ LocationConstraint lc = new LocationConstraint();
+ lc.location = location;
+ lc.partition = ((PartitionLocationExpression) lexpr).getPartition();
+ locations.add(lc);
+ break;
+ }
+ }
+
+ // set absolute location constraints
+ for (Entry<OperatorDescriptorId, List<LocationConstraint>> entry : operatorLocations.entrySet()) {
+ IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
+ Collections.sort(entry.getValue(), new Comparator<LocationConstraint>() {
+
+ @Override
+ public int compare(LocationConstraint o1, LocationConstraint o2) {
+ return o1.partition - o2.partition;
+ }
+ });
+ String[] locations = new String[entry.getValue().size()];
+ for (int i = 0; i < locations.length; ++i) {
+ locations[i] = entry.getValue().get(i).location;
+ }
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(altered, opDesc, locations);
+ }
+
+ // set count constraints
+ for (Entry<OperatorDescriptorId, Integer> entry : operatorCounts.entrySet()) {
+ IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
+ if (!operatorLocations.containsKey(entry.getKey())) {
+ PartitionConstraintHelper.addPartitionCountConstraint(altered, opDesc, entry.getValue());
+ }
+ }
+
+ // useConnectorSchedulingPolicy
+ altered.setUseConnectorPolicyForScheduling(spec.isUseConnectorPolicyForScheduling());
+
+ // connectorAssignmentPolicy
+ altered.setConnectorPolicyAssignmentPolicy(spec.getConnectorPolicyAssignmentPolicy());
+
+ // roots
+ for (OperatorDescriptorId root : spec.getRoots()) {
+ altered.addRoot(altered.getOperatorMap().get(oldNewOID.get(root)));
+ }
+
+ // jobEventListenerFactory
+ altered.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("New Job Spec:" + altered);
+ }
+
+ return altered;
+
+ }
+
+ public static Triple<IAdapterFactory, ARecordType, AdapterType> getFeedFactoryAndOutput(Feed feed,
+ MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+
+ String adapterName = null;
+ DatasourceAdapter adapterEntity = null;
+ String adapterFactoryClassname = null;
+ IAdapterFactory adapterFactory = null;
+ ARecordType adapterOutputType = null;
+ Triple<IAdapterFactory, ARecordType, AdapterType> feedProps = null;
+ try {
+ adapterName = feed.getAdaptorName();
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
+ adapterName);
+ if (adapterEntity == null) {
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
+ }
+
+ if (adapterEntity != null) {
+ adapterFactoryClassname = adapterEntity.getClassname();
+ switch (adapterEntity.getType()) {
+ case INTERNAL:
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ break;
+ case EXTERNAL:
+ String[] anameComponents = adapterName.split("#");
+ String libraryName = anameComponents[0];
+ ClassLoader cl = ExternalLibraryManager.getLibraryClassLoader(feed.getDataverseName(),
+ libraryName);
+ adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
+ break;
+ }
+ } else {
+ adapterFactoryClassname = AqlMetadataProvider.adapterFactoryMapping.get(adapterName);
+ if (adapterFactoryClassname == null) {
+ adapterFactoryClassname = adapterName;
+ }
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ }
+
+ Map<String, String> configuration = feed.getAdaptorConfiguration();
+
+ switch (adapterFactory.getAdapterType()) {
+ case TYPED:
+ ((ITypedAdapterFactory) adapterFactory).configure(configuration);
+ adapterOutputType = ((ITypedAdapterFactory) adapterFactory).getAdapterOutputType();
+ break;
+ case GENERIC:
+ String outputTypeName = configuration.get(IGenericAdapterFactory.KEY_TYPE_NAME);
+ if (outputTypeName == null) {
+ throw new IllegalArgumentException(
+ "You must specify the datatype associated with the incoming data. Datatype is specified by the "
+ + IGenericAdapterFactory.KEY_TYPE_NAME + " configuration parameter");
+ }
+ adapterOutputType = (ARecordType) MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
+ feed.getDataverseName(), outputTypeName).getDatatype();
+ ((IGenericAdapterFactory) adapterFactory).configure(configuration, (ARecordType) adapterOutputType);
+ break;
+ default:
+ throw new IllegalStateException("Unknown factory type for " + adapterFactoryClassname);
+ }
+
+ // adapterEntity is null when the adapter was resolved through the static
+ // adapterFactoryMapping; such built-in adapters are internal.
+ AdapterType adapterType = adapterEntity != null ? adapterEntity.getType() : AdapterType.INTERNAL;
+ feedProps = new Triple<IAdapterFactory, ARecordType, AdapterType>(adapterFactory, adapterOutputType,
+ adapterType);
+ } catch (Exception e) {
+ throw new AlgebricksException("Unable to create adapter: " + e.getMessage(), e);
+ }
+ return feedProps;
+ }
+}
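For illustration, the resolution logic above can be exercised as follows. A minimal sketch, assuming a getFeed lookup on MetadataManager analogous to the getAdapter calls above; the dataverse name, feed name, and transaction handling are illustrative:

    // Resolve and configure the adapter factory for a feed (names illustrative).
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, "feeds", "TwitterFeed"); // hypothetical lookup
    Triple<IAdapterFactory, ARecordType, AdapterType> p = getFeedFactoryAndOutput(feed, mdTxnCtx);
    IAdapterFactory adapterFactory = p.first; // factory, already configured
    ARecordType outputType = p.second;        // record type produced by the adapter
    AdapterType adapterType = p.third;        // INTERNAL or EXTERNAL
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);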
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterExecutor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterExecutor.java
new file mode 100644
index 0000000..a03bf2b
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterExecutor.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+
+public interface IAdapterExecutor {
+
+ /**
+ * Starts the executor, which runs the adapter and begins feed ingestion.
+ */
+ public void start() throws Exception;
+
+ /**
+ * Stops the executor, discontinuing the ingestion of data by the adapter.
+ */
+ public void stop() throws Exception;
+
+ /**
+ * @return the id of the feed connection serviced by this executor
+ */
+ public FeedConnectionId getFeedId();
+
+}
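A minimal sketch of a hypothetical implementor of this interface, delegating to an IFeedAdapter; the real implementor in this patch may differ:

    // Hypothetical executor running a feed adapter (imports elided).
    public class SimpleAdapterExecutor implements IAdapterExecutor {

        private final FeedConnectionId feedId;
        private final IFeedAdapter adapter;
        private final int partition;
        private final IFrameWriter writer;

        public SimpleAdapterExecutor(FeedConnectionId feedId, IFeedAdapter adapter, int partition,
                IFrameWriter writer) {
            this.feedId = feedId;
            this.adapter = adapter;
            this.partition = partition;
            this.writer = writer;
        }

        @Override
        public void start() throws Exception {
            adapter.start(partition, writer); // blocks until ingestion ends
        }

        @Override
        public void stop() throws Exception {
            adapter.stop(); // discontinues ingestion
        }

        @Override
        public FeedConnectionId getFeedId() {
            return feedId;
        }
    }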
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterFactory.java
new file mode 100644
index 0000000..f0a3aa4
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IAdapterFactory.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+/**
+ * Base interface for IGenericAdapterFactory and ITypedAdapterFactory.
+ * Acts as a marker interface indicating that the implementation provides functionality
+ * for creating an adapter.
+ */
+public interface IAdapterFactory extends Serializable {
+
+ /**
+ * A 'GENERIC' adapter can be configured to return a given datatype.
+ * A 'TYPED' adapter returns records with a pre-defined datatype.
+ */
+ public enum AdapterType {
+ GENERIC,
+ TYPED
+ }
+
+ public enum SupportedOperation {
+ READ,
+ WRITE,
+ READ_WRITE
+ }
+
+ /**
+ * Returns the kind of operations supported by the adapter: reading from an
+ * external data source, writing to an external data source, or both.
+ *
+ * @see SupportedOperation
+ * @return the supported operation(s)
+ */
+ public SupportedOperation getSupportedOperations();
+
+ /**
+ * Returns the display name corresponding to the Adapter type that is created by the factory.
+ *
+ * @return the display name
+ */
+ public String getName();
+
+ /**
+ * Returns the type of the adapter (GENERIC or TYPED).
+ *
+ * @return the adapter type
+ */
+ public AdapterType getAdapterType();
+
+ /**
+ * Returns the partition constraint for the adapter. A partition constraint
+ * can be a requirement to execute at a particular location, or a
+ * cardinality constraint indicating the number of instances that need to
+ * run in parallel. For example, an IDatasourceAdapter implementation
+ * written for data residing on the local file system of a node cannot run
+ * on any other node and thus has a location partition constraint. A
+ * location partition constraint can be expressed as a node IP address or a
+ * node controller id; in the former case, the IP address is translated to
+ * the id of a node controller running on the node with that IP address.
+ *
+ * @return the partition constraint for the adapter
+ */
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
+
+ /**
+ * Creates an instance of IDatasourceAdapter.
+ *
+ * @param ctx the Hyracks task context
+ * @param partition the partition number of the parallel instance being created
+ * @return an instance of IDatasourceAdapter
+ * @throws Exception
+ */
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception;
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IDatasourceAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IDatasourceAdapter.java
new file mode 100644
index 0000000..a4c5de9
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IDatasourceAdapter.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+
+/**
+ * A super interface implemented by a data source adapter. An adapter can be
+ * pull-based or push-based. This interface provides the common APIs that every
+ * adapter needs to implement, irrespective of its kind (pull or push).
+ */
+public interface IDatasourceAdapter extends Serializable {
+
+ /**
+ * Triggers the adapter to begin ingesting data from the external source.
+ *
+ * @param partition
+ * The adapter may run with a degree of parallelism; partition
+ * corresponds to the i'th parallel instance.
+ * @param writer
+ * The frame writer used by the adapter to emit output. The
+ * adapter packs the bytes fetched from the external source into
+ * frames and forwards the frames to the receiving operator
+ * through this instance of IFrameWriter.
+ * @throws Exception
+ */
+ public void start(int partition, IFrameWriter writer) throws Exception;
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java
new file mode 100644
index 0000000..55abd73
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+/**
+ * Interface implemented by an adapter that can be controlled or managed by
+ * external commands (stop, alter).
+ */
+public interface IFeedAdapter extends IDatasourceAdapter {
+
+ public enum DataExchangeMode {
+ PULL,
+ PUSH
+ }
+
+ /**
+ * @return the data exchange mode (PULL or PUSH) of the adapter
+ */
+ public DataExchangeMode getDataExchangeMode();
+
+ /**
+ * Discontinue the ingestion of data and end the feed.
+ *
+ * @throws Exception
+ */
+ public void stop() throws Exception;
+
+}
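A minimal sketch of a push-based adapter obeying this contract; the external source and the framing of fetched bytes are elided:

    // Hypothetical push-based feed adapter skeleton (imports elided).
    public class ExamplePushAdapter implements IFeedAdapter {

        private static final long serialVersionUID = 1L;

        private volatile boolean running;

        @Override
        public DataExchangeMode getDataExchangeMode() {
            return DataExchangeMode.PUSH;
        }

        @Override
        public void start(int partition, IFrameWriter writer) throws Exception {
            running = true;
            while (running) {
                // fetch bytes from the external source, pack them into frames
                // and forward them through writer.nextFrame(...)
            }
        }

        @Override
        public void stop() {
            running = false; // observed by start(), which then returns
        }
    }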
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedMessage.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedMessage.java
new file mode 100644
index 0000000..0241e5b
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedMessage.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.Serializable;
+
+public interface IFeedMessage extends Serializable {
+
+ public enum MessageType {
+ END,
+ SUPER_FEED_MANAGER_ELECT
+ }
+
+ public MessageType getMessageType();
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java
new file mode 100644
index 0000000..16c3c80
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+public interface IGenericAdapterFactory extends IAdapterFactory {
+
+ public static final String KEY_TYPE_NAME = "type-name";
+
+ public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception;
+
+}
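The KEY_TYPE_NAME contract ties back to getFeedFactoryAndOutput above, which reads the type name from the feed configuration and resolves it through the metadata manager before calling configure. A sketch of that call sequence; the datatype name and the resolved outputType are illustrative:

    Map<String, String> configuration = new HashMap<String, String>();
    configuration.put(IGenericAdapterFactory.KEY_TYPE_NAME, "TweetType"); // hypothetical datatype
    // outputType: the ARecordType resolved from metadata for "TweetType"
    ((IGenericAdapterFactory) adapterFactory).configure(configuration, outputType);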
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java
new file mode 100644
index 0000000..50641b0
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+public interface IPullBasedFeedAdapter extends IFeedAdapter {
+
+ /**
+ * @return the policy enforcer associated with this adapter
+ */
+ public FeedPolicyEnforcer getPolicyEnforcer();
+
+ /**
+ * @param feedPolicyEnforcer
+ * the policy enforcer to be used by this adapter
+ */
+ public void setFeedPolicyEnforcer(FeedPolicyEnforcer feedPolicyEnforcer);
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ITypedAdapterFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ITypedAdapterFactory.java
new file mode 100644
index 0000000..6faa44b
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/ITypedAdapterFactory.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+public interface ITypedAdapterFactory extends IAdapterFactory {
+
+ public ARecordType getAdapterOutputType();
+
+ public void configure(Map<String, String> configuration) throws Exception;
+}
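A minimal sketch of a typed factory; the fixed output type is left as a field, and AlgebricksCountPartitionConstraint is assumed from algebricks-common (imports elided):

    // Hypothetical typed adapter factory producing records of a fixed, pre-defined type.
    public class ExampleTypedAdapterFactory implements ITypedAdapterFactory {

        private static final long serialVersionUID = 1L;

        private ARecordType outputType; // the pre-defined record type

        @Override
        public SupportedOperation getSupportedOperations() {
            return SupportedOperation.READ;
        }

        @Override
        public String getName() {
            return "example_typed_adapter";
        }

        @Override
        public AdapterType getAdapterType() {
            return AdapterType.TYPED;
        }

        @Override
        public ARecordType getAdapterOutputType() {
            return outputType;
        }

        @Override
        public void configure(Map<String, String> configuration) throws Exception {
            // parse adapter-specific parameters and initialize outputType here
        }

        @Override
        public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
            return new AlgebricksCountPartitionConstraint(1); // one parallel instance
        }

        @Override
        public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
            return new ExamplePushAdapter(); // the adapter sketched after IFeedAdapter above
        }
    }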
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IngestionRuntime.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IngestionRuntime.java
new file mode 100644
index 0000000..0ea0d38
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IngestionRuntime.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedRuntime;
+
+public class IngestionRuntime extends FeedRuntime {
+
+ private AdapterRuntimeManager adapterRuntimeManager;
+
+ public IngestionRuntime(FeedConnectionId feedId, int partition, FeedRuntimeType feedRuntimeType,
+ AdapterRuntimeManager adapterRuntimeManager) {
+ super(feedId, partition, feedRuntimeType);
+ this.adapterRuntimeManager = adapterRuntimeManager;
+ }
+
+ public AdapterRuntimeManager getAdapterRuntimeManager() {
+ return adapterRuntimeManager;
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/MessageListener.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/MessageListener.java
new file mode 100644
index 0000000..1bd042a
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/MessageListener.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class MessageListener {
+
+ private static final Logger LOGGER = Logger.getLogger(MessageListener.class.getName());
+
+ private final int port;
+ private final LinkedBlockingQueue<String> outbox;
+
+ private ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+ private MessageListenerServer listenerServer;
+
+ public MessageListener(int port, LinkedBlockingQueue<String> outbox) {
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ listenerServer.stop();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Stopped message service at " + port);
+ }
+ if (!executorService.isShutdown()) {
+ executorService.shutdownNow();
+ }
+
+ }
+
+ public void start() throws IOException {
+ listenerServer = new MessageListenerServer(port, outbox);
+ executorService.execute(listenerServer);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting message service at " + port);
+ }
+ }
+
+ private static class MessageListenerServer implements Runnable {
+
+ private final int port;
+ private final LinkedBlockingQueue<String> outbox;
+ private ServerSocket server;
+
+ public MessageListenerServer(int port, LinkedBlockingQueue<String> outbox) {
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ try {
+ server.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void run() {
+ final char EOL = '\n';
+ Socket client = null;
+ try {
+ server = new ServerSocket(port);
+ client = server.accept();
+ InputStream in = client.getInputStream();
+ CharBuffer buffer = CharBuffer.allocate(5000);
+ // in.read() returns -1 at end-of-stream; the result must be checked before
+ // being narrowed to a char, since (char) -1 is a valid char value (0xFFFF).
+ int read = in.read();
+ while (read != -1) {
+ char ch = (char) read;
+ while (ch != EOL && read != -1) {
+ buffer.put(ch);
+ read = in.read();
+ ch = (char) read;
+ }
+ buffer.flip();
+ String s = buffer.toString();
+ synchronized (outbox) {
+ outbox.add(s + "\n");
+ }
+ buffer.clear();
+ read = in.read();
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to start message listener on port " + port);
+ }
+ } finally {
+ if (client != null) {
+ try {
+ client.close();
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+ if (server != null) {
+ try {
+ server.close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ }
+
+ }
+
+ public static interface IMessageAnalyzer {
+
+ public LinkedBlockingQueue<String> getMessageQueue();
+
+ }
+
+}
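Usage sketch: the listener accepts a single inbound connection on the given port and deposits each newline-terminated message into the supplied queue (port number illustrative):

    LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();
    MessageListener listener = new MessageListener(9090, inbox);
    listener.start();              // begins accepting on port 9090
    String message = inbox.take(); // blocks until a message arrives
    listener.stop();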
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/RemoteSocketMessageListener.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/RemoteSocketMessageListener.java
new file mode 100644
index 0000000..d7e2e2f
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/RemoteSocketMessageListener.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class RemoteSocketMessageListener {
+
+ private static final Logger LOGGER = Logger.getLogger(RemoteSocketMessageListener.class.getName());
+
+ private final String host;
+ private final int port;
+ private final LinkedBlockingQueue<String> outbox;
+
+ private ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+ private RemoteMessageListenerServer listenerServer;
+
+ public RemoteSocketMessageListener(String host, int port, LinkedBlockingQueue<String> outbox) {
+ this.host = host;
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ if (!executorService.isShutdown()) {
+ executorService.shutdownNow();
+ }
+ listenerServer.stop();
+
+ }
+
+ public void start() throws IOException {
+ listenerServer = new RemoteMessageListenerServer(host, port, outbox);
+ executorService.execute(listenerServer);
+ }
+
+ private static class RemoteMessageListenerServer implements Runnable {
+
+ private final String host;
+ private final int port;
+ private final LinkedBlockingQueue<String> outbox;
+ private Socket client;
+
+ public RemoteMessageListenerServer(String host, int port, LinkedBlockingQueue<String> outbox) {
+ this.host = host;
+ this.port = port;
+ this.outbox = outbox;
+ }
+
+ public void stop() {
+ try {
+ client.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void run() {
+ final char EOL = '\n';
+ try {
+ // assign the field (not a shadowing local) so that stop() can close the socket
+ client = new Socket(host, port);
+ InputStream in = client.getInputStream();
+ CharBuffer buffer = CharBuffer.allocate(5000);
+ // in.read() returns -1 at end-of-stream; check before narrowing to char
+ int read = in.read();
+ while (read != -1) {
+ char ch = (char) read;
+ while (ch != EOL && read != -1) {
+ buffer.put(ch);
+ read = in.read();
+ ch = (char) read;
+ }
+ buffer.flip();
+ String s = buffer.toString();
+ synchronized (outbox) {
+ outbox.add(s + "\n");
+ }
+ buffer.clear();
+ read = in.read();
+ }
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to connect remote message listener to " + host + ":" + port);
+ }
+ } finally {
+ if (client != null && !client.isClosed()) {
+ try {
+ client.close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ }
+
+ }
+
+ private static class MessageParser implements Runnable {
+
+ private Socket client;
+ private IMessageAnalyzer messageAnalyzer;
+ private static final char EOL = '\n';
+
+ public MessageParser(Socket client, IMessageAnalyzer messageAnalyzer) {
+ this.client = client;
+ this.messageAnalyzer = messageAnalyzer;
+ }
+
+ @Override
+ public void run() {
+ CharBuffer buffer = CharBuffer.allocate(5000);
+ try {
+ InputStream in = client.getInputStream();
+ // in.read() returns -1 at end-of-stream; check before narrowing to char
+ int read = in.read();
+ while (read != -1) {
+ char ch = (char) read;
+ while (ch != EOL && read != -1) {
+ buffer.put(ch);
+ read = in.read();
+ ch = (char) read;
+ }
+ buffer.flip();
+ String s = buffer.toString();
+ synchronized (messageAnalyzer) {
+ messageAnalyzer.getMessageQueue().add(s + "\n");
+ }
+ buffer.clear();
+ read = in.read();
+ }
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ } finally {
+ try {
+ client.close();
+ } catch (IOException ioe) {
+ // do nothing
+ }
+ }
+ }
+ }
+
+ public static interface IMessageAnalyzer {
+
+ /**
+ * @return the queue into which received messages are deposited
+ */
+ public LinkedBlockingQueue<String> getMessageQueue();
+
+ }
+
+}
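This class mirrors MessageListener but connects outward to an already-listening remote endpoint instead of accepting a connection. A usage sketch (host and port illustrative):

    LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();
    RemoteSocketMessageListener remote = new RemoteSocketMessageListener("10.0.0.5", 9090, inbox);
    remote.start(); // connects to 10.0.0.5:9090 and starts draining messages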
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/AsterixExternalScalarFunctionInfo.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/AsterixExternalScalarFunctionInfo.java
new file mode 100644
index 0000000..4a06b7d
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/AsterixExternalScalarFunctionInfo.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.functions;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.om.functions.AsterixExternalFunctionInfo;
+import edu.uci.ics.asterix.om.functions.AsterixFunction;
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+
+public class AsterixExternalScalarFunctionInfo extends AsterixExternalFunctionInfo {
+
+ private static final long serialVersionUID = 1L;
+
+ public AsterixExternalScalarFunctionInfo(String namespace, AsterixFunction asterixFunction, IAType returnType,
+ String body, String language, List<IAType> argumentTypes, IResultTypeComputer rtc) {
+ super(namespace, asterixFunction, FunctionKind.SCALAR, argumentTypes, returnType, rtc, body, language);
+ }
+
+ public AsterixExternalScalarFunctionInfo() {
+ super();
+ }
+
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalFunctionCompilerUtil.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
new file mode 100644
index 0000000..d0e6aeb
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.functions;
+
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.Datatype;
+import edu.uci.ics.asterix.metadata.entities.Function;
+import edu.uci.ics.asterix.om.functions.AsterixFunction;
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ADoubleTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.AFloatTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.AInt32TypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.AStringTypeComputer;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class ExternalFunctionCompilerUtil implements Serializable {
+
+ // recognize ordered list types such as [int32] and unordered list types such as {{string}}
+ private static Pattern orderedListPattern = Pattern.compile("\\[.*\\]");
+ private static Pattern unorderedListPattern = Pattern.compile("\\{\\{.*\\}\\}");
+
+ public static IFunctionInfo getExternalFunctionInfo(MetadataTransactionContext txnCtx, Function function)
+ throws MetadataException {
+
+ String functionKind = function.getKind();
+ IFunctionInfo finfo = null;
+ if (FunctionKind.SCALAR.toString().equalsIgnoreCase(functionKind)) {
+ finfo = getScalarFunctionInfo(txnCtx, function);
+ } else if (FunctionKind.AGGREGATE.toString().equalsIgnoreCase(functionKind)) {
+ finfo = getAggregateFunctionInfo(txnCtx, function);
+ } else if (FunctionKind.STATEFUL.toString().equalsIgnoreCase(functionKind)) {
+ finfo = getStatefulFunctionInfo(txnCtx, function);
+ } else if (FunctionKind.UNNEST.toString().equalsIgnoreCase(functionKind)) {
+ finfo = getUnnestFunctionInfo(txnCtx, function);
+ }
+ return finfo;
+ }
+
+ private static IFunctionInfo getScalarFunctionInfo(MetadataTransactionContext txnCtx, Function function)
+ throws MetadataException {
+ FunctionIdentifier fid = new FunctionIdentifier(function.getDataverseName(), function.getName(),
+ function.getArity());
+ IResultTypeComputer typeComputer = getResultTypeComputer(txnCtx, function);
+ List<IAType> arguments = new ArrayList<IAType>();
+ IAType returnType = null;
+ List<String> paramTypes = function.getParams();
+ for (String paramType : paramTypes) {
+ arguments.add(getTypeInfo(paramType, txnCtx, function));
+ }
+
+ returnType = getTypeInfo(function.getReturnType(), txnCtx, function);
+
+ AsterixExternalScalarFunctionInfo scalarFunctionInfo = new AsterixExternalScalarFunctionInfo(
+ fid.getNamespace(), new AsterixFunction(fid.getName(), fid.getArity()), returnType,
+ function.getFunctionBody(), function.getLanguage(), arguments, typeComputer);
+ return scalarFunctionInfo;
+ }
+
+ private static IAType getTypeInfo(String paramType, MetadataTransactionContext txnCtx, Function function)
+ throws MetadataException {
+ if (paramType.equalsIgnoreCase(BuiltinType.AINT32.getDisplayName())) {
+ return (BuiltinType.AINT32);
+ } else if (paramType.equalsIgnoreCase(BuiltinType.AFLOAT.getDisplayName())) {
+ return (BuiltinType.AFLOAT);
+ } else if (paramType.equalsIgnoreCase(BuiltinType.ASTRING.getDisplayName())) {
+ return (BuiltinType.ASTRING);
+ } else if (paramType.equalsIgnoreCase(BuiltinType.ADOUBLE.getDisplayName())) {
+ return (BuiltinType.ADOUBLE);
+ } else {
+ IAType collection = getCollectionType(paramType, txnCtx, function);
+ if (collection != null) {
+ return collection;
+ } else {
+ Datatype datatype;
+ datatype = MetadataManager.INSTANCE.getDatatype(txnCtx, function.getDataverseName(), paramType);
+ if (datatype == null) {
+ throw new MetadataException(" Type " + paramType + " not defined");
+ }
+ return (datatype.getDatatype());
+ }
+ }
+ }
+
+ private static IAType getCollectionType(String paramType, MetadataTransactionContext txnCtx, Function function)
+ throws MetadataException {
+
+ Matcher matcher = orderedListPattern.matcher(paramType);
+ if (matcher.find()) {
+ String subType = paramType.substring(paramType.indexOf('[') + 1, paramType.lastIndexOf(']'));
+ return new AOrderedListType(getTypeInfo(subType, txnCtx, function), "AOrderedList");
+ } else {
+ matcher = unorderedListPattern.matcher(paramType);
+ if (matcher.find()) {
+ String subType = paramType.substring(paramType.indexOf("{{") + 2, paramType.lastIndexOf("}}"));
+ return new AUnorderedListType(getTypeInfo(subType, txnCtx, function), "AUnorderedList");
+ }
+ }
+ return null;
+ }
+
+ private static IResultTypeComputer getResultTypeComputer(final MetadataTransactionContext txnCtx,
+ final Function function) throws MetadataException {
+
+ final IAType type = getTypeInfo(function.getReturnType(), txnCtx, function);
+ switch (type.getTypeTag()) {
+ case INT32:
+ return AInt32TypeComputer.INSTANCE;
+ case FLOAT:
+ return AFloatTypeComputer.INSTANCE;
+ case DOUBLE:
+ return ADoubleTypeComputer.INSTANCE;
+ case STRING:
+ return AStringTypeComputer.INSTANCE;
+ case ORDEREDLIST:
+ return new IResultTypeComputer() {
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+
+ return new AOrderedListType(((AOrderedListType) type).getItemType(), ((AOrderedListType) type)
+ .getItemType().getTypeName());
+ }
+
+ };
+ case UNORDEREDLIST:
+ return new IResultTypeComputer() {
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+
+ return new AUnorderedListType(((AUnorderedListType) type).getItemType(), ((AUnorderedListType) type).getItemType().getTypeName());
+ }
+
+ };
+ default:
+ IResultTypeComputer typeComputer = new IResultTypeComputer() {
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> mp) throws AlgebricksException {
+ return type;
+ }
+ };
+ return typeComputer;
+ }
+
+ }
+
+ private static IAType getType(Function function, MetadataTransactionContext txnCtx) throws AlgebricksException {
+ IAType collectionType = null;
+ try {
+ collectionType = getCollectionType(function.getReturnType(), txnCtx, function);
+ if (collectionType != null) {
+ return collectionType;
+ } else {
+
+ Datatype datatype;
+ datatype = MetadataManager.INSTANCE.getDatatype(txnCtx, function.getDataverseName(),
+ function.getReturnType());
+ return datatype.getDatatype();
+ }
+ } catch (MetadataException me) {
+ throw new AlgebricksException(me);
+ }
+ }
+
+ private static IFunctionInfo getUnnestFunctionInfo(MetadataTransactionContext txnCtx, Function function) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ private static IFunctionInfo getStatefulFunctionInfo(MetadataTransactionContext txnCtx, Function function) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ private static IFunctionInfo getAggregateFunctionInfo(MetadataTransactionContext txnCtx, Function function) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public static void main(String args[]) throws FileNotFoundException, IOException {
+ ExternalFunctionCompilerUtil obj = new ExternalFunctionCompilerUtil();
+ ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream("/tmp/ecu.obj"));
+ oos.writeObject(obj);
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalLibraryManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalLibraryManager.java
new file mode 100755
index 0000000..aa506d0
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/ExternalLibraryManager.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.functions;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ExternalLibraryManager {
+
+ private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
+
+ public static void registerLibraryClassLoader(String dataverseName, String libraryName, ClassLoader classLoader) {
+ String key = getKey(dataverseName, libraryName);
+ synchronized (libraryClassLoaders) {
+ if (libraryClassLoaders.get(key) != null) {
+ throw new IllegalStateException("Library class loader already registered!");
+ }
+ libraryClassLoaders.put(key, classLoader);
+ }
+ }
+
+ public static void deregisterLibraryClassLoader(String dataverseName, String libraryName) {
+ String key = getKey(dataverseName, libraryName);
+ synchronized (libraryClassLoaders) {
+ if (libraryClassLoaders.get(key) != null) {
+ libraryClassLoaders.remove(key);
+ }
+ }
+ }
+
+ public static ClassLoader getLibraryClassLoader(String dataverseName, String libraryName) {
+ String key = getKey(dataverseName, libraryName);
+ synchronized (libraryClassLoaders) {
+ return libraryClassLoaders.get(key);
+ }
+ }
+
+ private static String getKey(String dataverseName, String libraryName) {
+ return dataverseName + "." + libraryName;
+ }
+
+}
\ No newline at end of file
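Usage sketch for the registry; the dataverse, library name, jar path, and factory class are illustrative:

    // Register the class loader for an external library (imports elided).
    URL[] urls = new URL[] { new File("/path/to/twitterlib.jar").toURI().toURL() };
    ExternalLibraryManager.registerLibraryClassLoader("feeds", "twitterlib", new URLClassLoader(urls));

    // Later, e.g. when resolving an EXTERNAL adapter in getFeedFactoryAndOutput:
    ClassLoader cl = ExternalLibraryManager.getLibraryClassLoader("feeds", "twitterlib");
    Object factory = cl.loadClass("org.example.TweetAdapterFactory").newInstance();

    ExternalLibraryManager.deregisterLibraryClassLoader("feeds", "twitterlib");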
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/MetadataBuiltinFunctions.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/MetadataBuiltinFunctions.java
index d5d4cc2..086caf1 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/MetadataBuiltinFunctions.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/functions/MetadataBuiltinFunctions.java
@@ -40,6 +40,8 @@
AsterixBuiltinFunctions.addDatasetFunction(AsterixBuiltinFunctions.DATASET);
AsterixBuiltinFunctions.addUnnestFun(AsterixBuiltinFunctions.FEED_INGEST, false);
AsterixBuiltinFunctions.addDatasetFunction(AsterixBuiltinFunctions.FEED_INGEST);
+ AsterixBuiltinFunctions.addUnnestFun(AsterixBuiltinFunctions.FEED_INTERCEPT, false);
+ AsterixBuiltinFunctions.addDatasetFunction(AsterixBuiltinFunctions.FEED_INTERCEPT);
}
public static void addMetadataBuiltinFunctions() {
@@ -85,7 +87,7 @@
}
return t2;
}
- });
+ }, true);
AsterixBuiltinFunctions.addPrivateFunction(AsterixBuiltinFunctions.FEED_INGEST, new IResultTypeComputer() {
@@ -93,6 +95,44 @@
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> mp) throws AlgebricksException {
AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
+ if (f.getArguments().size() != 3) {
+ throw new AlgebricksException("Incorrect number of arguments -> arity is 3, not "
+ + f.getArguments().size());
+ }
+ ILogicalExpression a1 = f.getArguments().get(1).getValue();
+ IAType t1 = (IAType) env.getType(a1);
+ if (t1.getTypeTag() == ATypeTag.ANY) {
+ return BuiltinType.ANY;
+ }
+ if (t1.getTypeTag() != ATypeTag.STRING) {
+ throw new AlgebricksException("Illegal type " + t1 + " for feed-ingest argument.");
+ }
+ if (a1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+ return BuiltinType.ANY;
+ }
+ AsterixConstantValue acv = (AsterixConstantValue) ((ConstantExpression) a1).getValue();
+ String typeArg = ((AString) acv.getObject()).getStringValue();
+ AqlMetadataProvider metadata = ((AqlMetadataProvider) mp);
+ Pair<String, String> argInfo = getDatasetInfo(metadata, typeArg);
+ String dataverseName = argInfo.first;
+ String typeName = argInfo.second;
+ if (dataverseName == null) {
+ throw new AlgebricksException("Unspecified dataverse!");
+ }
+ IAType t2 = metadata.findType(dataverseName, typeName);
+ if (t2 == null) {
+ throw new AlgebricksException("Unknown type " + typeName);
+ }
+ return t2;
+ }
+ }, true);
+
+ AsterixBuiltinFunctions.addFunction(AsterixBuiltinFunctions.FEED_INTERCEPT, new IResultTypeComputer() {
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> mp) throws AlgebricksException {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
if (f.getArguments().size() != 1) {
throw new AlgebricksException("dataset arity is 1, not " + f.getArguments().size());
}
@@ -128,21 +168,20 @@
}
return t2;
}
- });
+ }, true);
}
private static Pair<String, String> getDatasetInfo(AqlMetadataProvider metadata, String datasetArg) {
- String[] datasetNameComponents = datasetArg.split("\\.");
- String dataverseName;
- String datasetName;
- if (datasetNameComponents.length == 1) {
- dataverseName = metadata.getDefaultDataverse() == null ? null : metadata.getDefaultDataverse()
- .getDataverseName();
- datasetName = datasetNameComponents[0];
+ String[] nameComponents = datasetArg.split("\\.");
+ String first;
+ String second;
+ if (nameComponents.length == 1) {
+ first = metadata.getDefaultDataverse() == null ? null : metadata.getDefaultDataverse().getDataverseName();
+ second = nameComponents[0];
} else {
- dataverseName = datasetNameComponents[0];
- datasetName = datasetNameComponents[1];
+ first = nameComponents[0];
+ second = nameComponents[1];
}
- return new Pair(dataverseName, datasetName);
+ return new Pair<String, String>(first, second);
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index 3d0f799..7a56d34 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -36,6 +36,6 @@
@Override
public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException, IOException {
- return tupleReaderWriter.getMetadataEntytiFromTuple(tuple);
+ return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
}
}
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index 2311930..06f74ae 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-om</artifactId>
@@ -40,13 +40,13 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
index 10b6071..a1148cf 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
@@ -162,7 +162,7 @@
openFieldNameLengths = Arrays.copyOf(openFieldNameLengths, openFieldNameLengths.length
+ DEFAULT_NUM_OPEN_FIELDS);
}
- int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength());
+ int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength() - 1);
if (recType != null) {
int cFieldPos = recType.findFieldPosition(name.getByteArray(), name.getStartOffset() + 1,
name.getLength() - 1);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AListElementToken.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AListElementToken.java
index e263e87..ba7e65e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AListElementToken.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AListElementToken.java
@@ -26,7 +26,7 @@
protected int length;
protected int tokenLength;
protected int typeTag;
-
+
@Override
public byte[] getData() {
return data;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlExpressionTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlExpressionTypeComputer.java
index 7eb2f30..12b2753 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlExpressionTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlExpressionTypeComputer.java
@@ -17,8 +17,10 @@
import java.util.ArrayList;
import java.util.List;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.AsterixExternalFunctionInfo;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.BuiltinType;
@@ -72,8 +74,14 @@
unionList.add(BuiltinType.ABOOLEAN);
return new AUnionType(unionList, "OptionalBoolean");
}
- // Note: only builtin functions, for now.
- IResultTypeComputer rtc = AsterixBuiltinFunctions.getResultTypeComputer(fi);
+ // Note: built-in functions + udfs
+ IResultTypeComputer rtc = null;
+ FunctionSignature signature = new FunctionSignature(fi.getNamespace(), fi.getName(), fi.getArity());
+ if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(signature, true)) {
+ rtc = AsterixBuiltinFunctions.getResultTypeComputer(fi);
+ } else {
+ rtc = ((AsterixExternalFunctionInfo) expr.getFunctionInfo()).getResultTypeComputer();
+ }
if (rtc == null) {
throw new AlgebricksException("Type computer missing for " + fi);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlNullableTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlNullableTypeComputer.java
index 3b474fd..f71fcf4 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlNullableTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/common/AqlNullableTypeComputer.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.dataflow.data.common;
-
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.types.TypeHelper;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index d2396e0..c409a4a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -61,11 +61,12 @@
.createBinaryComparator();
final IBinaryComparator ascPolygonComp = APolygonPartialBinaryComparatorFactory.INSTANCE
.createBinaryComparator();
+ final IBinaryComparator ascUUIDComp = AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
final IBinaryComparator rawComp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-
+
if (b1[s1] == ATypeTag.NULL.serialize()) {
if (b2[s2] == ATypeTag.NULL.serialize())
return 0;
@@ -83,6 +84,8 @@
+ ") cannot be compared!");
}
switch (tag1) {
+ case UUID:
+ return ascUUIDComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
case BOOLEAN: {
return ascBoolComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java
new file mode 100644
index 0000000..63c9a5a
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.asterix.dataflow.data.nontagged.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+
+public class AUUIDPartialBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AUUIDPartialBinaryComparatorFactory INSTANCE = new AUUIDPartialBinaryComparatorFactory();
+
+ @Override
+ public IBinaryComparator createBinaryComparator() {
+ return new IBinaryComparator() {
+
+ @Override
+ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+ int msbCompare = Long.compare(Integer64SerializerDeserializer.getLong(b1, s1),
+ Integer64SerializerDeserializer.getLong(b2, s2));
+ if (msbCompare == 0) {
+ return Long.compare(Integer64SerializerDeserializer.getLong(b1, s1 + 8),
+ Integer64SerializerDeserializer.getLong(b2, s2 + 8));
+ } else {
+ return msbCompare;
+ }
+ }
+ };
+ }
+
+}
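The comparator assumes a UUID payload serialized as two 8-byte longs (most-significant half first) and orders by the first long, then the second. A usage sketch over two 16-byte payloads whose type tags have already been stripped by the caller, as in AObjectAscBinaryComparatorFactory above:

    IBinaryComparator cmp = AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
    // b1, b2: byte arrays each holding a 16-byte UUID payload at offset 0
    int order = cmp.compare(b1, 0, 16, b2, 0, 16); // < 0, 0, or > 0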
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
index a6b825c..e25ad2f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
@@ -70,6 +70,7 @@
.createBinaryComparator();
final IBinaryComparator ascPolygonComp = APolygonPartialBinaryComparatorFactory.INSTANCE
.createBinaryComparator();
+ final IBinaryComparator ascUUIDComp = AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
final IBinaryComparator rawComp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
@Override
@@ -104,6 +105,9 @@
}
switch (tag1) {
+ case UUID: {
+ return ascUUIDComp.compare(b1, s1 + skip1, l1 - skip1, b2, s2 + skip2, l2 - skip2);
+ }
case BOOLEAN: {
return ascBoolComp.compare(b1, s1 + skip1, l1 - skip1, b2, s2 + skip2, l2 - skip2);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
index 27456e4..c1f5c3d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
@@ -56,6 +56,6 @@
} catch (IOException e) {
throw new AlgebricksException(e);
}
-
+
}
}
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADayTimeDurationPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADayTimeDurationPrinter.java
index 1386bb1..466df99 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADayTimeDurationPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADayTimeDurationPrinter.java
@@ -27,7 +27,7 @@
public static final ADayTimeDurationPrinter INSTANCE = new ADayTimeDurationPrinter();
private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
-
+
@Override
public void init() throws AlgebricksException {
// TODO Auto-generated method stub
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinterFactory.java
index 782cfde..4ae93e1 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AFloatPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16PrinterFactory.java
index 20c2b7d..f578020 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt16PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32PrinterFactory.java
index b530f03..9887f17 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt32PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8PrinterFactory.java
index 59cb480..7ae5169 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt8PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
index 2ab8d19..9272100 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
@@ -46,21 +46,21 @@
if (typetag == ATypeTag.DATE.serialize()) {
ps.print("-date(\"");
timeInstancePrinter = ADatePrinter.INSTANCE;
- ((ADatePrinter)timeInstancePrinter).printString(b, s + 4, 4, ps);
+ ((ADatePrinter) timeInstancePrinter).printString(b, s + 4, 4, ps);
ps.print(", ");
- ((ADatePrinter)timeInstancePrinter).printString(b, s + 12, 4, ps);
+ ((ADatePrinter) timeInstancePrinter).printString(b, s + 12, 4, ps);
} else if (typetag == ATypeTag.TIME.serialize()) {
ps.print("-time(\"");
timeInstancePrinter = ATimePrinter.INSTANCE;
- ((ATimePrinter)timeInstancePrinter).printString(b, s + 4, 4, ps);
+ ((ATimePrinter) timeInstancePrinter).printString(b, s + 4, 4, ps);
ps.print(", ");
- ((ATimePrinter)timeInstancePrinter).printString(b, s + 12, 4, ps);
+ ((ATimePrinter) timeInstancePrinter).printString(b, s + 12, 4, ps);
} else if (typetag == ATypeTag.DATETIME.serialize()) {
ps.print("-datetime(\"");
timeInstancePrinter = ADateTimePrinter.INSTANCE;
- ((ADateTimePrinter)timeInstancePrinter).printString(b, s, 8, ps);
+ ((ADateTimePrinter) timeInstancePrinter).printString(b, s, 8, ps);
ps.print(", ");
- ((ADateTimePrinter)timeInstancePrinter).printString(b, s + 8, 8, ps);
+ ((ADateTimePrinter) timeInstancePrinter).printString(b, s + 8, 8, ps);
} else {
throw new AlgebricksException("Unsupport internal time types in interval: " + typetag);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinterFactory.java
index cbd0450..bfbf3f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class ANullPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
index f5d0196..d3f063c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
@@ -138,6 +138,10 @@
unorderedListPrinter.print(b, s, l, ps);
break;
}
+ case UUID: {
+ AUUIDPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
default: {
throw new NotImplementedException("No printer for type " + typeTag);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinterFactory.java
index 3f74c36..f3c1a29 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinterFactory.java
@@ -17,7 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
public class APointPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinter.java
new file mode 100644
index 0000000..5feb335
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinter.java
@@ -0,0 +1,27 @@
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
+
+import java.io.PrintStream;
+import java.util.UUID;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+
+public class AUUIDPrinter implements IPrinter {
+
+ public static final AUUIDPrinter INSTANCE = new AUUIDPrinter();
+
+ @Override
+ public void init() throws AlgebricksException {
+ // do nothing
+ }
+
+ @Override
+ public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
+ long msb = Integer64SerializerDeserializer.getLong(b, s + 1);
+ long lsb = Integer64SerializerDeserializer.getLong(b, s + 9);
+ UUID uuid = new UUID(msb, lsb);
+ ps.print("\"" + uuid.toString() + "\"");
+ }
+
+}
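
Editorial note: a minimal, self-contained sketch (not part of this change) of the byte layout the new AUUIDPrinter assumes — a one-byte type tag followed by the 16 UUID bytes, most-significant half first, which is why it reads the two longs at offsets s + 1 and s + 9. The tag value below is a placeholder.

    import java.io.PrintStream;
    import java.nio.ByteBuffer;
    import java.util.UUID;

    public class UUIDPrintSketch {
        public static void main(String[] args) {
            UUID uuid = UUID.randomUUID();
            // Emulate the serialized layout: [type tag][msb: 8 bytes][lsb: 8 bytes]
            byte[] b = ByteBuffer.allocate(17)
                    .put((byte) 0)                       // placeholder type tag
                    .putLong(uuid.getMostSignificantBits())
                    .putLong(uuid.getLeastSignificantBits())
                    .array();
            // What AUUIDPrinter.print(b, 0, 17, ps) effectively does:
            long msb = ByteBuffer.wrap(b, 1, 8).getLong();  // s + 1
            long lsb = ByteBuffer.wrap(b, 9, 8).getLong();  // s + 9
            PrintStream ps = System.out;
            ps.print("\"" + new UUID(msb, lsb) + "\"");     // quoted, like the printer
        }
    }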
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinterFactory.java
new file mode 100644
index 0000000..015b8b8
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUUIDPrinterFactory.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
+
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+
+public class AUUIDPrinterFactory implements IPrinterFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AUUIDPrinterFactory INSTANCE = new AUUIDPrinterFactory();
+
+ @Override
+ public IPrinter createPrinter() {
+ return AUUIDPrinter.INSTANCE;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AFloatPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AFloatPrinterFactory.java
index aceec9f..b8371ee 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AFloatPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AFloatPrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AFloatPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt16PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt16PrinterFactory.java
index cd309d0..65bf3d9 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt16PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt16PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt16PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt32PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt32PrinterFactory.java
index ac6badd..1707960 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt32PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt32PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt32PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt8PrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt8PrinterFactory.java
index 755fb22..8b6467a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt8PrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AInt8PrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class AInt8PrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/ANullPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/ANullPrinterFactory.java
index 1930d10..4dc33e8 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/ANullPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/ANullPrinterFactory.java
@@ -17,8 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
-
public class ANullPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/APointPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/APointPrinterFactory.java
index 7fbcd2b..a6c0cc0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/APointPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/APointPrinterFactory.java
@@ -17,7 +17,6 @@
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
-
public class APointPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AUnionPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AUnionPrinterFactory.java
index 1f93b21..a4cdbfa 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AUnionPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/json/AUnionPrinterFactory.java
@@ -48,8 +48,8 @@
unionList = unionType.getUnionList();
printers = new IPrinter[unionType.getUnionList().size()];
for (int i = 0; i < printers.length; i++) {
- printers[i] = (AqlJSONPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(unionType.getUnionList().get(i))).createPrinter();
+ printers[i] = (AqlJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getUnionList()
+ .get(i))).createPrinter();
printers[i].init();
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
index fb077bd..8b34558 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
@@ -69,17 +69,18 @@
short timeOffset = (short) ((datetime.charAt(0) == '-') ? 1 : 0);
timeOffset += 8;
-
- if(datetime.charAt(timeOffset) != 'T'){
+
+ if (datetime.charAt(timeOffset) != 'T') {
timeOffset += 2;
- if(datetime.charAt(timeOffset) != 'T'){
+ if (datetime.charAt(timeOffset) != 'T') {
throw new AlgebricksException(errorMessage + ": missing T");
}
}
chrononTimeInMs = ADateParserFactory.parseDatePart(datetime, 0, timeOffset);
- chrononTimeInMs += ATimeParserFactory.parseTimePart(datetime, timeOffset + 1, datetime.length() - timeOffset - 1);
+ chrononTimeInMs += ATimeParserFactory.parseTimePart(datetime, timeOffset + 1, datetime.length()
+ - timeOffset - 1);
} catch (Exception e) {
throw new HyracksDataException(e);
}
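
Editorial note: the reformatted parsing logic above probes for 'T' to distinguish the compact ISO 8601 form (yyyymmddT…) from the extended form (yyyy-mm-ddT…). A stand-alone sketch of the same index arithmetic, assuming ASCII input:

    public class DateTimeOffsetSketch {
        // Mirrors the 'T' probe in ADateTimeSerializerDeserializer: start just past
        // an 8-character date body (plus 1 for a leading '-' sign), and if no 'T'
        // is there, skip the two '-' separators of the extended form.
        static int findTimeSeparator(String datetime) {
            int timeOffset = (datetime.charAt(0) == '-') ? 1 : 0;
            timeOffset += 8;                               // "20130101" -> index 8
            if (datetime.charAt(timeOffset) != 'T') {
                timeOffset += 2;                           // "2013-01-01" -> index 10
                if (datetime.charAt(timeOffset) != 'T') {
                    throw new IllegalArgumentException("missing T");
                }
            }
            return timeOffset;
        }

        public static void main(String[] args) {
            System.out.println(findTimeSeparator("20130101T000000Z"));    // 8
            System.out.println(findTimeSeparator("2013-01-01T00:00:00")); // 10
        }
    }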
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARectangleSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARectangleSerializerDeserializer.java
index 269c508..513ca46 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARectangleSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARectangleSerializerDeserializer.java
@@ -102,7 +102,8 @@
Double.parseDouble(points[1].split(",")[1]));
if (aRectanglePoint1.getX() > aRectanglePoint2.getX() && aRectanglePoint1.getY() > aRectanglePoint2.getY()) {
aRectangle.setValue(aRectanglePoint2, aRectanglePoint1);
- } else if (aRectanglePoint1.getX() < aRectanglePoint2.getX() && aRectanglePoint1.getY() < aRectanglePoint2.getY()) {
+ } else if (aRectanglePoint1.getX() < aRectanglePoint2.getX()
+ && aRectanglePoint1.getY() < aRectanglePoint2.getY()) {
aRectangle.setValue(aRectanglePoint1, aRectanglePoint2);
} else {
throw new IllegalArgumentException(
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
index 91316f2..a881ece 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
@@ -36,7 +36,7 @@
private static final ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ATIME);
private static final AMutableTime aTime = new AMutableTime(0);
-
+
private ATimeSerializerDeserializer() {
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AUUIDSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AUUIDSerializerDeserializer.java
new file mode 100644
index 0000000..acd0888
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AUUIDSerializerDeserializer.java
@@ -0,0 +1,35 @@
+package edu.uci.ics.asterix.dataflow.data.nontagged.serde;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.om.base.AUUID;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+
+public class AUUIDSerializerDeserializer implements ISerializerDeserializer<AUUID> {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AUUIDSerializerDeserializer INSTANCE = new AUUIDSerializerDeserializer();
+
+ @Override
+ public AUUID deserialize(DataInput in) throws HyracksDataException {
+ long msb = Integer64SerializerDeserializer.INSTANCE.deserialize(in);
+ long lsb = Integer64SerializerDeserializer.INSTANCE.deserialize(in);
+ return new AUUID(msb, lsb);
+ }
+
+ @Override
+ public void serialize(AUUID instance, DataOutput out) throws HyracksDataException {
+ try {
+ Integer64SerializerDeserializer.INSTANCE.serialize(instance.getMostSignificantBits(), out);
+ Integer64SerializerDeserializer.INSTANCE.serialize(instance.getLeastSignificantBits(), out);
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+}
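
Editorial note: a minimal round-trip sketch of the representation the new serde above commits to — two big-endian longs, most-significant half first — written here with plain java.io under the assumption that Integer64SerializerDeserializer produces the usual big-endian writeLong encoding.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.UUID;

    public class UUIDSerdeSketch {
        public static void main(String[] args) throws IOException {
            UUID original = UUID.randomUUID();

            // Serialize: msb first, then lsb, as in AUUIDSerializerDeserializer.serialize
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(baos);
            out.writeLong(original.getMostSignificantBits());
            out.writeLong(original.getLeastSignificantBits());

            // Deserialize in the same order, as in AUUIDSerializerDeserializer.deserialize
            DataInputStream in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
            UUID copy = new UUID(in.readLong(), in.readLong());

            System.out.println(original.equals(copy)); // true: the round trip is lossless
        }
    }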
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/valueproviders/AqlPrimitiveValueProviderFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/valueproviders/AqlPrimitiveValueProviderFactory.java
index f7a05c6..eb27cb9 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/valueproviders/AqlPrimitiveValueProviderFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/valueproviders/AqlPrimitiveValueProviderFactory.java
@@ -23,7 +23,6 @@
import edu.uci.ics.hyracks.storage.am.rtree.impls.FloatPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerPrimitiveValueProviderFactory;
-
public class AqlPrimitiveValueProviderFactory implements IPrimitiveValueProviderFactory {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
index c5f833e..3ebedde 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
@@ -83,6 +83,6 @@
public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider();
public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider();
-
+
public IPredicateEvaluatorFactoryProvider getPredicateEvaluatorFactoryProvider();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
index 5f53cd5..672621a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
@@ -25,8 +25,9 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.APoint3DPartialBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.APointPartialBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.APolygonPartialBinaryComparatorFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.BooleanBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ARectanglePartialBinaryComparatorFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AUUIDPartialBinaryComparatorFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.BooleanBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.RawBinaryComparatorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
@@ -165,6 +166,9 @@
case INTERVAL: {
return addOffset(AIntervalPartialBinaryComparatorFactory.INSTANCE, ascending);
}
+ case UUID: {
+ return addOffset(AUUIDPartialBinaryComparatorFactory.INSTANCE, ascending);
+ }
default: {
return addOffset(RawBinaryComparatorFactory.INSTANCE, ascending);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
index 7763bae..cd7e1fd 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
@@ -26,20 +26,18 @@
* We use a type-independent binary hash function family from the hyracks
* codebase
*/
-public class AqlBinaryHashFunctionFamilyProvider implements
- IBinaryHashFunctionFamilyProvider, Serializable {
+public class AqlBinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider, Serializable {
- private static final long serialVersionUID = 1L;
- public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
+ private static final long serialVersionUID = 1L;
+ public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
- private AqlBinaryHashFunctionFamilyProvider() {
+ private AqlBinaryHashFunctionFamilyProvider() {
- }
+ }
- @Override
- public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
- throws AlgebricksException {
- return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
- }
+ @Override
+ public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
+ return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlJSONPrinterFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlJSONPrinterFactoryProvider.java
index f08389d..688f7b3 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlJSONPrinterFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlJSONPrinterFactoryProvider.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.asterix.formats.nontagged;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AUUIDPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.ABooleanPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.ACirclePrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.ADatePrinterFactory;
@@ -40,7 +41,6 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.ATimePrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.AUnionPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.AUnorderedlistPrinterFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.AYearMonthDurationPrinter;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.json.AYearMonthDurationPrinterFactory;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -121,6 +121,9 @@
else
return new AUnionPrinterFactory((AUnionType) aqlType);
}
+ case UUID: {
+ return AUUIDPrinterFactory.INSTANCE;
+ }
}
}
return AObjectPrinterFactory.INSTANCE;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPredicateEvaluatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPredicateEvaluatorFactoryProvider.java
index f4fa74e..7facab3 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPredicateEvaluatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPredicateEvaluatorFactoryProvider.java
@@ -26,51 +26,53 @@
Provides a PredicateEvaluator for equi-join cases so that NULL fields compared with each other are handled properly.
If any join key, from either side, is NULL, the record should not pass the equi-join condition.
*/
-public class AqlPredicateEvaluatorFactoryProvider implements IPredicateEvaluatorFactoryProvider{
-
- private static final long serialVersionUID = 1L;
- public static final AqlPredicateEvaluatorFactoryProvider INSTANCE = new AqlPredicateEvaluatorFactoryProvider();
-
- @Override
- public IPredicateEvaluatorFactory getPredicateEvaluatorFactory(final int[] keys0, final int[] keys1) {
-
- return new IPredicateEvaluatorFactory() {
- private static final long serialVersionUID = 1L;
- @Override
- public IPredicateEvaluator createPredicateEvaluator() {
- return new IPredicateEvaluator() {
-
- @Override
- public boolean evaluate(IFrameTupleAccessor fta0, int tupId0,
- IFrameTupleAccessor fta1, int tupId1) {
-
- int tStart0 = fta0.getTupleStartOffset(tupId0);
- int fStartOffset0 = fta0.getFieldSlotsLength() + tStart0;
-
- for(int k0 : keys0){
- int fieldStartIx = fta0.getFieldStartOffset(tupId0, k0);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta0.getBuffer().array()[fieldStartIx + fStartOffset0]);
- if(typeTag == ATypeTag.NULL){
- return false;
- }
- }
-
- int tStart1 = fta1.getTupleStartOffset(tupId1);
- int fStartOffset1 = fta1.getFieldSlotsLength() + tStart1;
-
- for(int k1 : keys1){
- int fieldStartIx = fta1.getFieldStartOffset(tupId1, k1);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta1.getBuffer().array()[fieldStartIx + fStartOffset1]);
- if(typeTag == ATypeTag.NULL){
- return false;
- }
- }
-
- return true; //none of the fields (from both sides) is NULL
- }
- };
- }
- };
- }
+public class AqlPredicateEvaluatorFactoryProvider implements IPredicateEvaluatorFactoryProvider {
+
+ private static final long serialVersionUID = 1L;
+ public static final AqlPredicateEvaluatorFactoryProvider INSTANCE = new AqlPredicateEvaluatorFactoryProvider();
+
+ @Override
+ public IPredicateEvaluatorFactory getPredicateEvaluatorFactory(final int[] keys0, final int[] keys1) {
+
+ return new IPredicateEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public IPredicateEvaluator createPredicateEvaluator() {
+ return new IPredicateEvaluator() {
+
+ @Override
+ public boolean evaluate(IFrameTupleAccessor fta0, int tupId0, IFrameTupleAccessor fta1, int tupId1) {
+
+ int tStart0 = fta0.getTupleStartOffset(tupId0);
+ int fStartOffset0 = fta0.getFieldSlotsLength() + tStart0;
+
+ for (int k0 : keys0) {
+ int fieldStartIx = fta0.getFieldStartOffset(tupId0, k0);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta0.getBuffer()
+ .array()[fieldStartIx + fStartOffset0]);
+ if (typeTag == ATypeTag.NULL) {
+ return false;
+ }
+ }
+
+ int tStart1 = fta1.getTupleStartOffset(tupId1);
+ int fStartOffset1 = fta1.getFieldSlotsLength() + tStart1;
+
+ for (int k1 : keys1) {
+ int fieldStartIx = fta1.getFieldStartOffset(tupId1, k1);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fta1.getBuffer()
+ .array()[fieldStartIx + fStartOffset1]);
+ if (typeTag == ATypeTag.NULL) {
+ return false;
+ }
+ }
+
+ return true; //none of the fields (from both sides) is NULL
+ }
+ };
+ }
+ };
+ }
}
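
Editorial note: the reformatted evaluator above encodes SQL-style equi-join semantics — NULL never matches NULL. A stand-alone sketch of that rule (hypothetical helper and tag value, not AsterixDB API), operating on pre-extracted type tags instead of frame accessors:

    public class NullRejectingJoinSketch {
        static final byte NULL_TAG = 13; // hypothetical tag value standing in for ATypeTag.NULL

        // Returns false as soon as any join key on either side is NULL,
        // mirroring the evaluate() logic in the provider above.
        static boolean passesEquiJoinNullCheck(byte[] leftKeyTags, byte[] rightKeyTags) {
            for (byte t : leftKeyTags) {
                if (t == NULL_TAG) return false;
            }
            for (byte t : rightKeyTags) {
                if (t == NULL_TAG) return false;
            }
            return true; // none of the keys is NULL; the comparator decides equality
        }

        public static void main(String[] args) {
            System.out.println(passesEquiJoinNullCheck(new byte[] { 1 }, new byte[] { 1 }));        // true
            System.out.println(passesEquiJoinNullCheck(new byte[] { NULL_TAG }, new byte[] { 1 })); // false
        }
    }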
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
index c20c219..6774f7d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
@@ -38,6 +38,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ARectanglePrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AStringPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ATimePrinterFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AUUIDPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AUnionPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AUnorderedlistPrinterFactory;
import edu.uci.ics.asterix.om.types.AOrderedListType;
@@ -117,6 +118,9 @@
else
return new AUnionPrinterFactory((AUnionType) aqlType);
}
+ case UUID: {
+ return AUUIDPrinterFactory.INSTANCE;
+ }
}
}
return AObjectPrinterFactory.INSTANCE;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
index 36f94a6..d67b4ed 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
@@ -43,6 +43,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AUUIDSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AUnorderedListSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AYearMonthDurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
@@ -158,6 +159,9 @@
case UNORDEREDLIST: {
return new AUnorderedListSerializerDeserializer((AUnorderedListType) aqlType);
}
+ case UUID: {
+ return AUUIDSerializerDeserializer.INSTANCE;
+ }
default: {
throw new NotImplementedException("No serializer/deserializer implemented for type "
+ aqlType.getTypeTag() + " .");
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
index 2a7dd3d..319bedb 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
@@ -56,6 +56,7 @@
case DURATION:
return EIGHTBYTETYPETRAIT;
case POINT:
+ case UUID:
return SIXTEENBYTETYPETRAIT;
case INTERVAL:
return SEVENTEENBYTETYPETRAIT;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADouble.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADouble.java
index a7cebb5..86308e1 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADouble.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADouble.java
@@ -12,73 +12,73 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.om.base;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.visitors.IOMVisitor;
-
-public class ADouble implements IAObject {
-
- protected double value;
-
- public ADouble(double value) {
- super();
- this.value = value;
- }
-
- public double getDoubleValue() {
- return value;
- }
-
- @Override
- public IAType getType() {
- return BuiltinType.ADOUBLE;
- }
-
- @Override
- public String toString() {
- return "ADouble: {" + value + "}";
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof ADouble))
- return false;
- return value == (((ADouble) obj).getDoubleValue());
- }
-
- @Override
- public int hashCode() {
- long bits = Double.doubleToLongBits(value);
- return (int) (bits ^ (bits >>> 32));
- }
-
- @Override
- public void accept(IOMVisitor visitor) throws AsterixException {
- visitor.visitADouble(this);
- }
-
- @Override
- public boolean deepEqual(IAObject obj) {
- return equals(obj);
- }
-
- @Override
- public int hash() {
- return hashCode();
- }
-
- @Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
-
- json.put("ADouble", value);
-
- return json;
- }
-}
+package edu.uci.ics.asterix.om.base;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class ADouble implements IAObject {
+
+ protected double value;
+
+ public ADouble(double value) {
+ super();
+ this.value = value;
+ }
+
+ public double getDoubleValue() {
+ return value;
+ }
+
+ @Override
+ public IAType getType() {
+ return BuiltinType.ADOUBLE;
+ }
+
+ @Override
+ public String toString() {
+ return "ADouble: {" + value + "}";
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof ADouble))
+ return false;
+ return value == (((ADouble) obj).getDoubleValue());
+ }
+
+ @Override
+ public int hashCode() {
+ long bits = Double.doubleToLongBits(value);
+ return (int) (bits ^ (bits >>> 32));
+ }
+
+ @Override
+ public void accept(IOMVisitor visitor) throws AsterixException {
+ visitor.visitADouble(this);
+ }
+
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ return equals(obj);
+ }
+
+ @Override
+ public int hash() {
+ return hashCode();
+ }
+
+ @Override
+ public JSONObject toJSON() throws JSONException {
+ JSONObject json = new JSONObject();
+
+ json.put("ADouble", value);
+
+ return json;
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInt32.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInt32.java
index dc3d0b5..c1a7543 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInt32.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInt32.java
@@ -12,88 +12,88 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.om.base;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.visitors.IOMVisitor;
-
-public class AInt32 implements IAObject {
-
- protected int value;
-
- public AInt32(int value) {
- super();
- this.value = value;
- }
-
- public AInt32(byte[] bytes, int offset, int length) {
- value = valueFromBytes(bytes, offset, length);
- }
-
- public Integer getIntegerValue() {
- return value;
- }
-
- @Override
- public IAType getType() {
- return BuiltinType.AINT32;
- }
-
- @Override
- public String toString() {
- return "AInt32: {" + value + "}";
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof AInt32)) {
- return false;
- } else {
- return value == (((AInt32) obj).getIntegerValue());
- }
- }
-
- @Override
- public int hashCode() {
- return value;
- }
-
- private static Integer valueFromBytes(byte[] bytes, int offset, int length) {
- return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
- + ((bytes[offset + 3] & 0xff) << 0);
- }
-
- public byte[] toBytes() {
- return new byte[] { (byte) (value >>> 24), (byte) (value >> 16 & 0xff), (byte) (value >> 8 & 0xff),
- (byte) (value & 0xff) };
- }
-
- @Override
- public void accept(IOMVisitor visitor) throws AsterixException {
- visitor.visitAInt32(this);
- }
-
- @Override
- public boolean deepEqual(IAObject obj) {
- return equals(obj);
- }
-
- @Override
- public int hash() {
- return hashCode();
- }
-
- @Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
-
- json.put("AInt32", value);
-
- return json;
- }
-}
+package edu.uci.ics.asterix.om.base;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class AInt32 implements IAObject {
+
+ protected int value;
+
+ public AInt32(int value) {
+ super();
+ this.value = value;
+ }
+
+ public AInt32(byte[] bytes, int offset, int length) {
+ value = valueFromBytes(bytes, offset, length);
+ }
+
+ public Integer getIntegerValue() {
+ return value;
+ }
+
+ @Override
+ public IAType getType() {
+ return BuiltinType.AINT32;
+ }
+
+ @Override
+ public String toString() {
+ return "AInt32: {" + value + "}";
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof AInt32)) {
+ return false;
+ } else {
+ return value == (((AInt32) obj).getIntegerValue());
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return value;
+ }
+
+ private static Integer valueFromBytes(byte[] bytes, int offset, int length) {
+ return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
+ + ((bytes[offset + 3] & 0xff) << 0);
+ }
+
+ public byte[] toBytes() {
+ return new byte[] { (byte) (value >>> 24), (byte) (value >> 16 & 0xff), (byte) (value >> 8 & 0xff),
+ (byte) (value & 0xff) };
+ }
+
+ @Override
+ public void accept(IOMVisitor visitor) throws AsterixException {
+ visitor.visitAInt32(this);
+ }
+
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ return equals(obj);
+ }
+
+ @Override
+ public int hash() {
+ return hashCode();
+ }
+
+ @Override
+ public JSONObject toJSON() throws JSONException {
+ JSONObject json = new JSONObject();
+
+ json.put("AInt32", value);
+
+ return json;
+ }
+}
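
Editorial note: the byte-shuffling pair in AInt32 above (toBytes / valueFromBytes) is a plain big-endian encoding of a 32-bit int. A quick self-check sketch of the same arithmetic:

    public class Int32BytesSketch {
        // Big-endian pack, as in AInt32.toBytes()
        static byte[] toBytes(int value) {
            return new byte[] { (byte) (value >>> 24), (byte) (value >> 16 & 0xff),
                    (byte) (value >> 8 & 0xff), (byte) (value & 0xff) };
        }

        // Big-endian unpack, as in AInt32.valueFromBytes()
        static int fromBytes(byte[] b) {
            return ((b[0] & 0xff) << 24) + ((b[1] & 0xff) << 16) + ((b[2] & 0xff) << 8) + (b[3] & 0xff);
        }

        public static void main(String[] args) {
            int[] samples = { 0, 1, -1, Integer.MIN_VALUE, Integer.MAX_VALUE, 0xCAFEBABE };
            for (int v : samples) {
                System.out.println(v + " -> " + fromBytes(toBytes(v))); // prints identical pairs
            }
        }
    }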
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUUID.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUUID.java
new file mode 100644
index 0000000..88c5875
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUUID.java
@@ -0,0 +1,19 @@
+package edu.uci.ics.asterix.om.base;
+
+public class AMutableUUID extends AUUID {
+ private final long[] uuidBits;
+ private final byte[] randomBytes;
+
+ public AMutableUUID(long msb, long lsb) {
+ super(msb, lsb);
+ randomBytes = new byte[16];
+ uuidBits = new long[2];
+ }
+
+ public void nextUUID() {
+ Holder.srnd.nextBytes(randomBytes);
+ uuidBitsFromBytes(uuidBits, randomBytes);
+ msb = uuidBits[0];
+ lsb = uuidBits[1];
+ }
+}
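
Editorial note: AMutableUUID above exists so a long-lived instance can be refilled in place via nextUUID() instead of allocating a fresh AUUID per value. A generic sketch of that mutable-holder pattern, independent of the AsterixDB classes:

    import java.security.SecureRandom;

    public class MutableHolderSketch {
        private static final SecureRandom RND = new SecureRandom();

        private final byte[] scratch = new byte[16]; // reused buffer, like randomBytes
        private long msb, lsb;                       // reused state, like AMutableUUID

        // Refill in place: no allocation on the hot path.
        void next() {
            RND.nextBytes(scratch);
            msb = 0;
            lsb = 0;
            for (int i = 0; i < 8; ++i)  { msb = (msb << 8) | (scratch[i] & 0xff); }
            for (int i = 8; i < 16; ++i) { lsb = (lsb << 8) | (scratch[i] & 0xff); }
        }

        public static void main(String[] args) {
            MutableHolderSketch holder = new MutableHolderSketch();
            for (int i = 0; i < 3; i++) {
                holder.next(); // same object, new value each call
                System.out.printf("%016x%016x%n", holder.msb, holder.lsb);
            }
        }
    }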
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUnorderedList.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUnorderedList.java
index fe0086b..93c0b22 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUnorderedList.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableUnorderedList.java
@@ -31,4 +31,8 @@
public void add(IAObject obj) {
values.add(obj);
}
+
+ public void clear() {
+ values.clear();
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AString.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AString.java
index a758ee6..c6b8af4 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AString.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AString.java
@@ -12,71 +12,71 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.om.base;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.visitors.IOMVisitor;
-
-public class AString implements IAObject {
-
- protected String value;
-
- public AString(String value) {
- this.value = value;
- }
-
- public String getStringValue() {
- return value;
- }
-
- @Override
- public IAType getType() {
- return BuiltinType.ASTRING;
- }
-
- @Override
- public String toString() {
- return "AString: {" + value + "}";
- }
-
- @Override
- public boolean equals(Object obj) {
- if (!(obj instanceof AString))
- return false;
- return value.equals(((AString) obj).getStringValue());
- }
-
- @Override
- public int hashCode() {
- return value.hashCode();
- }
-
- @Override
- public void accept(IOMVisitor visitor) throws AsterixException {
- visitor.visitAString(this);
- }
-
- @Override
- public boolean deepEqual(IAObject obj) {
- return equals(obj);
- }
-
- @Override
- public int hash() {
- return hashCode();
- }
-
- @Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
-
- json.put("AString", value);
-
- return json;
- }
-}
+package edu.uci.ics.asterix.om.base;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class AString implements IAObject {
+
+ protected String value;
+
+ public AString(String value) {
+ this.value = value;
+ }
+
+ public String getStringValue() {
+ return value;
+ }
+
+ @Override
+ public IAType getType() {
+ return BuiltinType.ASTRING;
+ }
+
+ @Override
+ public String toString() {
+ return "AString: {" + value + "}";
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof AString))
+ return false;
+ return value.equals(((AString) obj).getStringValue());
+ }
+
+ @Override
+ public int hashCode() {
+ return value.hashCode();
+ }
+
+ @Override
+ public void accept(IOMVisitor visitor) throws AsterixException {
+ visitor.visitAString(this);
+ }
+
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ return equals(obj);
+ }
+
+ @Override
+ public int hash() {
+ return hashCode();
+ }
+
+ @Override
+ public JSONObject toJSON() throws JSONException {
+ JSONObject json = new JSONObject();
+
+ json.put("AString", value);
+
+ return json;
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AUUID.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AUUID.java
new file mode 100644
index 0000000..d8ada69
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AUUID.java
@@ -0,0 +1,107 @@
+package edu.uci.ics.asterix.om.base;
+
+import java.security.SecureRandom;
+import java.util.UUID;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class AUUID implements IAObject {
+
+ protected static class Holder {
+ static final SecureRandom srnd = new SecureRandom();
+ }
+
+ protected long msb;
+ protected long lsb;
+
+ public AUUID(UUID uuid) {
+ msb = uuid.getMostSignificantBits();
+ lsb = uuid.getLeastSignificantBits();
+ }
+
+ public AUUID(long msb, long lsb) {
+ this.msb = msb;
+ this.lsb = lsb;
+ }
+
+ public long getMostSignificantBits() {
+ return msb;
+ }
+
+ public long getLeastSignificantBits() {
+ return lsb;
+ }
+
+ public static AUUID randomUUID() {
+ long[] bits = new long[2];
+ byte[] randomBytes = new byte[16];
+ Holder.srnd.nextBytes(randomBytes);
+ uuidBitsFromBytes(bits, randomBytes);
+ return new AUUID(bits[0], bits[1]);
+ }
+
+ protected static void uuidBitsFromBytes(long[] bits, byte[] randomBytes) {
+ bits[0] = 0;
+ bits[1] = 0;
+ randomBytes[6] &= 0x0f; /* clear version */
+ randomBytes[6] |= 0x40; /* set to version 4 */
+ randomBytes[8] &= 0x3f; /* clear variant */
+ randomBytes[8] |= 0x80; /* set to IETF variant */
+ for (int i = 0; i < 8; ++i) {
+ bits[0] = (bits[0] << 8) | (randomBytes[i] & 0xff);
+ }
+ for (int i = 8; i < 16; ++i) {
+ bits[1] = (bits[1] << 8) | (randomBytes[i] & 0xff);
+ }
+ }
+
+ @Override
+ public JSONObject toJSON() throws JSONException {
+ JSONObject json = new JSONObject();
+ json.put("AUUID", toString());
+ return json;
+ }
+
+ @Override
+ public IAType getType() {
+ return BuiltinType.AUUID;
+ }
+
+ @Override
+ public void accept(IOMVisitor visitor) throws AsterixException {
+ visitor.visitAUUID(this);
+ }
+
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ if (!(obj instanceof AUUID)) {
+ return false;
+ }
+ AUUID oUUID = (AUUID) obj;
+ return oUUID.msb == this.msb && oUUID.lsb == this.lsb;
+ }
+
+ @Override
+ public int hash() {
+ long hilo = msb ^ lsb;
+ return ((int) (hilo >> 32)) ^ (int) hilo;
+ }
+
+ @Override
+ public String toString() {
+ return "AUUID: {"
+ + (digits(msb >> 32, 8) + "-" + digits(msb >> 16, 4) + "-" + digits(msb, 4) + "-"
+ + digits(lsb >> 48, 4) + "-" + digits(lsb, 12)) + "}";
+ }
+
+ private static String digits(long val, int digits) {
+ long hi = 1L << (digits * 4);
+ return Long.toHexString(hi | (val & (hi - 1))).substring(1);
+ }
+}
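
Editorial note: uuidBitsFromBytes above implements RFC 4122 version 4 — byte 6 carries the version nibble and byte 8 the variant bits. A verification sketch that applies the same masking and checks the result with java.util.UUID:

    import java.security.SecureRandom;
    import java.util.UUID;

    public class UUIDv4BitsSketch {
        public static void main(String[] args) {
            byte[] b = new byte[16];
            new SecureRandom().nextBytes(b);
            b[6] &= 0x0f; b[6] |= 0x40;  // clear version nibble, set version 4
            b[8] &= 0x3f; b[8] |= 0x80;  // clear variant bits, set IETF variant (10xx)

            long msb = 0, lsb = 0;       // same big-endian fold as uuidBitsFromBytes
            for (int i = 0; i < 8; ++i)  { msb = (msb << 8) | (b[i] & 0xff); }
            for (int i = 8; i < 16; ++i) { lsb = (lsb << 8) | (b[i] & 0xff); }

            UUID uuid = new UUID(msb, lsb);
            System.out.println(uuid.version()); // 4
            System.out.println(uuid.variant()); // 2 (IETF / Leach-Salz)
        }
    }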
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
index 61482ff..97edb5c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
@@ -60,8 +60,7 @@
* @return
* @throws Exception
*/
- public static long parseDatePart(String dateString, int start, int length)
- throws HyracksDataException {
+ public static long parseDatePart(String dateString, int start, int length) throws HyracksDataException {
int offset = 0;
@@ -139,14 +138,14 @@
if (length > offset) {
throw new HyracksDataException("Too many chars for a date only value");
}
-
- if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)){
+
+ if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)) {
throw new HyracksDataException(dateErrorMessage);
}
-
+
return GregorianCalendarSystem.getInstance().getChronon(year, month, day, 0, 0, 0, 0, 0);
}
-
+
/**
* A copy-and-paste of {@link #parseDatePart(String, int, int)} but for a char array, in order
* to avoid object creation.
@@ -157,8 +156,7 @@
* @return
* @throws HyracksDataException
*/
- public static long parseDatePart(char[] dateString, int start, int length)
- throws HyracksDataException {
+ public static long parseDatePart(char[] dateString, int start, int length) throws HyracksDataException {
int offset = 0;
@@ -236,11 +234,11 @@
if (length > offset) {
throw new HyracksDataException("Too many chars for a date only value");
}
-
- if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)){
+
+ if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)) {
throw new HyracksDataException(dateErrorMessage);
}
-
+
return GregorianCalendarSystem.getInstance().getChronon(year, month, day, 0, 0, 0, 0, 0);
}
@@ -254,8 +252,7 @@
* @return
* @throws HyracksDataException
*/
- public static long parseDatePart(byte[] dateString, int start, int length)
- throws HyracksDataException {
+ public static long parseDatePart(byte[] dateString, int start, int length) throws HyracksDataException {
int offset = 0;
@@ -333,11 +330,11 @@
if (length > offset) {
throw new HyracksDataException("Too many chars for a date only value");
}
-
- if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)){
+
+ if (!GregorianCalendarSystem.getInstance().validate(year, month, day, 0, 0, 0, 0)) {
throw new HyracksDataException(dateErrorMessage);
}
-
+
return GregorianCalendarSystem.getInstance().getChronon(year, month, day, 0, 0, 0, 0, 0);
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index f5a6a61..299bbfb 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -26,20 +26,18 @@
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ABooleanTypeComputer;
-import edu.uci.ics.asterix.om.typecomputer.impl.ACircleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ADateTimeTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ADateTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ADoubleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AFloatTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AInt32TypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AInt64TypeComputer;
-import edu.uci.ics.asterix.om.typecomputer.impl.ALineTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ANullTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.APointTypeComputer;
-import edu.uci.ics.asterix.om.typecomputer.impl.APolygonTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ARectangleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AStringTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ATimeTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.AUUIDTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryBooleanOrNullFunctionTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringBoolOrNullTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringStringOrNullTypeComputer;
@@ -56,9 +54,9 @@
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedLocalAvgTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedMinMaxAggTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedNumericAddSubMulDivTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedNumericAggTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedNumericRoundHalfToEven2TypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedNumericUnaryFunctionTypeComputer;
-import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedNumericAggTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedSwitchCaseComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedUnaryMinusTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NotNullTypeComputer;
@@ -91,6 +89,7 @@
import edu.uci.ics.asterix.om.typecomputer.impl.OrderedListOfAStringTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OrderedListOfAnyTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.QuadStringStringOrNullTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.RecordMergeTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ScalarVersionOfAggregateResultType;
import edu.uci.ics.asterix.om.typecomputer.impl.Substring2TypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.SubstringTypeComputer;
@@ -169,6 +168,8 @@
FunctionConstants.ASTERIX_NS, "unordered-list-constructor", FunctionIdentifier.VARARGS);
// records
+ public final static FunctionIdentifier RECORD_MERGE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "record-merge", 3);
public final static FunctionIdentifier CLOSED_RECORD_CONSTRUCTOR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "closed-record-constructor", FunctionIdentifier.VARARGS);
public final static FunctionIdentifier OPEN_RECORD_CONSTRUCTOR = new FunctionIdentifier(
@@ -240,7 +241,9 @@
public final static FunctionIdentifier DATASET = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "dataset", 1);
public final static FunctionIdentifier FEED_INGEST = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "feed-ingest", 1);
+ "feed-ingest", 3);
+ public final static FunctionIdentifier FEED_INTERCEPT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "feed-intercept", 1);
public final static FunctionIdentifier INDEX_SEARCH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"index-search", FunctionIdentifier.VARARGS);
@@ -393,6 +396,8 @@
"datetime", 1);
public final static FunctionIdentifier DURATION_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"duration", 1);
+ public final static FunctionIdentifier UUID_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "uuid", 1);
public final static FunctionIdentifier YEAR_MONTH_DURATION_CONSTRUCTOR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "year-month-duration", 1);
@@ -473,7 +478,7 @@
public final static FunctionIdentifier CREATE_LINE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"create-line", 2);
public final static FunctionIdentifier CREATE_POLYGON = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-polygon", FunctionIdentifier.VARARGS);
+ "create-polygon", 1);
public final static FunctionIdentifier CREATE_CIRCLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"create-circle", 2);
public final static FunctionIdentifier CREATE_RECTANGLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -496,9 +501,14 @@
"inject-failure", 2);
public final static FunctionIdentifier CAST_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"cast-record", 1);
+ public final static FunctionIdentifier FLOW_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "flow-record", 1);
public final static FunctionIdentifier CAST_LIST = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"cast-list", 1);
+ public final static FunctionIdentifier CREATE_UUID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "create-uuid", 0);
+
// Spatial and temporal type accessors
public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "get-year", 1);
@@ -586,11 +596,7 @@
FunctionConstants.ASTERIX_NS, "" + "collection-to-sequence", 1);
public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
- IFunctionInfo finfo = registeredFunctions.get(fid);
- if (finfo == null) {
- finfo = new AsterixFunctionInfo(fid);
- }
- return finfo;
+ return registeredFunctions.get(fid);
}
public static AsterixFunctionInfo lookupFunction(FunctionIdentifier fid) {
@@ -600,72 +606,74 @@
static {
// first, take care of Algebricks builtin functions
- addFunction(IS_NULL, ABooleanTypeComputer.INSTANCE);
- addFunction(NOT, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE);
+ addFunction(IS_NULL, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(NOT, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
- addPrivateFunction(EQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(LE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(GE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(LT, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(GT, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(AND, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(NEQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(OR, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_ADD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
+ addPrivateFunction(EQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(LE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(GE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(LT, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(GT, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(AND, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(NEQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(OR, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_ADD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
// and then, Asterix builtin functions
- addPrivateFunction(NOT_NULL, NotNullTypeComputer.INSTANCE);
- addPrivateFunction(ANY_COLLECTION_MEMBER, NonTaggedCollectionMemberResultType.INSTANCE);
- addFunction(AVG, OptionalADoubleTypeComputer.INSTANCE);
- addFunction(BOOLEAN_CONSTRUCTOR, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(CARET, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
- addFunction(CIRCLE_CONSTRUCTOR, OptionalACircleTypeComputer.INSTANCE);
- addPrivateFunction(CLOSED_RECORD_CONSTRUCTOR, ClosedRecordConstructorResultType.INSTANCE);
- addPrivateFunction(CONCAT_NON_NULL, ConcatNonNullTypeComputer.INSTANCE);
+ addPrivateFunction(NOT_NULL, NotNullTypeComputer.INSTANCE, true);
+ addPrivateFunction(ANY_COLLECTION_MEMBER, NonTaggedCollectionMemberResultType.INSTANCE, true);
+ addFunction(AVG, OptionalADoubleTypeComputer.INSTANCE, true);
+ addFunction(BOOLEAN_CONSTRUCTOR, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(CARET, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
+ addFunction(CIRCLE_CONSTRUCTOR, OptionalACircleTypeComputer.INSTANCE, true);
+ addPrivateFunction(RECORD_MERGE, RecordMergeTypeComputer.INSTANCE, true);
+ addPrivateFunction(CLOSED_RECORD_CONSTRUCTOR, ClosedRecordConstructorResultType.INSTANCE, true);
+ addPrivateFunction(CONCAT_NON_NULL, ConcatNonNullTypeComputer.INSTANCE, true);
- addFunction(CONTAINS, ABooleanTypeComputer.INSTANCE);
- addFunction(COUNT, AInt64TypeComputer.INSTANCE);
- addPrivateFunction(COUNTHASHED_GRAM_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE);
- addPrivateFunction(COUNTHASHED_WORD_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE);
- addFunction(CREATE_CIRCLE, ACircleTypeComputer.INSTANCE);
- addFunction(CREATE_LINE, ALineTypeComputer.INSTANCE);
- addPrivateFunction(CREATE_MBR, ADoubleTypeComputer.INSTANCE);
- addFunction(CREATE_POINT, APointTypeComputer.INSTANCE);
- addFunction(CREATE_POLYGON, APolygonTypeComputer.INSTANCE);
- addFunction(CREATE_RECTANGLE, ARectangleTypeComputer.INSTANCE);
+ addFunction(CONTAINS, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(COUNT, AInt64TypeComputer.INSTANCE, true);
+ addPrivateFunction(COUNTHASHED_GRAM_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE, true);
+ addPrivateFunction(COUNTHASHED_WORD_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE, true);
+ addFunction(CREATE_CIRCLE, OptionalACircleTypeComputer.INSTANCE, true);
+ addFunction(CREATE_LINE, OptionalALineTypeComputer.INSTANCE, true);
+ addPrivateFunction(CREATE_MBR, OptionalADoubleTypeComputer.INSTANCE, true);
+ addFunction(CREATE_POINT, OptionalAPointTypeComputer.INSTANCE, true);
+ addFunction(CREATE_POLYGON, OptionalAPolygonTypeComputer.INSTANCE, true);
+ addFunction(CREATE_RECTANGLE, OptionalARectangleTypeComputer.INSTANCE, true);
+ addFunction(CREATE_UUID, AUUIDTypeComputer.INSTANCE, false);
- addFunction(DATE_CONSTRUCTOR, OptionalADateTypeComputer.INSTANCE);
- addFunction(DATETIME_CONSTRUCTOR, OptionalADateTimeTypeComputer.INSTANCE);
- addFunction(DOUBLE_CONSTRUCTOR, OptionalADoubleTypeComputer.INSTANCE);
- addFunction(DURATION_CONSTRUCTOR, OptionalADurationTypeComputer.INSTANCE);
- addFunction(YEAR_MONTH_DURATION_CONSTRUCTOR, OptionalAYearMonthDurationTypeComputer.INSTANCE);
- addFunction(DAY_TIME_DURATION_CONSTRUCTOR, OptionalADayTimeDurationTypeComputer.INSTANCE);
- addFunction(EDIT_DISTANCE, AInt32TypeComputer.INSTANCE);
- addFunction(EDIT_DISTANCE_CHECK, OrderedListOfAnyTypeComputer.INSTANCE);
- addPrivateFunction(EDIT_DISTANCE_STRING_IS_FILTERABLE, ABooleanTypeComputer.INSTANCE);
- addPrivateFunction(EDIT_DISTANCE_LIST_IS_FILTERABLE, ABooleanTypeComputer.INSTANCE);
+ addFunction(DATE_CONSTRUCTOR, OptionalADateTypeComputer.INSTANCE, true);
+ addFunction(DATETIME_CONSTRUCTOR, OptionalADateTimeTypeComputer.INSTANCE, true);
+ addFunction(DOUBLE_CONSTRUCTOR, OptionalADoubleTypeComputer.INSTANCE, true);
+ addFunction(DURATION_CONSTRUCTOR, OptionalADurationTypeComputer.INSTANCE, true);
+ addFunction(YEAR_MONTH_DURATION_CONSTRUCTOR, OptionalAYearMonthDurationTypeComputer.INSTANCE, true);
+ addFunction(DAY_TIME_DURATION_CONSTRUCTOR, OptionalADayTimeDurationTypeComputer.INSTANCE, true);
+ addFunction(EDIT_DISTANCE, AInt32TypeComputer.INSTANCE, true);
+ addFunction(EDIT_DISTANCE_CHECK, OrderedListOfAnyTypeComputer.INSTANCE, true);
+ addPrivateFunction(EDIT_DISTANCE_STRING_IS_FILTERABLE, ABooleanTypeComputer.INSTANCE, true);
+ addPrivateFunction(EDIT_DISTANCE_LIST_IS_FILTERABLE, ABooleanTypeComputer.INSTANCE, true);
addPrivateFunction(EMBED_TYPE, new IResultTypeComputer() {
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> mp) throws AlgebricksException {
return (IAType) BuiltinType.ANY;
}
- });
- addPrivateFunction(EMPTY_STREAM, ABooleanTypeComputer.INSTANCE);
- addFunction(ENDS_WITH, ABooleanTypeComputer.INSTANCE);
+ }, true);
+ addPrivateFunction(EMPTY_STREAM, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(ENDS_WITH, ABooleanTypeComputer.INSTANCE, true);
// add(FIELD_ACCESS, NonTaggedFieldAccessByNameResultType.INSTANCE);
- addPrivateFunction(FIELD_ACCESS_BY_INDEX, FieldAccessByIndexResultType.INSTANCE);
- addPrivateFunction(FIELD_ACCESS_BY_NAME, NonTaggedFieldAccessByNameResultType.INSTANCE);
- addFunction(FLOAT_CONSTRUCTOR, OptionalAFloatTypeComputer.INSTANCE);
- addPrivateFunction(FUZZY_EQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addPrivateFunction(GET_HANDLE, null); // TODO
- addPrivateFunction(GET_ITEM, NonTaggedGetItemResultType.INSTANCE);
- addPrivateFunction(GET_DATA, null); // TODO
- addPrivateFunction(GLOBAL_AVG, OptionalADoubleTypeComputer.INSTANCE);
- addPrivateFunction(GRAM_TOKENS, OrderedListOfAStringTypeComputer.INSTANCE);
- addFunction(GLOBAL_AVG, OptionalADoubleTypeComputer.INSTANCE);
- addPrivateFunction(HASHED_GRAM_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE);
- addPrivateFunction(HASHED_WORD_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE);
+ addPrivateFunction(FIELD_ACCESS_BY_INDEX, FieldAccessByIndexResultType.INSTANCE, true);
+ addPrivateFunction(FIELD_ACCESS_BY_NAME, NonTaggedFieldAccessByNameResultType.INSTANCE, true);
+ addFunction(FLOAT_CONSTRUCTOR, OptionalAFloatTypeComputer.INSTANCE, true);
+ addPrivateFunction(FUZZY_EQ, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addPrivateFunction(GET_HANDLE, null, true); // TODO
+ addPrivateFunction(GET_ITEM, NonTaggedGetItemResultType.INSTANCE, true);
+ addPrivateFunction(GET_DATA, null, true); // TODO
+ addPrivateFunction(GLOBAL_AVG, OptionalADoubleTypeComputer.INSTANCE, true);
+ addPrivateFunction(GRAM_TOKENS, OrderedListOfAStringTypeComputer.INSTANCE, true);
+ addPrivateFunction(HASHED_GRAM_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE, true);
+ addPrivateFunction(HASHED_WORD_TOKENS, OrderedListOfAInt32TypeComputer.INSTANCE, true);
addPrivateFunction(INDEX_SEARCH, new IResultTypeComputer() {
@Override
@@ -673,95 +681,95 @@
IMetadataProvider<?, ?> mp) throws AlgebricksException {
return BuiltinType.ANY; // TODO
}
- });
- addFunction(INT8_CONSTRUCTOR, OptionalAInt8TypeComputer.INSTANCE);
- addFunction(INT16_CONSTRUCTOR, OptionalAInt16TypeComputer.INSTANCE);
- addFunction(INT32_CONSTRUCTOR, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(INT64_CONSTRUCTOR, OptionalAInt64TypeComputer.INSTANCE);
- addFunction(LEN, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(LIKE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE);
- addFunction(LINE_CONSTRUCTOR, OptionalALineTypeComputer.INSTANCE);
- addPrivateFunction(LISTIFY, OrderedListConstructorResultType.INSTANCE);
- addPrivateFunction(LOCAL_AVG, NonTaggedLocalAvgTypeComputer.INSTANCE);
- addPrivateFunction(MAKE_FIELD_INDEX_HANDLE, null); // TODO
- addPrivateFunction(MAKE_FIELD_NAME_HANDLE, null); // TODO
- addFunction(MAX, NonTaggedMinMaxAggTypeComputer.INSTANCE);
- addPrivateFunction(LOCAL_MAX, NonTaggedMinMaxAggTypeComputer.INSTANCE);
- addFunction(MIN, NonTaggedMinMaxAggTypeComputer.INSTANCE);
- addPrivateFunction(LOCAL_MIN, NonTaggedMinMaxAggTypeComputer.INSTANCE);
- addPrivateFunction(NON_EMPTY_STREAM, ABooleanTypeComputer.INSTANCE);
- addFunction(NULL_CONSTRUCTOR, ANullTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_UNARY_MINUS, NonTaggedUnaryMinusTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_SUBTRACT, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_MULTIPLY, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_DIVIDE, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_MOD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
- addPrivateFunction(NUMERIC_IDIV, AInt32TypeComputer.INSTANCE);
+ }, true);
+ addFunction(INT8_CONSTRUCTOR, OptionalAInt8TypeComputer.INSTANCE, true);
+ addFunction(INT16_CONSTRUCTOR, OptionalAInt16TypeComputer.INSTANCE, true);
+ addFunction(INT32_CONSTRUCTOR, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(INT64_CONSTRUCTOR, OptionalAInt64TypeComputer.INSTANCE, true);
+ addFunction(LEN, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(LIKE, BinaryBooleanOrNullFunctionTypeComputer.INSTANCE, true);
+ addFunction(LINE_CONSTRUCTOR, OptionalALineTypeComputer.INSTANCE, true);
+ addPrivateFunction(LISTIFY, OrderedListConstructorResultType.INSTANCE, true);
+ addPrivateFunction(LOCAL_AVG, NonTaggedLocalAvgTypeComputer.INSTANCE, true);
+ addPrivateFunction(MAKE_FIELD_INDEX_HANDLE, null, true); // TODO
+ addPrivateFunction(MAKE_FIELD_NAME_HANDLE, null, true); // TODO
+ addFunction(MAX, NonTaggedMinMaxAggTypeComputer.INSTANCE, true);
+ addPrivateFunction(LOCAL_MAX, NonTaggedMinMaxAggTypeComputer.INSTANCE, true);
+ addFunction(MIN, NonTaggedMinMaxAggTypeComputer.INSTANCE, true);
+ addPrivateFunction(LOCAL_MIN, NonTaggedMinMaxAggTypeComputer.INSTANCE, true);
+ addPrivateFunction(NON_EMPTY_STREAM, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(NULL_CONSTRUCTOR, ANullTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_UNARY_MINUS, NonTaggedUnaryMinusTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_SUBTRACT, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_MULTIPLY, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_DIVIDE, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_MOD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE, true);
+ addPrivateFunction(NUMERIC_IDIV, AInt32TypeComputer.INSTANCE, true);
- addFunction(NUMERIC_ABS, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE);
- addFunction(NUMERIC_CEILING, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE);
- addFunction(NUMERIC_FLOOR, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE);
- addFunction(NUMERIC_ROUND, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE);
- addFunction(NUMERIC_ROUND_HALF_TO_EVEN, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE);
- addFunction(NUMERIC_ROUND_HALF_TO_EVEN2, NonTaggedNumericRoundHalfToEven2TypeComputer.INSTANCE);
+ addFunction(NUMERIC_ABS, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE, true);
+ addFunction(NUMERIC_CEILING, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE, true);
+ addFunction(NUMERIC_FLOOR, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE, true);
+ addFunction(NUMERIC_ROUND, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE, true);
+ addFunction(NUMERIC_ROUND_HALF_TO_EVEN, NonTaggedNumericUnaryFunctionTypeComputer.INSTANCE, true);
+ addFunction(NUMERIC_ROUND_HALF_TO_EVEN2, NonTaggedNumericRoundHalfToEven2TypeComputer.INSTANCE, true);
- addFunction(STRING_TO_CODEPOINT, OrderedListOfAInt32TypeComputer.INSTANCE);
- addFunction(CODEPOINT_TO_STRING, AStringTypeComputer.INSTANCE);
- addFunction(STRING_CONCAT, OptionalAStringTypeComputer.INSTANCE);
- addFunction(SUBSTRING2, Substring2TypeComputer.INSTANCE);
- addFunction(STRING_LENGTH, UnaryStringInt32OrNullTypeComputer.INSTANCE);
- addFunction(STRING_LOWERCASE, UnaryStringOrNullTypeComputer.INSTANCE);
- addFunction(STRING_START_WITH, BinaryStringBoolOrNullTypeComputer.INSTANCE);
- addFunction(STRING_END_WITH, BinaryStringBoolOrNullTypeComputer.INSTANCE);
- addFunction(STRING_MATCHES, BinaryStringBoolOrNullTypeComputer.INSTANCE);
- addFunction(STRING_MATCHES_WITH_FLAG, TripleStringBoolOrNullTypeComputer.INSTANCE);
- addFunction(STRING_REPLACE, TripleStringStringOrNullTypeComputer.INSTANCE);
- addFunction(STRING_REPLACE_WITH_FLAG, QuadStringStringOrNullTypeComputer.INSTANCE);
- addFunction(SUBSTRING_BEFORE, BinaryStringStringOrNullTypeComputer.INSTANCE);
- addFunction(SUBSTRING_AFTER, BinaryStringStringOrNullTypeComputer.INSTANCE);
- addPrivateFunction(STRING_EQUAL, BinaryStringBoolOrNullTypeComputer.INSTANCE);
- addFunction(STRING_JOIN, AStringTypeComputer.INSTANCE);
+ addFunction(STRING_TO_CODEPOINT, OrderedListOfAInt32TypeComputer.INSTANCE, true);
+ addFunction(CODEPOINT_TO_STRING, AStringTypeComputer.INSTANCE, true);
+ addFunction(STRING_CONCAT, OptionalAStringTypeComputer.INSTANCE, true);
+ addFunction(SUBSTRING2, Substring2TypeComputer.INSTANCE, true);
+ addFunction(STRING_LENGTH, UnaryStringInt32OrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_LOWERCASE, UnaryStringOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_START_WITH, BinaryStringBoolOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_END_WITH, BinaryStringBoolOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_MATCHES, BinaryStringBoolOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_MATCHES_WITH_FLAG, TripleStringBoolOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_REPLACE, TripleStringStringOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_REPLACE_WITH_FLAG, QuadStringStringOrNullTypeComputer.INSTANCE, true);
+ addFunction(SUBSTRING_BEFORE, BinaryStringStringOrNullTypeComputer.INSTANCE, true);
+ addFunction(SUBSTRING_AFTER, BinaryStringStringOrNullTypeComputer.INSTANCE, true);
+ addPrivateFunction(STRING_EQUAL, BinaryStringBoolOrNullTypeComputer.INSTANCE, true);
+ addFunction(STRING_JOIN, AStringTypeComputer.INSTANCE, true);
- addPrivateFunction(OPEN_RECORD_CONSTRUCTOR, OpenRecordConstructorResultType.INSTANCE);
- addPrivateFunction(ORDERED_LIST_CONSTRUCTOR, OrderedListConstructorResultType.INSTANCE);
- addFunction(POINT_CONSTRUCTOR, OptionalAPointTypeComputer.INSTANCE);
- addFunction(POINT3D_CONSTRUCTOR, OptionalAPoint3DTypeComputer.INSTANCE);
- addFunction(POLYGON_CONSTRUCTOR, OptionalAPolygonTypeComputer.INSTANCE);
- addPrivateFunction(PREFIX_LEN_JACCARD, AInt32TypeComputer.INSTANCE);
- addFunction(RANGE, AInt32TypeComputer.INSTANCE);
- addFunction(RECTANGLE_CONSTRUCTOR, OptionalARectangleTypeComputer.INSTANCE);
+ addPrivateFunction(OPEN_RECORD_CONSTRUCTOR, OpenRecordConstructorResultType.INSTANCE, true);
+ addPrivateFunction(ORDERED_LIST_CONSTRUCTOR, OrderedListConstructorResultType.INSTANCE, true);
+ addFunction(POINT_CONSTRUCTOR, OptionalAPointTypeComputer.INSTANCE, true);
+ addFunction(POINT3D_CONSTRUCTOR, OptionalAPoint3DTypeComputer.INSTANCE, true);
+ addFunction(POLYGON_CONSTRUCTOR, OptionalAPolygonTypeComputer.INSTANCE, true);
+ addPrivateFunction(PREFIX_LEN_JACCARD, AInt32TypeComputer.INSTANCE, true);
+ addFunction(RANGE, AInt32TypeComputer.INSTANCE, true);
+ addFunction(RECTANGLE_CONSTRUCTOR, OptionalARectangleTypeComputer.INSTANCE, true);
- addFunction(SCALAR_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
- addFunction(SCALAR_COUNT, AInt64TypeComputer.INSTANCE);
- addPrivateFunction(SCALAR_GLOBAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
- addPrivateFunction(SCALAR_LOCAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE);
- addFunction(SCALAR_MAX, ScalarVersionOfAggregateResultType.INSTANCE);
- addFunction(SCALAR_MIN, ScalarVersionOfAggregateResultType.INSTANCE);
- addFunction(SCALAR_SUM, ScalarVersionOfAggregateResultType.INSTANCE);
- addPrivateFunction(SCAN_COLLECTION, NonTaggedCollectionMemberResultType.INSTANCE);
- addPrivateFunction(SERIAL_AVG, OptionalADoubleTypeComputer.INSTANCE);
- addPrivateFunction(SERIAL_COUNT, AInt64TypeComputer.INSTANCE);
- addPrivateFunction(SERIAL_GLOBAL_AVG, OptionalADoubleTypeComputer.INSTANCE);
- addPrivateFunction(SERIAL_LOCAL_AVG, NonTaggedLocalAvgTypeComputer.INSTANCE);
- addPrivateFunction(SERIAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE);
- addPrivateFunction(SERIAL_LOCAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE);
- addFunction(SIMILARITY_JACCARD, AFloatTypeComputer.INSTANCE);
- addFunction(SIMILARITY_JACCARD_CHECK, OrderedListOfAnyTypeComputer.INSTANCE);
- addPrivateFunction(SIMILARITY_JACCARD_SORTED, AFloatTypeComputer.INSTANCE);
- addPrivateFunction(SIMILARITY_JACCARD_SORTED_CHECK, OrderedListOfAnyTypeComputer.INSTANCE);
- addPrivateFunction(SIMILARITY_JACCARD_PREFIX, AFloatTypeComputer.INSTANCE);
- addPrivateFunction(SIMILARITY_JACCARD_PREFIX_CHECK, OrderedListOfAnyTypeComputer.INSTANCE);
- addFunction(SPATIAL_AREA, ADoubleTypeComputer.INSTANCE);
- addFunction(SPATIAL_CELL, ARectangleTypeComputer.INSTANCE);
- addFunction(SPATIAL_DISTANCE, ADoubleTypeComputer.INSTANCE);
- addFunction(SPATIAL_INTERSECT, ABooleanTypeComputer.INSTANCE);
- addFunction(GET_POINT_X_COORDINATE_ACCESSOR, ADoubleTypeComputer.INSTANCE);
- addFunction(GET_POINT_Y_COORDINATE_ACCESSOR, ADoubleTypeComputer.INSTANCE);
- addFunction(GET_CIRCLE_RADIUS_ACCESSOR, ADoubleTypeComputer.INSTANCE);
- addFunction(GET_CIRCLE_CENTER_ACCESSOR, APointTypeComputer.INSTANCE);
- addFunction(GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR, OrderedListOfAPointTypeComputer.INSTANCE);
- addFunction(STARTS_WITH, ABooleanTypeComputer.INSTANCE);
- addFunction(STRING_CONSTRUCTOR, OptionalAStringTypeComputer.INSTANCE);
+ addFunction(SCALAR_AVG, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addFunction(SCALAR_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addPrivateFunction(SCALAR_GLOBAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addPrivateFunction(SCALAR_LOCAL_AVG, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addFunction(SCALAR_MAX, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addFunction(SCALAR_MIN, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addFunction(SCALAR_SUM, ScalarVersionOfAggregateResultType.INSTANCE, true);
+ addPrivateFunction(SCAN_COLLECTION, NonTaggedCollectionMemberResultType.INSTANCE, true);
+ addPrivateFunction(SERIAL_AVG, OptionalADoubleTypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_COUNT, AInt64TypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_GLOBAL_AVG, OptionalADoubleTypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_LOCAL_AVG, NonTaggedLocalAvgTypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE, true);
+ addPrivateFunction(SERIAL_LOCAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE, true);
+ addFunction(SIMILARITY_JACCARD, AFloatTypeComputer.INSTANCE, true);
+ addFunction(SIMILARITY_JACCARD_CHECK, OrderedListOfAnyTypeComputer.INSTANCE, true);
+ addPrivateFunction(SIMILARITY_JACCARD_SORTED, AFloatTypeComputer.INSTANCE, true);
+ addPrivateFunction(SIMILARITY_JACCARD_SORTED_CHECK, OrderedListOfAnyTypeComputer.INSTANCE, true);
+ addPrivateFunction(SIMILARITY_JACCARD_PREFIX, AFloatTypeComputer.INSTANCE, true);
+ addPrivateFunction(SIMILARITY_JACCARD_PREFIX_CHECK, OrderedListOfAnyTypeComputer.INSTANCE, true);
+ addFunction(SPATIAL_AREA, ADoubleTypeComputer.INSTANCE, true);
+ addFunction(SPATIAL_CELL, ARectangleTypeComputer.INSTANCE, true);
+ addFunction(SPATIAL_DISTANCE, ADoubleTypeComputer.INSTANCE, true);
+ addFunction(SPATIAL_INTERSECT, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(GET_POINT_X_COORDINATE_ACCESSOR, ADoubleTypeComputer.INSTANCE, true);
+ addFunction(GET_POINT_Y_COORDINATE_ACCESSOR, ADoubleTypeComputer.INSTANCE, true);
+ addFunction(GET_CIRCLE_RADIUS_ACCESSOR, ADoubleTypeComputer.INSTANCE, true);
+ addFunction(GET_CIRCLE_CENTER_ACCESSOR, APointTypeComputer.INSTANCE, true);
+ addFunction(GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR, OrderedListOfAPointTypeComputer.INSTANCE, true);
+ addFunction(STARTS_WITH, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(STRING_CONSTRUCTOR, OptionalAStringTypeComputer.INSTANCE, true);
addPrivateFunction(SUBSET_COLLECTION, new IResultTypeComputer() {
@Override
@@ -798,96 +806,95 @@
}
}
}
- });
- addFunction(SUBSTRING, SubstringTypeComputer.INSTANCE);
- addFunction(SUM, NonTaggedNumericAggTypeComputer.INSTANCE);
- addPrivateFunction(LOCAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE);
- addFunction(SWITCH_CASE, NonTaggedSwitchCaseComputer.INSTANCE);
- addPrivateFunction(REG_EXP, ABooleanTypeComputer.INSTANCE);
- addFunction(INJECT_FAILURE, InjectFailureTypeComputer.INSTANCE);
- addPrivateFunction(CAST_RECORD, CastRecordResultTypeComputer.INSTANCE);
- addFunction(CAST_LIST, CastListResultTypeComputer.INSTANCE);
+ }, true);
+ addFunction(SUBSTRING, SubstringTypeComputer.INSTANCE, true);
+ addFunction(SUM, NonTaggedNumericAggTypeComputer.INSTANCE, true);
+ addPrivateFunction(LOCAL_SUM, NonTaggedNumericAggTypeComputer.INSTANCE, true);
+ addFunction(SWITCH_CASE, NonTaggedSwitchCaseComputer.INSTANCE, true);
+ addPrivateFunction(REG_EXP, ABooleanTypeComputer.INSTANCE, true);
+ addFunction(INJECT_FAILURE, InjectFailureTypeComputer.INSTANCE, true);
+ addPrivateFunction(CAST_RECORD, CastRecordResultTypeComputer.INSTANCE, true);
+ addFunction(CAST_LIST, CastListResultTypeComputer.INSTANCE, true);
- addFunction(TID, AInt32TypeComputer.INSTANCE);
- addFunction(TIME_CONSTRUCTOR, OptionalATimeTypeComputer.INSTANCE);
- addPrivateFunction(TYPE_OF, null);
- addPrivateFunction(UNORDERED_LIST_CONSTRUCTOR, UnorderedListConstructorResultType.INSTANCE);
+ addFunction(TID, AInt32TypeComputer.INSTANCE, true);
+ addFunction(TIME_CONSTRUCTOR, OptionalATimeTypeComputer.INSTANCE, true);
+ addPrivateFunction(TYPE_OF, null, true);
+ addPrivateFunction(UNORDERED_LIST_CONSTRUCTOR, UnorderedListConstructorResultType.INSTANCE, true);
addFunction(WORD_TOKENS, new IResultTypeComputer() {
-
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> mp) throws AlgebricksException {
return new AOrderedListType(BuiltinType.ASTRING, "string");
}
- });
+ }, true);
// temporal type accessors
- addFunction(ACCESSOR_TEMPORAL_YEAR, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_MONTH, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_DAY, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_HOUR, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_MIN, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_SEC, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_MILLISEC, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_INTERVAL_START, OptionalATemporalInstanceTypeComputer.INSTANCE);
- addFunction(ACCESSOR_TEMPORAL_INTERVAL_END, OptionalATemporalInstanceTypeComputer.INSTANCE);
+ addFunction(ACCESSOR_TEMPORAL_YEAR, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_MONTH, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_DAY, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_HOUR, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_MIN, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_SEC, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_MILLISEC, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_INTERVAL_START, OptionalATemporalInstanceTypeComputer.INSTANCE, true);
+ addFunction(ACCESSOR_TEMPORAL_INTERVAL_END, OptionalATemporalInstanceTypeComputer.INSTANCE, true);
// temporal functions
- addFunction(DATE_FROM_UNIX_TIME_IN_DAYS, OptionalADateTypeComputer.INSTANCE);
- addFunction(DATE_FROM_DATETIME, OptionalADateTypeComputer.INSTANCE);
- addFunction(TIME_FROM_UNIX_TIME_IN_MS, OptionalATimeTypeComputer.INSTANCE);
- addFunction(TIME_FROM_DATETIME, OptionalATimeTypeComputer.INSTANCE);
- addFunction(DATETIME_FROM_DATE_TIME, OptionalADateTimeTypeComputer.INSTANCE);
- addFunction(DATETIME_FROM_UNIX_TIME_IN_MS, OptionalADateTimeTypeComputer.INSTANCE);
- addFunction(CALENDAR_DURATION_FROM_DATETIME, OptionalADurationTypeComputer.INSTANCE);
- addFunction(CALENDAR_DURATION_FROM_DATE, OptionalADurationTypeComputer.INSTANCE);
- addFunction(ADJUST_DATETIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
- addFunction(ADJUST_TIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
- addFunction(INTERVAL_BEFORE, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_AFTER, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_MEETS, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_MET_BY, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_OVERLAPS, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_OVERLAPPED_BY, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(OVERLAP, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_STARTS, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_STARTED_BY, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_COVERS, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_COVERED_BY, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_ENDS, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(INTERVAL_ENDED_BY, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(CURRENT_DATE, ADateTypeComputer.INSTANCE);
- addFunction(CURRENT_TIME, ATimeTypeComputer.INSTANCE);
- addFunction(CURRENT_DATETIME, ADateTimeTypeComputer.INSTANCE);
- addFunction(DAY_TIME_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE);
- addPrivateFunction(DAY_TIME_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE);
- addPrivateFunction(YEAR_MONTH_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE);
- addPrivateFunction(YEAR_MONTH_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE);
- addPrivateFunction(DURATION_EQUAL, OptionalABooleanTypeComputer.INSTANCE);
- addFunction(DURATION_FROM_MONTHS, OptionalADurationTypeComputer.INSTANCE);
- addFunction(DURATION_FROM_MILLISECONDS, OptionalADurationTypeComputer.INSTANCE);
- addFunction(MONTHS_FROM_YEAR_MONTH_DURATION, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(MILLISECONDS_FROM_DAY_TIME_DURATION, OptionalAInt64TypeComputer.INSTANCE);
- addFunction(GET_DAY_TIME_DURATION, OptionalADayTimeDurationTypeComputer.INSTANCE);
- addFunction(GET_YEAR_MONTH_DURATION, OptionalAYearMonthDurationTypeComputer.INSTANCE);
- addFunction(INTERVAL_BIN, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(DAY_OF_WEEK, OptionalAInt32TypeComputer.INSTANCE);
- addFunction(PARSE_DATE, OptionalADateTypeComputer.INSTANCE);
- addFunction(PARSE_TIME, OptionalATimeTypeComputer.INSTANCE);
- addFunction(PARSE_DATETIME, OptionalADateTimeTypeComputer.INSTANCE);
- addFunction(PRINT_DATE, OptionalAStringTypeComputer.INSTANCE);
- addFunction(PRINT_TIME, OptionalAStringTypeComputer.INSTANCE);
- addFunction(PRINT_DATETIME, OptionalAStringTypeComputer.INSTANCE);
+ addFunction(DATE_FROM_UNIX_TIME_IN_DAYS, OptionalADateTypeComputer.INSTANCE, true);
+ addFunction(DATE_FROM_DATETIME, OptionalADateTypeComputer.INSTANCE, true);
+ addFunction(TIME_FROM_UNIX_TIME_IN_MS, OptionalATimeTypeComputer.INSTANCE, true);
+ addFunction(TIME_FROM_DATETIME, OptionalATimeTypeComputer.INSTANCE, true);
+ addFunction(DATETIME_FROM_DATE_TIME, OptionalADateTimeTypeComputer.INSTANCE, true);
+ addFunction(DATETIME_FROM_UNIX_TIME_IN_MS, OptionalADateTimeTypeComputer.INSTANCE, true);
+ addFunction(CALENDAR_DURATION_FROM_DATETIME, OptionalADurationTypeComputer.INSTANCE, true);
+ addFunction(CALENDAR_DURATION_FROM_DATE, OptionalADurationTypeComputer.INSTANCE, true);
+ addFunction(ADJUST_DATETIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE, true);
+ addFunction(ADJUST_TIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_BEFORE, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_AFTER, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_MEETS, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_MET_BY, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_OVERLAPS, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_OVERLAPPED_BY, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(OVERLAP, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_STARTS, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_STARTED_BY, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_COVERS, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_COVERED_BY, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_ENDS, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_ENDED_BY, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(CURRENT_DATE, ADateTypeComputer.INSTANCE, true);
+ addFunction(CURRENT_TIME, ATimeTypeComputer.INSTANCE, true);
+ addFunction(CURRENT_DATETIME, ADateTimeTypeComputer.INSTANCE, true);
+ addFunction(DAY_TIME_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE, true);
+ addPrivateFunction(DAY_TIME_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE, true);
+ addPrivateFunction(YEAR_MONTH_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE, true);
+ addPrivateFunction(YEAR_MONTH_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE, true);
+ addPrivateFunction(DURATION_EQUAL, OptionalABooleanTypeComputer.INSTANCE, true);
+ addFunction(DURATION_FROM_MONTHS, OptionalADurationTypeComputer.INSTANCE, true);
+ addFunction(DURATION_FROM_MILLISECONDS, OptionalADurationTypeComputer.INSTANCE, true);
+ addFunction(MONTHS_FROM_YEAR_MONTH_DURATION, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(MILLISECONDS_FROM_DAY_TIME_DURATION, OptionalAInt64TypeComputer.INSTANCE, true);
+ addFunction(GET_DAY_TIME_DURATION, OptionalADayTimeDurationTypeComputer.INSTANCE, true);
+ addFunction(GET_YEAR_MONTH_DURATION, OptionalAYearMonthDurationTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_BIN, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(DAY_OF_WEEK, OptionalAInt32TypeComputer.INSTANCE, true);
+ addFunction(PARSE_DATE, OptionalADateTypeComputer.INSTANCE, true);
+ addFunction(PARSE_TIME, OptionalATimeTypeComputer.INSTANCE, true);
+ addFunction(PARSE_DATETIME, OptionalADateTimeTypeComputer.INSTANCE, true);
+ addFunction(PRINT_DATE, OptionalAStringTypeComputer.INSTANCE, true);
+ addFunction(PRINT_TIME, OptionalAStringTypeComputer.INSTANCE, true);
+ addFunction(PRINT_DATETIME, OptionalAStringTypeComputer.INSTANCE, true);
// interval constructors
- addFunction(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(INTERVAL_CONSTRUCTOR_TIME, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(INTERVAL_CONSTRUCTOR_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(INTERVAL_CONSTRUCTOR_START_FROM_DATE, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(INTERVAL_CONSTRUCTOR_START_FROM_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
- addFunction(INTERVAL_CONSTRUCTOR_START_FROM_TIME, OptionalAIntervalTypeComputer.INSTANCE);
+ addFunction(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_CONSTRUCTOR_TIME, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_CONSTRUCTOR_DATETIME, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_CONSTRUCTOR_START_FROM_DATE, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_CONSTRUCTOR_START_FROM_DATETIME, OptionalAIntervalTypeComputer.INSTANCE, true);
+ addFunction(INTERVAL_CONSTRUCTOR_START_FROM_TIME, OptionalAIntervalTypeComputer.INSTANCE, true);
- addPrivateFunction(COLLECTION_TO_SEQUENCE, CollectionToSequenceTypeComputer.INSTANCE);
+ addPrivateFunction(COLLECTION_TO_SEQUENCE, CollectionToSequenceTypeComputer.INSTANCE, true);
String metadataFunctionLoaderClassName = "edu.uci.ics.asterix.metadata.functions.MetadataBuiltinFunctions";
try {
@@ -962,12 +969,14 @@
static {
datasetFunctions.add(getAsterixFunctionInfo(DATASET));
datasetFunctions.add(getAsterixFunctionInfo(FEED_INGEST));
+ datasetFunctions.add(getAsterixFunctionInfo(FEED_INTERCEPT));
datasetFunctions.add(getAsterixFunctionInfo(INDEX_SEARCH));
}
static {
addUnnestFun(DATASET, false);
addUnnestFun(FEED_INGEST, false);
+ addUnnestFun(FEED_INTERCEPT, false);
addUnnestFun(RANGE, true);
addUnnestFun(SCAN_COLLECTION, false);
addUnnestFun(SUBSET_COLLECTION, false);
@@ -981,9 +990,10 @@
return datasetFunctions.contains(getAsterixFunctionInfo(fi));
}
+ /*
public static boolean isBuiltinCompilerFunction(FunctionIdentifier fi, boolean includePrivateFunctions) {
return builtinPublicFunctionsSet.keySet().contains(getAsterixFunctionInfo(fi));
- }
+ }*/
public static boolean isBuiltinCompilerFunction(FunctionSignature signature, boolean includePrivateFunctions) {
@@ -1086,18 +1096,18 @@
return finfo == null ? null : finfo.getFunctionIdentifier();
}
- public static void addFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer) {
- IFunctionInfo functionInfo = getAsterixFunctionInfo(fi);
+ public static void addFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer, boolean isFunctional) {
+ IFunctionInfo functionInfo = new AsterixFunctionInfo(fi, isFunctional);
builtinPublicFunctionsSet.put(functionInfo, functionInfo);
funTypeComputer.put(functionInfo, typeComputer);
- registeredFunctions.put(fi);
+ registeredFunctions.put(fi, functionInfo);
}
- public static void addPrivateFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer) {
- IFunctionInfo functionInfo = getAsterixFunctionInfo(fi);
+ public static void addPrivateFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer, boolean isFunctional) {
+ IFunctionInfo functionInfo = new AsterixFunctionInfo(fi, isFunctional);
builtinPrivateFunctionsSet.put(functionInfo, functionInfo);
funTypeComputer.put(functionInfo, typeComputer);
- registeredFunctions.put(fi);
+ registeredFunctions.put(fi, functionInfo);
}
private static void addAgg(FunctionIdentifier fi) {
@@ -1128,7 +1138,7 @@
public static boolean isSpatialFilterFunction(FunctionIdentifier fi) {
return spatialFilterFunctions.get(getAsterixFunctionInfo(fi)) != null;
}
-
+
static {
similarityFunctions.add(getAsterixFunctionInfo(SIMILARITY_JACCARD));
similarityFunctions.add(getAsterixFunctionInfo(SIMILARITY_JACCARD_CHECK));
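Across the hunks above, every registration now carries an isFunctional flag, and create-uuid is the only builtin registered with false. A sketch of the intended downstream effect, assuming AbstractFunctionInfo exposes the flag as isFunctional(), which this diff does not show:

    // Non-functional calls such as create-uuid() may produce a different value
    // on every evaluation, so an optimizer must not constant-fold or reuse them.
    IFunctionInfo uuidInfo = AsterixBuiltinFunctions.getAsterixFunctionInfo(
            AsterixBuiltinFunctions.CREATE_UUID);
    if (!uuidInfo.isFunctional()) {
        // skip constant folding and common-subexpression elimination for this call
    }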
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixExternalFunctionInfo.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixExternalFunctionInfo.java
index 0f08787..ce0cf82 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixExternalFunctionInfo.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixExternalFunctionInfo.java
@@ -22,6 +22,8 @@
public class AsterixExternalFunctionInfo extends AsterixFunctionInfo implements IExternalFunctionInfo {
+ private static final long serialVersionUID = 1L;
+
private final IResultTypeComputer rtc;
private final List<IAType> argumentTypes;
private final String body;
@@ -29,20 +31,20 @@
private final FunctionKind kind;
private final IAType returnType;
- public AsterixExternalFunctionInfo(){
+ public AsterixExternalFunctionInfo() {
super();
rtc = null;
- argumentTypes= null;
+ argumentTypes = null;
body = null;
- language=null;
+ language = null;
kind = null;
returnType = null;
-
+
}
-
+
public AsterixExternalFunctionInfo(String namespace, AsterixFunction asterixFunction, FunctionKind kind,
List<IAType> argumentTypes, IAType returnType, IResultTypeComputer rtc, String body, String language) {
- super(namespace, asterixFunction);
+ super(namespace, asterixFunction, true);
this.rtc = rtc;
this.argumentTypes = argumentTypes;
this.body = body;
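External functions are now unconditionally marked functional via the `true` passed to super above. A construction sketch; the namespace, function name, and body below are hypothetical, FunctionKind is the Algebricks enum this class already imports, and AsterixFunction is assumed to take (name, arity):

    List<IAType> argTypes = Collections.<IAType> singletonList(BuiltinType.ASTRING);
    AsterixExternalFunctionInfo finfo = new AsterixExternalFunctionInfo(
            "mylib",                            // hypothetical namespace
            new AsterixFunction("to-upper", 1), // hypothetical function
            FunctionKind.SCALAR,
            argTypes,
            BuiltinType.ASTRING,
            AStringTypeComputer.INSTANCE,       // any IResultTypeComputer works here
            "...",                              // external definition/body
            "JAVA");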
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionIdentifier.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionIdentifier.java
index b4c47cd..02debfa 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionIdentifier.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionIdentifier.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.functions;
-
public class AsterixFunctionIdentifier {
private final String dataverse;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionInfo.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionInfo.java
index 186ca78..b926a60 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionInfo.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixFunctionInfo.java
@@ -15,27 +15,33 @@
package edu.uci.ics.asterix.om.functions;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AbstractFunctionInfo;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-public class AsterixFunctionInfo implements IFunctionInfo {
+public class AsterixFunctionInfo extends AbstractFunctionInfo {
+
+ private static final long serialVersionUID = 1L;
private final FunctionIdentifier functionIdentifier;
- public AsterixFunctionInfo(String namespace, AsterixFunction asterixFunction) {
+ public AsterixFunctionInfo(String namespace, AsterixFunction asterixFunction, boolean isFunctional) {
+ super(isFunctional);
this.functionIdentifier = new FunctionIdentifier(namespace, asterixFunction.getName(),
asterixFunction.getArity());
}
public AsterixFunctionInfo() {
+ super(true);
functionIdentifier = null;
}
- public AsterixFunctionInfo(FunctionIdentifier functionIdentifier) {
+ public AsterixFunctionInfo(FunctionIdentifier functionIdentifier, boolean isFunctional) {
+ super(isFunctional);
this.functionIdentifier = functionIdentifier;
}
- public AsterixFunctionInfo(FunctionSignature functionSignature) {
+ public AsterixFunctionInfo(FunctionSignature functionSignature, boolean isFunctional) {
+ super(isFunctional);
this.functionIdentifier = new FunctionIdentifier(functionSignature.getNamespace(), functionSignature.getName(),
functionSignature.getArity());
}
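Determinism is now declared at construction time for every AsterixFunctionInfo. A short sketch of the two cases:

    // Deterministic builtin, the common case registered with `true`:
    AsterixFunctionInfo getYear = new AsterixFunctionInfo(
            new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-year", 1), true);
    // Non-deterministic builtin, matching the create-uuid registration earlier:
    AsterixFunctionInfo createUuid = new AsterixFunctionInfo(
            new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-uuid", 0), false);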
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/FunctionInfoRepository.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/FunctionInfoRepository.java
index 1eb4174..6290460 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/FunctionInfoRepository.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/FunctionInfoRepository.java
@@ -37,14 +37,8 @@
return get(fid.getNamespace(), fid.getName(), fid.getArity());
}
- public void put(String namespace, String name, int arity) {
- FunctionSignature functionSignature = new FunctionSignature(namespace, name, arity);
- functionMap.put(functionSignature, new AsterixFunctionInfo(new FunctionIdentifier(namespace, name, arity)));
- }
-
- public void put(FunctionIdentifier fid) {
+ public void put(FunctionIdentifier fid, IFunctionInfo fInfo) {
FunctionSignature functionSignature = new FunctionSignature(fid.getNamespace(), fid.getName(), fid.getArity());
- functionMap.put(functionSignature, new AsterixFunctionInfo(fid));
+ functionMap.put(functionSignature, fInfo);
}
}
-
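The repository no longer fabricates infos, so whatever determinism flag was chosen at registration survives the round trip. A sketch, assuming a no-arg constructor and the get(FunctionIdentifier) overload whose body is visible above:

    FunctionInfoRepository repo = new FunctionInfoRepository();
    FunctionIdentifier fid = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-uuid", 0);
    repo.put(fid, new AsterixFunctionInfo(fid, false));
    IFunctionInfo stored = repo.get(fid); // the exact instance put in, still non-functional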
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IExternalFunctionInfo.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IExternalFunctionInfo.java
index a2bfc13..47e9957 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IExternalFunctionInfo.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IExternalFunctionInfo.java
@@ -25,7 +25,7 @@
public interface IExternalFunctionInfo extends IFunctionInfo, Serializable {
public IResultTypeComputer getResultTypeComputer();
-
+
public IAType getReturnType();
public String getFunctionBody();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IFunctionDescriptorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IFunctionDescriptorFactory.java
index 2cf7288..a94f46b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IFunctionDescriptorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/IFunctionDescriptorFactory.java
@@ -19,6 +19,7 @@
/**
* Creates the artifact that is registered in the function manager.
+ *
* @return a new IFunctionDescriptor instance
*/
public IFunctionDescriptor createFunctionDescriptor();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
index 77b4d38..d08537e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
@@ -39,7 +39,6 @@
* This class interprets the binary data representation of a record. One can
* call getFieldNames, getFieldTypeTags and getFieldValues to get pointable
* objects for field names, field type tags, and field values.
- *
*/
public class ARecordPointable extends AbstractVisitablePointable {
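A usage sketch built only from the accessors named in the class comment; the List<IVisitablePointable> return type is an assumption suggested by ARecordPrinter's usage further down:

    static void printFieldNames(ARecordPointable rec, java.io.PrintStream ps) {
        for (IVisitablePointable name : rec.getFieldNames()) {
            ps.println(name); // each entry points at one serialized field name
        }
    }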
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java
index 152822d..38e9a11 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java
@@ -41,6 +41,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ARectanglePrinter;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AStringPrinter;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ATimePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AUUIDPrinter;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AYearMonthDurationPrinter;
import edu.uci.ics.asterix.om.pointables.AFlatValuePointable;
import edu.uci.ics.asterix.om.pointables.AListPointable;
@@ -188,6 +189,10 @@
AIntervalPrinter.INSTANCE.print(b, s, l, ps);
break;
}
+ case UUID: {
+ AUUIDPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
default: {
throw new NotImplementedException("No printer for type " + typeTag);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java
index ccdc720..54b57a6 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java
@@ -31,8 +31,8 @@
* APrintVisitor.
*/
class ARecordPrinter {
- private static String LEFT_PAREN = "{ ";
- private static String RIGHT_PAREN = " }";
+ private static String LEFT_BRACE = "{ ";
+ private static String RIGHT_BRACE = " }";
private static String COMMA = ", ";
private static String COLON = ": ";
@@ -53,7 +53,7 @@
itemVisitorArg.first = ps;
// print the beginning part
- ps.print(LEFT_PAREN);
+ ps.print(LEFT_BRACE);
// print field 0 to n-2
for (int i = 0; i < fieldNames.size() - 1; i++) {
@@ -68,7 +68,7 @@
}
// print the end part
- ps.print(RIGHT_PAREN);
+ ps.print(RIGHT_BRACE);
}
private void printField(PrintStream ps, APrintVisitor visitor, List<IVisitablePointable> fieldNames,
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/base/IResultTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/base/IResultTypeComputer.java
index bd75f9c..3bc7792 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/base/IResultTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/base/IResultTypeComputer.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.asterix.om.typecomputer.base;
+import java.io.Serializable;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -21,7 +22,7 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-public interface IResultTypeComputer {
+public interface IResultTypeComputer extends Serializable {
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException;
}
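Making IResultTypeComputer Serializable lets type computers travel inside serialized function infos such as AsterixExternalFunctionInfo. A quick round-trip sketch using the AUUIDTypeComputer added below (callers handle IOException/ClassNotFoundException):

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    new ObjectOutputStream(bos).writeObject(AUUIDTypeComputer.INSTANCE);
    Object copy = new ObjectInputStream(
            new ByteArrayInputStream(bos.toByteArray())).readObject();
    // Note: default deserialization creates a second instance; a singleton that
    // must stay unique would need a readResolve() hook, which is not added here.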
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADoubleTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADoubleTypeComputer.java
index e3f0c0f..bd5ed7c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADoubleTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADoubleTypeComputer.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.typecomputer.impl;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AFloatTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AFloatTypeComputer.java
index cd0f8b1..3ff2635 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AFloatTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AFloatTypeComputer.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.typecomputer.impl;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ARectangleTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ARectangleTypeComputer.java
index 0872b80..88b75f5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ARectangleTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ARectangleTypeComputer.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.typecomputer.impl;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AUUIDTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AUUIDTypeComputer.java
new file mode 100644
index 0000000..18d269b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AUUIDTypeComputer.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class AUUIDTypeComputer implements IResultTypeComputer {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AUUIDTypeComputer INSTANCE = new AUUIDTypeComputer();
+
+ private AUUIDTypeComputer() {
+ }
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ return BuiltinType.AUUID;
+ }
+
+}
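Paired with the addFunction(CREATE_UUID, ..., false) registration earlier in this patch, type inference for create-uuid() reduces to a constant; expr, env, and metadataProvider below stand for whatever the compiler passes in:

    IAType t = AUUIDTypeComputer.INSTANCE.computeType(expr, env, metadataProvider);
    // t is always BuiltinType.AUUID, independent of the (zero) arguments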
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractBinaryStringTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractBinaryStringTypeComputer.java
index fe29823..b17d518 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractBinaryStringTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractBinaryStringTypeComputer.java
@@ -24,17 +24,16 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
/**
- *
* @author Xiaoyu Ma
*/
abstract public class AbstractBinaryStringTypeComputer implements IResultTypeComputer {
-
+
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().size() < 2)
- throw new AlgebricksException("Wrong Argument Number.");
+ if (fce.getArguments().size() < 2)
+ throw new AlgebricksException("Wrong Argument Number.");
ILogicalExpression arg0 = fce.getArguments().get(0).getValue();
ILogicalExpression arg1 = fce.getArguments().get(1).getValue();
IAType t0, t1;
@@ -44,14 +43,13 @@
} catch (AlgebricksException e) {
throw new AlgebricksException(e);
}
- if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING) ||
- (t1.getTypeTag() != ATypeTag.NULL && t1.getTypeTag() != ATypeTag.STRING)) {
+ if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING)
+ || (t1.getTypeTag() != ATypeTag.NULL && t1.getTypeTag() != ATypeTag.STRING)) {
throw new AlgebricksException("Expects String Type.");
- }
+ }
return getResultType(t0, t1);
- }
-
-
+ }
+
public abstract IAType getResultType(IAType t0, IAType t1);
}
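The base class owns the arity and STRING/NULL checks; subclasses only choose the result type. A hypothetical subclass sketch, not part of this patch:

    public class BinaryStringInt32OrNullTypeComputer extends AbstractBinaryStringTypeComputer {

        private static final long serialVersionUID = 1L;

        public static final BinaryStringInt32OrNullTypeComputer INSTANCE =
                new BinaryStringInt32OrNullTypeComputer();

        private BinaryStringInt32OrNullTypeComputer() {
        }

        @Override
        public IAType getResultType(IAType t0, IAType t1) {
            // Propagate nullability exactly as the string/bool variants below do.
            if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1)) {
                return AUnionType.createNullableType(BuiltinType.AINT32);
            }
            return BuiltinType.AINT32;
        }
    }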
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractQuadStringTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractQuadStringTypeComputer.java
index 751b082..3f0ac2a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractQuadStringTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractQuadStringTypeComputer.java
@@ -25,40 +25,38 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
/**
- *
* @author Xiaoyu Ma
*/
public abstract class AbstractQuadStringTypeComputer implements IResultTypeComputer {
-
+
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().size() < 4)
- throw new AlgebricksException("Wrong Argument Number.");
+ if (fce.getArguments().size() < 4)
+ throw new AlgebricksException("Wrong Argument Number.");
ILogicalExpression arg0 = fce.getArguments().get(0).getValue();
ILogicalExpression arg1 = fce.getArguments().get(1).getValue();
- ILogicalExpression arg2 = fce.getArguments().get(2).getValue();
- ILogicalExpression arg3 = fce.getArguments().get(3).getValue();
+ ILogicalExpression arg2 = fce.getArguments().get(2).getValue();
+ ILogicalExpression arg3 = fce.getArguments().get(3).getValue();
IAType t0, t1, t2, t3;
try {
t0 = (IAType) env.getType(arg0);
t1 = (IAType) env.getType(arg1);
- t2 = (IAType) env.getType(arg2);
- t3 = (IAType) env.getType(arg3);
+ t2 = (IAType) env.getType(arg2);
+ t3 = (IAType) env.getType(arg3);
} catch (AlgebricksException e) {
throw new AlgebricksException(e);
}
- if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING) ||
- (t1.getTypeTag() != ATypeTag.NULL && t1.getTypeTag() != ATypeTag.STRING) ||
- (t2.getTypeTag() != ATypeTag.NULL && t2.getTypeTag() != ATypeTag.STRING) ||
- (t3.getTypeTag() != ATypeTag.NULL && t3.getTypeTag() != ATypeTag.STRING)) {
+ if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING)
+ || (t1.getTypeTag() != ATypeTag.NULL && t1.getTypeTag() != ATypeTag.STRING)
+ || (t2.getTypeTag() != ATypeTag.NULL && t2.getTypeTag() != ATypeTag.STRING)
+ || (t3.getTypeTag() != ATypeTag.NULL && t3.getTypeTag() != ATypeTag.STRING)) {
throw new NotImplementedException("Expects String Type.");
}
return getResultType(t0, t1, t2, t3);
- }
-
-
- public abstract IAType getResultType(IAType t0, IAType t1, IAType t2, IAType t3);
+ }
+
+ public abstract IAType getResultType(IAType t0, IAType t1, IAType t2, IAType t3);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractTripleStringTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractTripleStringTypeComputer.java
index 66b707f..cdf48df 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractTripleStringTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/AbstractTripleStringTypeComputer.java
@@ -24,36 +24,35 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
/**
- *
* @author Xiaoyu Ma
*/
public abstract class AbstractTripleStringTypeComputer implements IResultTypeComputer {
-
+
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().size() < 3)
- throw new AlgebricksException("Wrong Argument Number.");
+ if (fce.getArguments().size() < 3)
+ throw new AlgebricksException("Wrong Argument Number.");
ILogicalExpression arg0 = fce.getArguments().get(0).getValue();
ILogicalExpression arg1 = fce.getArguments().get(1).getValue();
- ILogicalExpression arg2 = fce.getArguments().get(2).getValue();
+ ILogicalExpression arg2 = fce.getArguments().get(2).getValue();
IAType t0, t1, t2;
try {
t0 = (IAType) env.getType(arg0);
t1 = (IAType) env.getType(arg1);
- t2 = (IAType) env.getType(arg2);
+ t2 = (IAType) env.getType(arg2);
} catch (AlgebricksException e) {
throw new AlgebricksException(e);
}
- if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING) || (t1.getTypeTag() != ATypeTag.NULL &&
- t1.getTypeTag() != ATypeTag.STRING) || (t2.getTypeTag() != ATypeTag.NULL && t2.getTypeTag() != ATypeTag.STRING)) {
+ if ((t0.getTypeTag() != ATypeTag.NULL && t0.getTypeTag() != ATypeTag.STRING)
+ || (t1.getTypeTag() != ATypeTag.NULL && t1.getTypeTag() != ATypeTag.STRING)
+ || (t2.getTypeTag() != ATypeTag.NULL && t2.getTypeTag() != ATypeTag.STRING)) {
throw new AlgebricksException("Expects String Type.");
}
return getResultType(t0, t1, t2);
- }
-
-
+ }
+
public abstract IAType getResultType(IAType t0, IAType t1, IAType t2);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryBooleanOrNullFunctionTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryBooleanOrNullFunctionTypeComputer.java
index f44a416..5e7d6e0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryBooleanOrNullFunctionTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryBooleanOrNullFunctionTypeComputer.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.typecomputer.impl;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnionType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringBoolOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringBoolOrNullTypeComputer.java
index 5051ee5..3a9fd08 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringBoolOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringBoolOrNullTypeComputer.java
@@ -20,18 +20,19 @@
import edu.uci.ics.asterix.om.types.TypeHelper;
/**
- *
* @author Xiaoyu Ma
*/
public class BinaryStringBoolOrNullTypeComputer extends AbstractBinaryStringTypeComputer {
public static final BinaryStringBoolOrNullTypeComputer INSTANCE = new BinaryStringBoolOrNullTypeComputer();
- private BinaryStringBoolOrNullTypeComputer() {}
-
+
+ private BinaryStringBoolOrNullTypeComputer() {
+ }
+
@Override
public IAType getResultType(IAType t0, IAType t1) {
if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1)) {
return AUnionType.createNullableType(BuiltinType.ABOOLEAN);
- }
+ }
return BuiltinType.ABOOLEAN;
- }
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringStringOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringStringOrNullTypeComputer.java
index 3a301b2..da91ee3 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringStringOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/BinaryStringStringOrNullTypeComputer.java
@@ -20,20 +20,21 @@
import edu.uci.ics.asterix.om.types.TypeHelper;
/**
- *
* @author Xiaoyu Ma
*/
public class BinaryStringStringOrNullTypeComputer extends AbstractBinaryStringTypeComputer {
-
+
public static final BinaryStringStringOrNullTypeComputer INSTANCE = new BinaryStringStringOrNullTypeComputer();
- private BinaryStringStringOrNullTypeComputer() {}
+
+ private BinaryStringStringOrNullTypeComputer() {
+ }
@Override
public IAType getResultType(IAType t0, IAType t1) {
if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1)) {
return AUnionType.createNullableType(BuiltinType.ASTRING);
- }
+ }
return BuiltinType.ASTRING;
}
-
+
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ConcatNonNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ConcatNonNullTypeComputer.java
index 7bf2668..7680c15 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ConcatNonNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ConcatNonNullTypeComputer.java
@@ -15,12 +15,7 @@
package edu.uci.ics.asterix.om.typecomputer.impl;
-import java.util.ArrayList;
-import java.util.List;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -46,29 +41,18 @@
if (f.getArguments().size() < 1) {
return BuiltinType.ANULL;
}
- List<IAType> possibleTypes = new ArrayList<IAType>();
+
+ TypeCompatibilityChecker tcc = new TypeCompatibilityChecker();
for (int i = 0; i < f.getArguments().size(); i++) {
ILogicalExpression arg = f.getArguments().get(i).getValue();
IAType type = (IAType) env.getType(arg);
- if (type.getTypeTag() == ATypeTag.UNION) {
- List<IAType> typeList = ((AUnionType) type).getUnionList();
- for (IAType t : typeList) {
- if (t.getTypeTag() != ATypeTag.NULL) {
- //CONCAT_NON_NULL cannot return null because it's only used for if-else construct
- if (!possibleTypes.contains(t))
- possibleTypes.add(t);
- }
- }
- } else {
- if (!possibleTypes.contains(type))
- possibleTypes.add(type);
- }
+ tcc.addPossibleType(type);
}
- if (possibleTypes.size() == 1) {
- return possibleTypes.get(0);
- } else {
+
+ IAType result = tcc.getCompatibleType();
+ if (result == null) {
throw new AlgebricksException("The two branches of the if-else clause should return the same type.");
}
+ return result;
}
-
}
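The rewrite above delegates the union flattening that the old loop did inline to TypeCompatibilityChecker, a package-private helper added later in this patch. A minimal sketch of the collapse it performs, with illustrative branch types (assumed, not from the patch):

    TypeCompatibilityChecker tcc = new TypeCompatibilityChecker();
    tcc.addPossibleType(BuiltinType.ASTRING);                                 // then-branch: string
    tcc.addPossibleType(AUnionType.createNullableType(BuiltinType.ASTRING)); // else-branch: string?
    IAType result = tcc.getCompatibleType(); // one distinct non-null type, NULL seen -> nullable string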
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FlowRecordResultTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FlowRecordResultTypeComputer.java
new file mode 100644
index 0000000..a982a1f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FlowRecordResultTypeComputer.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class FlowRecordResultTypeComputer implements IResultTypeComputer {
+
+ public static final FlowRecordResultTypeComputer INSTANCE = new FlowRecordResultTypeComputer();
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) expression;
+ IAType type = TypeComputerUtilities.getRequiredType(funcExpr);
+ if (type == null) {
+ type = (IAType) env.getType(funcExpr.getArguments().get(0).getValue());
+ }
+ return type;
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericAddSubMulDivTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericAddSubMulDivTypeComputer.java
index 154ce65..0bbf8a3 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericAddSubMulDivTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericAddSubMulDivTypeComputer.java
@@ -280,7 +280,7 @@
break;
}
case DURATION: {
- switch(tag2){
+ switch (tag2) {
case DATE:
unionList.add(BuiltinType.ADATE);
break;
@@ -297,7 +297,7 @@
break;
}
case YEARMONTHDURATION: {
- switch(tag2){
+ switch (tag2) {
case DATE:
unionList.add(BuiltinType.ADATE);
break;
@@ -317,7 +317,7 @@
break;
}
case DAYTIMEDURATION: {
- switch(tag2){
+ switch (tag2) {
case DATE:
unionList.add(BuiltinType.ADATE);
break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericRoundHalfToEven2TypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericRoundHalfToEven2TypeComputer.java
index 8af3ef2..79b0bce 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericRoundHalfToEven2TypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericRoundHalfToEven2TypeComputer.java
@@ -37,8 +37,7 @@
public class NonTaggedNumericRoundHalfToEven2TypeComputer implements IResultTypeComputer {
- public static final NonTaggedNumericRoundHalfToEven2TypeComputer INSTANCE =
- new NonTaggedNumericRoundHalfToEven2TypeComputer();
+ public static final NonTaggedNumericRoundHalfToEven2TypeComputer INSTANCE = new NonTaggedNumericRoundHalfToEven2TypeComputer();
private NonTaggedNumericRoundHalfToEven2TypeComputer() {
}
@@ -47,17 +46,17 @@
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().size() < 2)
- throw new AlgebricksException("Argument number invalid.");
-
+ if (fce.getArguments().size() < 2)
+ throw new AlgebricksException("Argument number invalid.");
+
ILogicalExpression arg1 = fce.getArguments().get(0).getValue();
ILogicalExpression arg2 = fce.getArguments().get(1).getValue();
-
+
IAType t1 = (IAType) env.getType(arg1);
IAType t2 = (IAType) env.getType(arg2);
-
+
List<IAType> unionList = new ArrayList<IAType>();
- unionList.add(BuiltinType.ANULL);
+ unionList.add(BuiltinType.ANULL);
ATypeTag tag1, tag2;
if (t1.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t1))
@@ -71,33 +70,33 @@
.getTypeTag();
else
tag2 = t2.getTypeTag();
-
- switch(tag2) {
- case INT8:
- case INT16:
- case INT32:
- case INT64:
- break;
- default:
- throw new AlgebricksException("Argument $precision cannot be type " + t2.getTypeName());
- }
-
+
+ switch (tag2) {
+ case INT8:
+ case INT16:
+ case INT32:
+ case INT64:
+ break;
+ default:
+ throw new AlgebricksException("Argument $precision cannot be type " + t2.getTypeName());
+ }
+
switch (tag1) {
case INT8:
unionList.add(BuiltinType.AINT8);
- break;
+ break;
case INT16:
unionList.add(BuiltinType.AINT16);
- break;
+ break;
case INT32:
unionList.add(BuiltinType.AINT32);
- break;
+ break;
case INT64:
unionList.add(BuiltinType.AINT64);
- break;
+ break;
case FLOAT:
unionList.add(BuiltinType.AFLOAT);
- break;
+ break;
case DOUBLE:
unionList.add(BuiltinType.ADOUBLE);
break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericUnaryFunctionTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericUnaryFunctionTypeComputer.java
index 91e3ad8..8a026b0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericUnaryFunctionTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedNumericUnaryFunctionTypeComputer.java
@@ -38,8 +38,7 @@
public class NonTaggedNumericUnaryFunctionTypeComputer implements IResultTypeComputer {
private static final String errMsg = "Arithmetic operations are not implemented for ";
- public static final NonTaggedNumericUnaryFunctionTypeComputer INSTANCE =
- new NonTaggedNumericUnaryFunctionTypeComputer();
+ public static final NonTaggedNumericUnaryFunctionTypeComputer INSTANCE = new NonTaggedNumericUnaryFunctionTypeComputer();
private NonTaggedNumericUnaryFunctionTypeComputer() {
}
@@ -48,42 +47,43 @@
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().isEmpty())
+ if (fce.getArguments().isEmpty())
throw new AlgebricksException("Wrong Argument Number.");
-
+
ILogicalExpression arg1 = fce.getArguments().get(0).getValue();
IAType t = (IAType) env.getType(arg1);
ATypeTag tag = t.getTypeTag();
- if (tag == ATypeTag.UNION
- && NonTaggedFormatUtil.isOptionalField((AUnionType) env.getType(arg1))) {
+ if (tag == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) env.getType(arg1))) {
return (IAType) env.getType(arg1);
}
-
+
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
switch (tag) {
case INT8:
unionList.add(BuiltinType.AINT8);
- break;
+ break;
case INT16:
unionList.add(BuiltinType.AINT16);
- break;
+ break;
case INT32:
unionList.add(BuiltinType.AINT32);
- break;
+ break;
case INT64:
unionList.add(BuiltinType.AINT64);
- break;
+ break;
case FLOAT:
unionList.add(BuiltinType.AFLOAT);
- break;
+ break;
case DOUBLE:
unionList.add(BuiltinType.ADOUBLE);
break;
case NULL:
return BuiltinType.ANULL;
+ case ANY:
+ return BuiltinType.ANY;
default: {
throw new NotImplementedException(errMsg + t.getTypeName());
}
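The added ANY case changes behavior for open (schemaless) fields: where the default branch used to raise NotImplementedException at compile time, the unary numeric functions now type such inputs as ANY and defer checking to runtime. Illustrative:

    // -$r.someOpenField : env.getType(...) yields ANY -> result type ANY (no compile-time error)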
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedSwitchCaseComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedSwitchCaseComputer.java
index c1450ca..bd67f18 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedSwitchCaseComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedSwitchCaseComputer.java
@@ -15,10 +15,7 @@
package edu.uci.ics.asterix.om.typecomputer.impl;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
@@ -40,46 +37,27 @@
if (fce.getArguments().size() < 3)
throw new AlgebricksException(errMsg1);
- IAType t0;
- IAType t1;
- IAType ti;
-
- ATypeTag tag0;
- ATypeTag tag1;
- ATypeTag tagi;
- try {
- t0 = (IAType) env.getType(fce.getArguments().get(0).getValue());
- t1 = (IAType) env.getType(fce.getArguments().get(2).getValue());
- tag0 = t0.getTypeTag();
- tag1 = t1.getTypeTag();
- if (t0.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t0))
- tag0 = ((AUnionType) t0).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
- .getTypeTag();
- if (t1.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t1))
- tag1 = ((AUnionType) t1).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
- .getTypeTag();
- for (int i = 2; i < fce.getArguments().size(); i += 2) {
- ti = (IAType) env.getType(fce.getArguments().get(i).getValue());
- tagi = ti.getTypeTag();
- if (ti.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) ti))
- tagi = ((AUnionType) ti).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
- .getTypeTag();
- if (tag1 != tagi)
- if (!t1.toString().equals(ti.toString()))
- throw new AlgebricksException(errMsg2);
- }
- for (int i = 1; i < fce.getArguments().size(); i += 2) {
- ti = (IAType) env.getType(fce.getArguments().get(i).getValue());
- tagi = ti.getTypeTag();
- if (ti.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) ti))
- tagi = ((AUnionType) ti).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
- .getTypeTag();
- if (tag0 != tagi)
- throw new AlgebricksException(errMsg3);
- }
- } catch (AlgebricksException e) {
- throw new AlgebricksException(e);
+ TypeCompatibilityChecker tcc = new TypeCompatibilityChecker();
+ for (int i = 2; i < fce.getArguments().size(); i += 2) {
+ IAType ti = (IAType) env.getType(fce.getArguments().get(i).getValue());
+ tcc.addPossibleType(ti);
}
- return t1;
+ IAType valueType = tcc.getCompatibleType();
+ if (valueType == null) {
+ throw new AlgebricksException(errMsg2);
+ }
+
+ IAType switchType = (IAType) env.getType(fce.getArguments().get(0).getValue());
+ tcc.reset();
+ tcc.addPossibleType(switchType);
+ for (int i = 1; i < fce.getArguments().size(); i += 2) {
+ IAType ti = (IAType) env.getType(fce.getArguments().get(i).getValue());
+ tcc.addPossibleType(ti);
+ }
+ IAType caseType = tcc.getCompatibleType();
+ if (caseType == null) {
+ throw new AlgebricksException(errMsg3);
+ }
+ return valueType;
}
}
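Argument layout assumed by the rewritten computer: switch-case(cond, case1, value1, case2, value2, ...), so even indices >= 2 hold result values and odd indices hold case expressions. A worked trace with hypothetical types:

    // switch-case($x, 1, "a", 2, "b") with $x : int32
    //   value pass: {string, string}      -> one distinct type, valueType = string
    //   case pass:  {int32, int32, int32} -> compatible, so errMsg3 is not raised
    //   returns valueType (string)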
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NumericAddSubMulTypeDescriptor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NumericAddSubMulTypeDescriptor.java
index 26c61b2..01499f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NumericAddSubMulTypeDescriptor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NumericAddSubMulTypeDescriptor.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.om.typecomputer.impl;
-
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/QuadStringStringOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/QuadStringStringOrNullTypeComputer.java
index c504671..4b77dc6 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/QuadStringStringOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/QuadStringStringOrNullTypeComputer.java
@@ -20,20 +20,21 @@
import edu.uci.ics.asterix.om.types.TypeHelper;
/**
- *
* @author Xiaoyu Ma
*/
-public class QuadStringStringOrNullTypeComputer extends AbstractQuadStringTypeComputer {
+public class QuadStringStringOrNullTypeComputer extends AbstractQuadStringTypeComputer {
public static final QuadStringStringOrNullTypeComputer INSTANCE = new QuadStringStringOrNullTypeComputer();
- private QuadStringStringOrNullTypeComputer() {}
+
+ private QuadStringStringOrNullTypeComputer() {
+ }
@Override
public IAType getResultType(IAType t0, IAType t1, IAType t2, IAType t3) {
- if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1) ||
- TypeHelper.canBeNull(t2) || TypeHelper.canBeNull(t3)) {
+ if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1) || TypeHelper.canBeNull(t2)
+ || TypeHelper.canBeNull(t3)) {
return AUnionType.createNullableType(BuiltinType.ASTRING);
- }
+ }
return BuiltinType.ASTRING;
}
-
+
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
new file mode 100644
index 0000000..f663d50
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
@@ -0,0 +1,105 @@
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.TypeHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class RecordMergeTypeComputer implements IResultTypeComputer {
+ private static final long serialVersionUID = 1L;
+
+ public static final RecordMergeTypeComputer INSTANCE = new RecordMergeTypeComputer();
+
+ private RecordMergeTypeComputer() {
+ }
+
+ public static ARecordType extractRecordType(IAType t) {
+ if (t.getTypeTag() == ATypeTag.RECORD) {
+ return (ARecordType) t;
+ }
+
+ if (t.getTypeTag() == ATypeTag.UNION) {
+ IAType innerType = ((AUnionType) t).getUnionList().get(1);
+ if (innerType.getTypeTag() == ATypeTag.RECORD) {
+ return (ARecordType) innerType;
+ }
+ }
+
+ return null;
+ }
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
+ IAType t0 = (IAType) env.getType(f.getArguments().get(0).getValue());
+ IAType t1 = (IAType) env.getType(f.getArguments().get(1).getValue());
+ boolean nullable = TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1);
+ ARecordType recType0 = extractRecordType(t0);
+ ARecordType recType1 = extractRecordType(t1);
+
+ if (recType0 == null || recType1 == null) {
+ throw new AlgebricksException("record-merge expects possibly NULL records as arguments, but got (" + t0
+ + ", " + t1 + ")");
+ }
+
+ List<String> resultFieldNames = new ArrayList<>();
+ for (String fieldName : recType0.getFieldNames()) {
+ resultFieldNames.add(fieldName);
+ }
+ Collections.sort(resultFieldNames);
+ List<IAType> resultFieldTypes = new ArrayList<>();
+ for (String fieldName : resultFieldNames) {
+ try {
+ resultFieldTypes.add(recType0.getFieldType(fieldName));
+ } catch (IOException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+
+ List<String> additionalFieldNames = new ArrayList<>();
+ List<IAType> additionalFieldTypes = new ArrayList<>();
+ for (int i = 0; i < recType1.getFieldNames().length; ++i) {
+ String fieldName = recType1.getFieldNames()[i];
+ IAType fieldType = recType1.getFieldTypes()[i];
+ int pos = Collections.binarySearch(resultFieldNames, fieldName);
+ if (pos >= 0) {
+ resultFieldNames.set(pos, fieldName);
+ resultFieldTypes.set(pos, fieldType);
+ } else {
+ additionalFieldNames.add(fieldName);
+ additionalFieldTypes.add(fieldType);
+ }
+ }
+
+ resultFieldNames.addAll(additionalFieldNames);
+ resultFieldTypes.addAll(additionalFieldTypes);
+ String resultTypeName = "merged(" + recType0.getTypeName() + ", " + recType1.getTypeName() + ")";
+ boolean isOpen = recType0.isOpen() || recType1.isOpen();
+ IAType resultType = null;
+ try {
+ resultType = new ARecordType(resultTypeName, resultFieldNames.toArray(new String[] {}),
+ resultFieldTypes.toArray(new IAType[] {}), isOpen);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ if (nullable) {
+ resultType = AUnionType.createNullableType(resultType);
+ }
+ return resultType;
+ }
+}
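Merge semantics implemented above: the first record's field names are sorted, the second record's fields overwrite same-named fields (a binarySearch hit) and are otherwise appended, and the result is open if either input is open. A hypothetical example:

    // t0 = closed { id: int32, x: double }      t1 = open { x: string, tag: string }
    // record-merge(t0, t1) -> { id: int32, x: string, tag: string }, open,
    //   named "merged(" + t0's type name + ", " + t1's type name + ")"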
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/Substring2TypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/Substring2TypeComputer.java
index ba622fb..584708e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/Substring2TypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/Substring2TypeComputer.java
@@ -26,52 +26,47 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-
public class Substring2TypeComputer implements IResultTypeComputer {
public static final Substring2TypeComputer INSTANCE = new Substring2TypeComputer();
-
-
+
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().size() < 2)
- throw new AlgebricksException("Wrong Argument Number.");
+ if (fce.getArguments().size() < 2)
+ throw new AlgebricksException("Wrong Argument Number.");
ILogicalExpression arg0 = fce.getArguments().get(0).getValue();
- ILogicalExpression arg1 = fce.getArguments().get(1).getValue();
+ ILogicalExpression arg1 = fce.getArguments().get(1).getValue();
IAType t0, t1;
try {
t0 = (IAType) env.getType(arg0);
- t1 = (IAType) env.getType(arg1);
+ t1 = (IAType) env.getType(arg1);
} catch (AlgebricksException e) {
throw new AlgebricksException(e);
}
-
+
ATypeTag tag0, tag1;
if (t0.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t0))
tag0 = ((AUnionType) t0).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
.getTypeTag();
else
tag0 = t0.getTypeTag();
-
+
if (t1.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t1))
tag1 = ((AUnionType) t1).getUnionList().get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST)
.getTypeTag();
else
- tag1 = t1.getTypeTag();
-
- if(tag0 != ATypeTag.NULL && tag0 != ATypeTag.STRING) {
+ tag1 = t1.getTypeTag();
+
+ if (tag0 != ATypeTag.NULL && tag0 != ATypeTag.STRING) {
throw new AlgebricksException("First argument should be String Type.");
}
-
- if(tag1 != ATypeTag.NULL &&
- tag1 != ATypeTag.INT8 &&
- tag1 != ATypeTag.INT16 &&
- tag1 != ATypeTag.INT32 &&
- tag1 != ATypeTag.INT64) {
+
+ if (tag1 != ATypeTag.NULL && tag1 != ATypeTag.INT8 && tag1 != ATypeTag.INT16 && tag1 != ATypeTag.INT32
+ && tag1 != ATypeTag.INT64) {
throw new AlgebricksException("Second argument should be integer Type.");
}
return BuiltinType.ASTRING;
- }
+ }
}
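Net effect of the checks above, on hypothetical inputs:

    // substring2("hello", 2)   : t0 string, t1 int32 -> ASTRING
    // substring2("hello", "2") : AlgebricksException("Second argument should be integer Type.")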
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringBoolOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringBoolOrNullTypeComputer.java
index 3ad0cee..a666afb7 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringBoolOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringBoolOrNullTypeComputer.java
@@ -20,18 +20,20 @@
import edu.uci.ics.asterix.om.types.TypeHelper;
/**
- *
* @author Xiaoyu Ma
*/
-public class TripleStringBoolOrNullTypeComputer extends AbstractTripleStringTypeComputer {
+public class TripleStringBoolOrNullTypeComputer extends AbstractTripleStringTypeComputer {
public static final TripleStringBoolOrNullTypeComputer INSTANCE = new TripleStringBoolOrNullTypeComputer();
- private TripleStringBoolOrNullTypeComputer() {}
+
+ private TripleStringBoolOrNullTypeComputer() {
+ }
+
@Override
public IAType getResultType(IAType t0, IAType t1, IAType t2) {
if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1) || TypeHelper.canBeNull(t2)) {
return AUnionType.createNullableType(BuiltinType.ABOOLEAN);
- }
+ }
return BuiltinType.ABOOLEAN;
}
-
+
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringStringOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringStringOrNullTypeComputer.java
index 5031287..a02ff83 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringStringOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TripleStringStringOrNullTypeComputer.java
@@ -20,18 +20,19 @@
import edu.uci.ics.asterix.om.types.TypeHelper;
/**
- *
* @author Xiaoyu Ma
*/
-public class TripleStringStringOrNullTypeComputer extends AbstractTripleStringTypeComputer {
+public class TripleStringStringOrNullTypeComputer extends AbstractTripleStringTypeComputer {
public static final TripleStringStringOrNullTypeComputer INSTANCE = new TripleStringStringOrNullTypeComputer();
- private TripleStringStringOrNullTypeComputer() {}
+
+ private TripleStringStringOrNullTypeComputer() {
+ }
@Override
public IAType getResultType(IAType t0, IAType t1, IAType t2) {
if (TypeHelper.canBeNull(t0) || TypeHelper.canBeNull(t1) || TypeHelper.canBeNull(t2)) {
return AUnionType.createNullableType(BuiltinType.ASTRING);
- }
+ }
return BuiltinType.ASTRING;
- }
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TypeCompatibilityChecker.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TypeCompatibilityChecker.java
new file mode 100644
index 0000000..0739b2f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/TypeCompatibilityChecker.java
@@ -0,0 +1,61 @@
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+class TypeCompatibilityChecker {
+ private final List<IAType> possibleTypes;
+ private boolean nullEncountered;
+
+ public TypeCompatibilityChecker() {
+ possibleTypes = new ArrayList<IAType>();
+ nullEncountered = false;
+ }
+
+ public void reset() {
+ possibleTypes.clear();
+ nullEncountered = false;
+ }
+
+ public void addPossibleType(IAType type) {
+ if (type.getTypeTag() == ATypeTag.UNION) {
+ List<IAType> typeList = ((AUnionType) type).getUnionList();
+ for (IAType t : typeList) {
+ if (t.getTypeTag() != ATypeTag.NULL) {
+ // NULL is tracked via nullEncountered instead of being added as a candidate type.
+ if (!possibleTypes.contains(t))
+ possibleTypes.add(t);
+ } else {
+ nullEncountered = true;
+ }
+ }
+ } else {
+ if (type.getTypeTag() != ATypeTag.NULL) {
+ if (!possibleTypes.contains(type)) {
+ possibleTypes.add(type);
+ }
+ } else {
+ nullEncountered = true;
+ }
+ }
+ }
+
+ public IAType getCompatibleType() {
+ switch (possibleTypes.size()) {
+ case 0:
+ return BuiltinType.ANULL;
+ case 1:
+ if (nullEncountered) {
+ return AUnionType.createNullableType(possibleTypes.get(0));
+ } else {
+ return possibleTypes.get(0);
+ }
+ }
+ return null;
+ }
+}
\ No newline at end of file
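Contract of getCompatibleType, restated as an assert-style usage sketch (illustrative only):

    TypeCompatibilityChecker tcc = new TypeCompatibilityChecker();
    tcc.addPossibleType(BuiltinType.AINT32);
    tcc.addPossibleType(BuiltinType.ANULL);
    assert tcc.getCompatibleType() != null; // one non-null candidate, NULL seen -> nullable int32

    tcc.reset();
    tcc.addPossibleType(BuiltinType.AINT32);
    tcc.addPossibleType(BuiltinType.ASTRING);
    assert tcc.getCompatibleType() == null; // incompatible; callers raise their own error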
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringInt32OrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringInt32OrNullTypeComputer.java
index 1155485..1770685 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringInt32OrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringInt32OrNullTypeComputer.java
@@ -61,7 +61,7 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
- if (t0.getTypeTag() == ATypeTag.NULL) {
+ if (t0.getTypeTag() == ATypeTag.NULL) {
return BuiltinType.ANULL;
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringOrNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringOrNullTypeComputer.java
index dadf330..e319c7d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringOrNullTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/UnaryStringOrNullTypeComputer.java
@@ -26,22 +26,22 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-
/**
- *
* @author Xiaoyu Ma
*/
-public class UnaryStringOrNullTypeComputer implements IResultTypeComputer {
-
+public class UnaryStringOrNullTypeComputer implements IResultTypeComputer {
+
public static final UnaryStringOrNullTypeComputer INSTANCE = new UnaryStringOrNullTypeComputer();
- private UnaryStringOrNullTypeComputer() {}
-
+
+ private UnaryStringOrNullTypeComputer() {
+ }
+
@Override
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expression;
- if(fce.getArguments().isEmpty())
- throw new AlgebricksException("Wrong Argument Number.");
+ if (fce.getArguments().isEmpty())
+ throw new AlgebricksException("Wrong Argument Number.");
ILogicalExpression arg0 = fce.getArguments().get(0).getValue();
IAType t0;
try {
@@ -49,17 +49,17 @@
} catch (AlgebricksException e) {
throw new AlgebricksException(e);
}
-
+
if (TypeHelper.canBeNull(t0)) {
return AUnionType.createNullableType(BuiltinType.ASTRING);
- }
-
+ }
+
if (t0.getTypeTag() == ATypeTag.NULL)
- return BuiltinType.ANULL;
-
- if(t0.getTypeTag() == ATypeTag.STRING)
- return BuiltinType.ASTRING;
-
- throw new AlgebricksException("Expects String Type.");
- }
+ return BuiltinType.ANULL;
+
+ if (t0.getTypeTag() == ATypeTag.STRING)
+ return BuiltinType.ASTRING;
+
+ throw new AlgebricksException("Expects String Type.");
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AOrderedListType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AOrderedListType.java
index 2a54485..21ec140 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AOrderedListType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AOrderedListType.java
@@ -71,7 +71,7 @@
}
@Override
- public JSONObject toJSON() throws JSONException{
+ public JSONObject toJSON() throws JSONException {
JSONObject type = new JSONObject();
type.put("type", itemType);
return type;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index 28a41d1..96955b0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -253,31 +253,45 @@
* @throws AlgebricksException
* (if the validation failed), IOException
*/
- public void validatePartitioningExpressions(List<String> partitioningExprs) throws AlgebricksException, IOException {
- for (String fieldName : partitioningExprs) {
- IAType fieldType = getFieldType(fieldName);
- if (fieldType == null) {
- throw new AlgebricksException("A field with this name \"" + fieldName + "\" could not be found.");
+ public void validatePartitioningExpressions(List<String> partitioningExprs, boolean autogenerated)
+ throws AsterixException, IOException {
+ if (autogenerated) {
+ if (partitioningExprs.size() > 1) {
+ throw new AsterixException("Cannot autogenerate a composite primary key");
}
- switch (fieldType.getTypeTag()) {
- case INT8:
- case INT16:
- case INT32:
- case INT64:
- case FLOAT:
- case DOUBLE:
- case STRING:
- case DATE:
- case TIME:
- case DATETIME:
- case YEARMONTHDURATION:
- case DAYTIMEDURATION:
- break;
- case UNION:
- throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be nullable");
- default:
- throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be of type "
- + fieldType.getTypeTag() + ".");
+
+ ATypeTag pkTypeTag = getFieldType(partitioningExprs.get(0)).getTypeTag();
+ if (pkTypeTag != ATypeTag.UUID) {
+ throw new AsterixException("Cannot autogenerate a primary key for type " + pkTypeTag
+ + ". Autogenerated primary keys must be of type " + ATypeTag.UUID + ".");
+ }
+ } else {
+ for (String fieldName : partitioningExprs) {
+ IAType fieldType = getFieldType(fieldName);
+ if (fieldType == null) {
+ throw new AsterixException("A field with this name \"" + fieldName + "\" could not be found.");
+ }
+ switch (fieldType.getTypeTag()) {
+ case INT8:
+ case INT16:
+ case INT32:
+ case INT64:
+ case FLOAT:
+ case DOUBLE:
+ case STRING:
+ case DATE:
+ case TIME:
+ case UUID:
+ case DATETIME:
+ case YEARMONTHDURATION:
+ case DAYTIMEDURATION:
+ break;
+ case UNION:
+ throw new AsterixException("The partitioning key \"" + fieldName + "\" cannot be nullable");
+ default:
+ throw new AsterixException("The partitioning key \"" + fieldName + "\" cannot be of type "
+ + fieldType.getTypeTag() + ".");
+ }
}
}
}
@@ -313,6 +327,7 @@
case TIME:
case DATETIME:
case UNION:
+ case UUID:
case YEARMONTHDURATION:
case DAYTIMEDURATION:
break;
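Sketch of the new autogenerated-key path, using the signature above (the record type and field names are hypothetical; Arrays is java.util.Arrays, and both calls may throw AsterixException or IOException):

    // ARecordType t with field "id" of type uuid:
    t.validatePartitioningExpressions(Arrays.asList("id"), true);        // ok
    t.validatePartitioningExpressions(Arrays.asList("id", "ts"), true);  // AsterixException: composite key
    // a non-uuid "id" fails with "Autogenerated primary keys must be of type UUID."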
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
index d0cf2f2..ffbbd64 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
@@ -15,6 +15,9 @@
package edu.uci.ics.asterix.om.types;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* There is a unique tag for each primitive type and for each kind of
* non-primitive type in the object model.
@@ -58,7 +61,8 @@
INTERVAL(34),
SYSTEM_NULL(35),
YEARMONTHDURATION(36),
- DAYTIMEDURATION(37);
+ DAYTIMEDURATION(37),
+ UUID(38);
private byte value;
@@ -71,6 +75,20 @@
return value;
}
- public final static int TYPE_COUNT = ATypeTag.values().length;
+ public static final int TYPE_COUNT = ATypeTag.values().length;
+
+ public static final ATypeTag[] VALUE_TYPE_MAPPING;
+
+ static {
+ List<ATypeTag> typeList = new ArrayList<>();
+ for (ATypeTag tt : values()) {
+ int index = tt.value;
+ while (typeList.size() <= index) {
+ typeList.add(null);
+ }
+ typeList.set(index, tt);
+ }
+ VALUE_TYPE_MAPPING = typeList.toArray(new ATypeTag[typeList.size()]);
+ }
}
\ No newline at end of file
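VALUE_TYPE_MAPPING gives an O(1) reverse lookup from a serialized tag byte back to the enum constant, tolerating gaps in the numbering (unused slots stay null). Illustrative round trip:

    byte b = ATypeTag.UUID.serialize();            // 38, per the enum above
    ATypeTag tag = ATypeTag.VALUE_TYPE_MAPPING[b]; // ATypeTag.UUID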
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AUnorderedListType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AUnorderedListType.java
index 2c2b341..ffbc6e6 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AUnorderedListType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AUnorderedListType.java
@@ -71,7 +71,7 @@
}
@Override
- public JSONObject toJSON() throws JSONException{
+ public JSONObject toJSON() throws JSONException {
JSONObject type = new JSONObject();
type.put("type", itemType);
return type;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AbstractComplexType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AbstractComplexType.java
index d2766dd..7d3fe94 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AbstractComplexType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/AbstractComplexType.java
@@ -18,21 +18,21 @@
public abstract class AbstractComplexType implements IAType {
- private static final long serialVersionUID = 1L;
- protected String typeName;
+ private static final long serialVersionUID = 1L;
+ protected String typeName;
- public AbstractComplexType(String typeName) {
- this.typeName = typeName;
- }
+ public AbstractComplexType(String typeName) {
+ this.typeName = typeName;
+ }
- @Override
- public String getTypeName() {
- return typeName;
- }
+ @Override
+ public String getTypeName() {
+ return typeName;
+ }
- @Override
- public boolean equals(Object object) {
- return this.deepEqual((IAObject) object);
- }
+ @Override
+ public boolean equals(Object object) {
+ return this.deepEqual((IAObject) object);
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
index b1f3901..97fb729 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
@@ -713,6 +713,31 @@
}
};
+ public static final BuiltinType AUUID = new LowerCaseConstructorType() {
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.UUID;
+ }
+
+ @Override
+ public String getDisplayName() {
+ return "UUID";
+ }
+
+ @Override
+ public String getTypeName() {
+ return "uuid";
+ }
+
+ @Override
+ public JSONObject toJSON() throws JSONException {
+ JSONObject type = new JSONObject();
+ type.put("type", getDisplayName());
+ return type;
+ }
+ };
+
public static final IAType ANY = new BuiltinType() {
private static final long serialVersionUID = 1L;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/TypeHierarchy.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/TypeHierarchy.java
index ba5c2db..053dbb2 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/TypeHierarchy.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/TypeHierarchy.java
@@ -12,29 +12,29 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.om.types;
-
-import java.util.Hashtable;
-
-/*
- * Author: Guangqiang Li
- * Created on Sep 24, 2009
- */
-public class TypeHierarchy {
- private static Hashtable<String, String> parentMap = new Hashtable<String, String>();
- static {
- parentMap.put("integer", "decimal");
- parentMap.put("double", "decimal");
- parentMap.put("decimal", "numeric");
- }
-
- public static boolean isSubType(String sub, String par) {
- String parent = parentMap.get(sub);
- if (parent != null)
- if (parent.equals(par))
- return true;
- else
- return isSubType(parent, par);
- return false;
- }
-}
+package edu.uci.ics.asterix.om.types;
+
+import java.util.Hashtable;
+
+/*
+ * Author: Guangqiang Li
+ * Created on Sep 24, 2009
+ */
+public class TypeHierarchy {
+ private static Hashtable<String, String> parentMap = new Hashtable<String, String>();
+ static {
+ parentMap.put("integer", "decimal");
+ parentMap.put("double", "decimal");
+ parentMap.put("decimal", "numeric");
+ }
+
+ public static boolean isSubType(String sub, String par) {
+ String parent = parentMap.get(sub);
+ if (parent != null)
+ if (parent.equals(par))
+ return true;
+ else
+ return isSubType(parent, par);
+ return false;
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/AbstractIntegerTypePromoteComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/AbstractIntegerTypePromoteComputer.java
index 12bfaf4..7df6f43 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/AbstractIntegerTypePromoteComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/AbstractIntegerTypePromoteComputer.java
@@ -30,7 +30,7 @@
}
for (int i = targetTypeLength - 1; i >= 0; i--) {
- storageForPromotedValue.getDataOutput().writeByte((byte)((num >>> (i * 8)) & 0xFF));
+ storageForPromotedValue.getDataOutput().writeByte((byte) ((num >>> (i * 8)) & 0xFF));
}
}
}
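The loop writes the promoted integer big-endian, most significant byte first. Worked trace for num = 0x0102 and a 4-byte target:

    // i=3: (num >>> 24) & 0xFF = 0x00      i=1: (num >>> 8) & 0xFF = 0x01
    // i=2: (num >>> 16) & 0xFF = 0x00      i=0: (num >>> 0) & 0xFF = 0x02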
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/IntegerToDoubleTypePromoteComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/IntegerToDoubleTypePromoteComputer.java
index 014dc2a..dfd116e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/IntegerToDoubleTypePromoteComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/hierachy/IntegerToDoubleTypePromoteComputer.java
@@ -34,7 +34,7 @@
storageForPromotedValue.getDataOutput().writeByte(ATypeTag.DOUBLE.serialize());
long val = 0L;
for (int i = 0; i < length; i++) {
- val += ((long)(data[start + i] & 0xff)) << (8 * (length - 1 - i));
+ val += ((long) (data[start + i] & 0xff)) << (8 * (length - 1 - i));
}
DoubleSerializerDeserializer.INSTANCE.serialize(Double.valueOf(val), storageForPromotedValue.getDataOutput());
}
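This is the inverse of the promote-computer's write loop: reassemble a big-endian integer, then widen it to double. For bytes {0x01, 0x02} (length = 2):

    // i=0: (0x01 & 0xff) << 8 = 0x0100
    // i=1: (0x02 & 0xff) << 0 = 0x0002
    // val = 0x0102 = 258 -> serialized as the double 258.0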
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixAppContextInfo.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixAppContextInfo.java
index 0f836af..cef7937 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixAppContextInfo.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixAppContextInfo.java
@@ -27,6 +27,7 @@
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -38,7 +39,6 @@
public class AsterixAppContextInfo implements IAsterixApplicationContextInfo, IAsterixPropertiesProvider {
private static AsterixAppContextInfo INSTANCE;
-
private final ICCApplicationContext appCtx;
private AsterixCompilerProperties compilerProperties;
@@ -47,9 +47,11 @@
private AsterixStorageProperties storageProperties;
private AsterixTransactionProperties txnProperties;
- public static void initialize(ICCApplicationContext ccAppCtx) throws AsterixException {
+ private IHyracksClientConnection hcc;
+
+ public static void initialize(ICCApplicationContext ccAppCtx, IHyracksClientConnection hcc) throws AsterixException {
if (INSTANCE == null) {
- INSTANCE = new AsterixAppContextInfo(ccAppCtx);
+ INSTANCE = new AsterixAppContextInfo(ccAppCtx, hcc);
}
AsterixPropertiesAccessor propertiesAccessor = new AsterixPropertiesAccessor();
INSTANCE.compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
@@ -57,11 +59,13 @@
INSTANCE.metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
INSTANCE.storageProperties = new AsterixStorageProperties(propertiesAccessor);
INSTANCE.txnProperties = new AsterixTransactionProperties(propertiesAccessor);
+ INSTANCE.hcc = hcc;
Logger.getLogger("edu.uci.ics").setLevel(INSTANCE.externalProperties.getLogLevel());
}
- private AsterixAppContextInfo(ICCApplicationContext ccAppCtx) {
+ private AsterixAppContextInfo(ICCApplicationContext ccAppCtx, IHyracksClientConnection hcc) {
this.appCtx = ccAppCtx;
+ this.hcc = hcc;
}
public static AsterixAppContextInfo getInstance() {
@@ -98,6 +102,10 @@
return externalProperties;
}
+ public IHyracksClientConnection getHcc() {
+ return hcc;
+ }
+
@Override
public IIndexLifecycleManagerProvider getIndexLifecycleManagerProvider() {
return AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER;
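The singleton now has to be initialized with the client connection as well; a sketch of the expected bootstrap order (the HyracksConnection constructor is an assumption, not shown in this patch):

    ICCApplicationContext appCtx = ...;                                   // provided by the CC
    IHyracksClientConnection hcc = new HyracksConnection(ccHost, ccPort); // assumed ctor
    AsterixAppContextInfo.initialize(appCtx, hcc);
    IHyracksClientConnection same = AsterixAppContextInfo.getInstance().getHcc();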
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixClusterProperties.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixClusterProperties.java
index 6e6e09a..9af206bb 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixClusterProperties.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixClusterProperties.java
@@ -14,11 +14,25 @@
*/
package edu.uci.ics.asterix.om.util;
+import java.io.InputStream;
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.HashSet;
+import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+
/**
* A holder class for properties related to the Asterix cluster.
*/
@@ -27,13 +41,31 @@
private static final Logger LOGGER = Logger.getLogger(AsterixClusterProperties.class.getName());
- private static final String IO_DEVICES = "iodevices";
-
public static final AsterixClusterProperties INSTANCE = new AsterixClusterProperties();
+ public static final String CLUSTER_CONFIGURATION_FILE = "cluster.xml";
+
+ private static final String IO_DEVICES = "iodevices";
private Map<String, Map<String, String>> ncConfiguration = new HashMap<String, Map<String, String>>();
+ private final Cluster cluster;
+
+ private AlgebricksAbsolutePartitionConstraint clusterPartitionConstraint;
+
private AsterixClusterProperties() {
+ InputStream is = this.getClass().getClassLoader().getResourceAsStream(CLUSTER_CONFIGURATION_FILE);
+ if (is != null) {
+ try {
+ JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+ Unmarshaller unmarshaller = ctx.createUnmarshaller();
+ cluster = (Cluster) unmarshaller.unmarshal(is);
+
+ } catch (JAXBException e) {
+ throw new IllegalStateException("Failed to read configuration file " + CLUSTER_CONFIGURATION_FILE);
+ }
+ } else {
+ cluster = null;
+ }
}
public enum State {
@@ -43,12 +75,13 @@
private State state = State.UNUSABLE;
- public void removeNCConfiguration(String nodeId) {
- state = State.UNUSABLE;
+ public synchronized void removeNCConfiguration(String nodeId) {
+ // state = State.UNUSABLE;
ncConfiguration.remove(nodeId);
+ resetClusterPartitionConstraint();
}
- public void addNCConfiguration(String nodeId, Map<String, String> configuration) {
+ public synchronized void addNCConfiguration(String nodeId, Map<String, String> configuration) {
ncConfiguration.put(nodeId, configuration);
if (ncConfiguration.keySet().size() == AsterixAppContextInfo.getInstance().getMetadataProperties()
.getNodeNames().size()) {
@@ -57,6 +90,7 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info(" Registering configuration parameters for node id" + nodeId);
}
+ resetClusterPartitionConstraint();
}
/**
@@ -64,10 +98,11 @@
*
* @param nodeId
* unique identifier of the Node Controller
- * @return number of IO devices. -1 if the node id is not valid. A node id is not valid
- * if it does not correspond to the set of registered Node Controllers.
+ * @return number of IO devices. -1 if the node id is not valid. A node id
+ * is not valid if it does not correspond to the set of registered
+ * Node Controllers.
*/
- public int getNumberOfIODevices(String nodeId) {
+ public synchronized int getNumberOfIODevices(String nodeId) {
Map<String, String> ncConfig = ncConfiguration.get(nodeId);
if (ncConfig == null) {
if (LOGGER.isLoggable(Level.WARNING)) {
@@ -78,7 +113,7 @@
}
return ncConfig.get(IO_DEVICES).split(",").length;
}
-
+
/**
* Returns the IO devices configured for a Node Controller
*
@@ -103,4 +138,105 @@
return state;
}
+ public Cluster getCluster() {
+ return cluster;
+ }
+
+ public synchronized Node getAvailableSubstitutionNode() {
+ List<Node> subNodes = cluster == null || cluster.getSubstituteNodes() == null ? null : cluster.getSubstituteNodes().getNode();
+ return subNodes == null || subNodes.isEmpty() ? null : subNodes.get(0);
+ }
+
+ public synchronized Set<String> getParticipantNodes() {
+ Set<String> participantNodes = new HashSet<String>();
+ for (String pNode : ncConfiguration.keySet()) {
+ participantNodes.add(pNode);
+ }
+ return participantNodes;
+ }
+
+ public synchronized AlgebricksPartitionConstraint getClusterLocations() {
+ if (clusterPartitionConstraint == null) {
+ resetClusterPartitionConstraint();
+ }
+ return clusterPartitionConstraint;
+ }
+
+ private synchronized void resetClusterPartitionConstraint() {
+ Map<String, String[]> stores = AsterixAppContextInfo.getInstance().getMetadataProperties().getStores();
+ ArrayList<String> locs = new ArrayList<String>();
+ for (String i : stores.keySet()) {
+ String[] nodeStores = stores.get(i);
+ int numIODevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(i);
+ for (int j = 0; j < nodeStores.length; j++) {
+ for (int k = 0; k < numIODevices; k++) {
+ locs.add(i);
+ }
+ }
+ }
+ String[] locations = locs.toArray(new String[locs.size()]);
+ clusterPartitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations);
+ }
+
+ private static class AsterixCluster {
+
+ private final String asterixInstance;
+ private Map<String, AsterixNode> asterixNodes;
+
+ public AsterixCluster(Cluster cluster) {
+ asterixInstance = cluster.getInstanceName();
+ asterixNodes = new HashMap<String, AsterixNode>();
+ for (Node node : cluster.getNode()) {
+ AsterixNode aNode = new AsterixNode(node, AsterixNode.NodeRole.PARTICIPANT,
+ AsterixNode.NodeState.INACTIVE);
+ asterixNodes.put(asterixInstance + "_" + node.getId(), aNode);
+ }
+
+ for (Node node : cluster.getSubstituteNodes().getNode()) {
+ AsterixNode aNode = new AsterixNode(node, AsterixNode.NodeRole.SUBSTITUTE,
+ AsterixNode.NodeState.INACTIVE);
+ asterixNodes.put(asterixInstance + "_" + node.getId(), aNode);
+ }
+ }
+
+ private static class AsterixNode {
+
+ private final Node node;
+ private NodeRole role;
+ private NodeState state;
+
+ public enum NodeRole {
+ PARTICIPANT,
+ SUBSTITUTE
+ }
+
+ public enum NodeState {
+ ACTIVE,
+ INACTIVE
+ }
+
+ public AsterixNode(Node node, NodeRole role, NodeState state) {
+ this.node = node;
+ this.role = role;
+ this.state = state;
+ }
+
+ @Override
+ public String toString() {
+ return node.getId() + "_" + role + "_" + state;
+ }
+ }
+
+ public void notifyChangeState(String nodeId, AsterixNode.NodeRole newRole, AsterixNode.NodeState newState) {
+ AsterixNode node = asterixNodes.get(nodeId);
+ if (node != null) {
+ node.role = newRole;
+ node.state = newState;
+ } else {
+ throw new IllegalStateException("Unknown nodeId" + nodeId);
+ }
+
+ }
+ }
}
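resetClusterPartitionConstraint repeats each node id once per (store directory x iodevice) pair, so the constraint length equals the total partition count. Worked example with hypothetical nodes:

    // nc1: 2 store dirs, 3 iodevices -> 6 entries
    // nc2: 1 store dir,  2 iodevices -> 2 entries
    // locs = [nc1 x6, nc2 x2] -> AlgebricksAbsolutePartitionConstraint over 8 locations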
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
index c12d8b8..e3d87d1 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
@@ -121,6 +121,7 @@
case DURATION:
return 12;
case POINT:
+ case UUID:
return 16;
case INTERVAL:
return 17;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
index 795a4a6..00ffa11 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
@@ -19,7 +19,6 @@
/**
* This class extends ByteArrayAccessibleOutputStream to allow reset to a given
* size.
- *
*/
public class ResettableByteArrayOutputStream extends ByteArrayAccessibleOutputStream {
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
index 8014727..8a25cf2 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
@@ -21,7 +21,6 @@
/**
* Object pool backed by a list.
- *
* The argument for creating E instances could be different. This class also
* considers arguments in object reusing, e.g., it reuses an E instances ONLY
* when the construction argument is "equal".
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
index 3b83d79..8952651 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
@@ -40,6 +40,7 @@
import edu.uci.ics.asterix.om.base.ARectangle;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.AUUID;
import edu.uci.ics.asterix.om.base.AUnorderedList;
import edu.uci.ics.asterix.om.base.AYearMonthDuration;
import edu.uci.ics.asterix.om.types.IAType;
@@ -100,4 +101,6 @@
public void visitACircle(ACircle obj) throws AsterixException;
public void visitARectangle(ARectangle obj) throws AsterixException;
+
+ public void visitAUUID(AUUID obj) throws AsterixException;
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
index d0348f9..34b9f83 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
@@ -41,6 +41,7 @@
import edu.uci.ics.asterix.om.base.ARectangle;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.AUUID;
import edu.uci.ics.asterix.om.base.AUnorderedList;
import edu.uci.ics.asterix.om.base.AYearMonthDuration;
import edu.uci.ics.asterix.om.base.IACursor;
@@ -280,4 +281,9 @@
throw new NotImplementedException();
}
+ @Override
+ public void visitAUUID(AUUID obj) throws AsterixException {
+ throw new NotImplementedException();
+ }
+
}
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index 4a3ece9..e5fe2f0 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-runtime</artifactId>
@@ -37,7 +37,7 @@
<plugin>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>lexer-generator-maven-plugin</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<configuration>
<grammarFile>src/main/resources/adm.grammar</grammarFile>
<outputDir>${project.build.directory}/generated-sources/edu/uci/ics/asterix/runtime/operators/file/adm</outputDir>
@@ -133,7 +133,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -149,7 +149,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/AbstractAggregateFunctionDynamicDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/AbstractAggregateFunctionDynamicDescriptor.java
index e4d1468..b92c422 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/AbstractAggregateFunctionDynamicDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/AbstractAggregateFunctionDynamicDescriptor.java
@@ -17,12 +17,11 @@
import edu.uci.ics.asterix.common.functions.FunctionDescriptorTag;
import edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor;
-public abstract class AbstractAggregateFunctionDynamicDescriptor extends AbstractFunctionDescriptor {
+public abstract class AbstractAggregateFunctionDynamicDescriptor extends AbstractFunctionDescriptor {
private static final long serialVersionUID = 1L;
public FunctionDescriptorTag getFunctionDescriptorTag() {
return FunctionDescriptorTag.AGGREGATE;
}
-
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/SingleFieldFrameTupleReference.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/SingleFieldFrameTupleReference.java
index 9261537..b8733b0 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/SingleFieldFrameTupleReference.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/base/SingleFieldFrameTupleReference.java
@@ -22,13 +22,13 @@
private byte[] fieldData;
private int start;
private int length;
-
+
public void reset(byte[] fieldData, int start, int length) {
this.fieldData = fieldData;
this.start = start;
this.length = length;
}
-
+
@Override
public int getFieldCount() {
return 1;
@@ -41,12 +41,12 @@
@Override
public int getFieldStart(int fIdx) {
- return start;
+ return start;
}
@Override
public int getFieldLength(int fIdx) {
- return length;
+ return length;
}
@Override
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/collections/ListifyAggregateFunctionEvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/collections/ListifyAggregateFunctionEvalFactory.java
index a9bd339..bf87f08 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/collections/ListifyAggregateFunctionEvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/collections/ListifyAggregateFunctionEvalFactory.java
@@ -40,7 +40,8 @@
}
@Override
- public ICopyAggregateFunction createAggregateFunction(final IDataOutputProvider provider) throws AlgebricksException {
+ public ICopyAggregateFunction createAggregateFunction(final IDataOutputProvider provider)
+ throws AlgebricksException {
return new ICopyAggregateFunction() {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateDescriptor.java
index c03fe91..231ce34 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateDescriptor.java
@@ -42,7 +42,7 @@
public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopySerializableAggregateFunctionFactory() {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
@Override
public ICopySerializableAggregateFunction createAggregateFunction() throws AlgebricksException {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
index 0d86176..bea6ab8 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
@@ -85,8 +85,8 @@
} else if (aggType == ATypeTag.SYSTEM_NULL) {
aggType = typeTag;
} else if (typeTag != ATypeTag.SYSTEM_NULL && typeTag != aggType) {
- throw new AlgebricksException("Unexpected type " + typeTag
- + " in aggregation input stream. Expected type " + aggType + ".");
+ throw new AlgebricksException("Unexpected type " + typeTag + " in aggregation input stream. Expected type "
+ + aggType + ".");
}
switch (typeTag) {
case INT8: {
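For context, the reflowed guard above enforces one concrete numeric type per aggregation stream. A minimal standalone sketch of that rule (plain Java with assumed names, not the Asterix API): the first concrete tag seen fixes the aggregate type, SYSTEM_NULL inputs leave it unchanged, and any later mismatching tag aborts the aggregation.

    public final class SumTagGuard {
        enum Tag { SYSTEM_NULL, INT8, INT32, DOUBLE }

        // Returns the (possibly updated) aggregate type for one input tag.
        static Tag check(Tag aggType, Tag typeTag) {
            if (aggType == Tag.SYSTEM_NULL) {
                return typeTag;             // first concrete tag fixes the aggregate type
            }
            if (typeTag != Tag.SYSTEM_NULL && typeTag != aggType) {
                throw new IllegalStateException("Unexpected type " + typeTag
                        + " in aggregation input stream. Expected type " + aggType + ".");
            }
            return aggType;
        }
    }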
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
index 256af09..09a659c 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
@@ -152,7 +152,7 @@
} else if (ATypeHierarchy.canPromote(aggType, typeTag)) {
aggType = typeTag;
}
-
+
if (typeTag != ATypeTag.SYSTEM_NULL) {
++count;
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
index b972cfd..7455ae9 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
@@ -26,84 +26,84 @@
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
/**
- * Utility class for accessing serialized unordered and ordered lists.
+ * Utility class for accessing serialized unordered and ordered lists.
*/
public class AsterixListAccessor {
- protected byte[] listBytes;
- protected int start;
- protected ATypeTag listType;
- protected ATypeTag itemType;
- protected int size;
-
- public ATypeTag getListType() {
- return listType;
- }
+ protected byte[] listBytes;
+ protected int start;
+ protected ATypeTag listType;
+ protected ATypeTag itemType;
+ protected int size;
- public ATypeTag getItemType() {
- return itemType;
- }
+ public ATypeTag getListType() {
+ return listType;
+ }
- public boolean itemsAreSelfDescribing() {
- return itemType == ATypeTag.ANY;
- }
+ public ATypeTag getItemType() {
+ return itemType;
+ }
- public void reset(byte[] listBytes, int start) throws AsterixException {
- this.listBytes = listBytes;
- this.start = start;
- listType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start]);
- if (listType != ATypeTag.UNORDEREDLIST && listType != ATypeTag.ORDEREDLIST) {
- throw new AsterixException("Unsupported type: " + listType);
- }
- itemType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start + 1]);
- if (listType == ATypeTag.UNORDEREDLIST) {
- size = AUnorderedListSerializerDeserializer.getNumberOfItems(listBytes, start);
- } else {
- size = AOrderedListSerializerDeserializer.getNumberOfItems(listBytes, start);
- }
- }
+ public boolean itemsAreSelfDescribing() {
+ return itemType == ATypeTag.ANY;
+ }
- public int size() {
- return size;
- }
-
- public int getItemOffset(int itemIndex) throws AsterixException {
- if (listType == ATypeTag.UNORDEREDLIST) {
- return AUnorderedListSerializerDeserializer.getItemOffset(listBytes, start, itemIndex);
- } else {
- return AOrderedListSerializerDeserializer.getItemOffset(listBytes, start, itemIndex);
- }
- }
-
- public int getItemLength(int itemOffset) throws AsterixException {
- ATypeTag itemType = getItemType(itemOffset);
- return NonTaggedFormatUtil.getFieldValueLength(listBytes, itemOffset, itemType, itemsAreSelfDescribing());
- }
-
- public ATypeTag getItemType(int itemOffset) throws AsterixException {
- if (itemType == ATypeTag.ANY) {
- return EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[itemOffset]);
- } else {
- return itemType;
- }
- }
-
- public void writeItem(int itemIndex, DataOutput dos) throws AsterixException, IOException {
- int itemOffset = getItemOffset(itemIndex);
- int itemLength = getItemLength(itemOffset);
- if (itemsAreSelfDescribing()) {
- ++itemLength;
- } else {
- dos.writeByte(itemType.serialize());
- }
- dos.write(listBytes, itemOffset, itemLength);
- }
-
- public byte[] getByteArray() {
- return listBytes;
- }
-
- public int getStart() {
- return start;
- }
+ public void reset(byte[] listBytes, int start) throws AsterixException {
+ this.listBytes = listBytes;
+ this.start = start;
+ listType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start]);
+ if (listType != ATypeTag.UNORDEREDLIST && listType != ATypeTag.ORDEREDLIST) {
+ throw new AsterixException("Unsupported type: " + listType);
+ }
+ itemType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start + 1]);
+ if (listType == ATypeTag.UNORDEREDLIST) {
+ size = AUnorderedListSerializerDeserializer.getNumberOfItems(listBytes, start);
+ } else {
+ size = AOrderedListSerializerDeserializer.getNumberOfItems(listBytes, start);
+ }
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public int getItemOffset(int itemIndex) throws AsterixException {
+ if (listType == ATypeTag.UNORDEREDLIST) {
+ return AUnorderedListSerializerDeserializer.getItemOffset(listBytes, start, itemIndex);
+ } else {
+ return AOrderedListSerializerDeserializer.getItemOffset(listBytes, start, itemIndex);
+ }
+ }
+
+ public int getItemLength(int itemOffset) throws AsterixException {
+ ATypeTag itemType = getItemType(itemOffset);
+ return NonTaggedFormatUtil.getFieldValueLength(listBytes, itemOffset, itemType, itemsAreSelfDescribing());
+ }
+
+ public ATypeTag getItemType(int itemOffset) throws AsterixException {
+ if (itemType == ATypeTag.ANY) {
+ return EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[itemOffset]);
+ } else {
+ return itemType;
+ }
+ }
+
+ public void writeItem(int itemIndex, DataOutput dos) throws AsterixException, IOException {
+ int itemOffset = getItemOffset(itemIndex);
+ int itemLength = getItemLength(itemOffset);
+ if (itemsAreSelfDescribing()) {
+ ++itemLength;
+ } else {
+ dos.writeByte(itemType.serialize());
+ }
+ dos.write(listBytes, itemOffset, itemLength);
+ }
+
+ public byte[] getByteArray() {
+ return listBytes;
+ }
+
+ public int getStart() {
+ return start;
+ }
}
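AsterixListAccessor is only reindented above; its behavior is unchanged. A hedged usage sketch built solely from the methods visible in this hunk, assuming the byte array holds a tagged ordered or unordered list starting at offset 0:

    import java.io.DataOutput;

    // Copies every item of a serialized list to 'out'.
    static void copyAllItems(byte[] listBytes, DataOutput out) throws Exception {
        AsterixListAccessor accessor = new AsterixListAccessor();
        accessor.reset(listBytes, 0);       // throws AsterixException on a non-list tag
        for (int i = 0; i < accessor.size(); i++) {
            accessor.writeItem(i, out);     // re-tags items a homogeneous list stores untagged
        }
    }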
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/CreateMBREvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
index dfb236e..9d6d4e1 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
@@ -80,7 +80,8 @@
// type-check: (Point/Line/Polygon/Circle/Rectangle/Null, Int32, Int32)
if (outInput1.getByteArray()[0] != SER_INT32_TYPE_TAG
|| outInput2.getByteArray()[0] != SER_INT32_TYPE_TAG) {
- throw new AlgebricksException("Expects Types: (Point/Line/Polygon/Circle/Rectangle/Null, Int32, Int32).");
+ throw new AlgebricksException(
+ "Expects Types: (Point/Line/Polygon/Circle/Rectangle/Null, Int32, Int32).");
}
try {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/FieldAccessByIndexEvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/FieldAccessByIndexEvalFactory.java
index d7a409d..20bd366 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/FieldAccessByIndexEvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/FieldAccessByIndexEvalFactory.java
@@ -49,8 +49,8 @@
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
- public FieldAccessByIndexEvalFactory(ICopyEvaluatorFactory recordEvalFactory, ICopyEvaluatorFactory fieldIndexEvalFactory,
- ARecordType recordType) {
+ public FieldAccessByIndexEvalFactory(ICopyEvaluatorFactory recordEvalFactory,
+ ICopyEvaluatorFactory fieldIndexEvalFactory, ARecordType recordType) {
this.recordEvalFactory = recordEvalFactory;
this.fieldIndexEvalFactory = fieldIndexEvalFactory;
this.recordType = recordType;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedCheckEvaluator.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedCheckEvaluator.java
index 390d1de..11f1e3d 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedCheckEvaluator.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedCheckEvaluator.java
@@ -23,7 +23,7 @@
public class SimilarityJaccardSortedCheckEvaluator extends SimilarityJaccardCheckEvaluator {
protected final SimilarityMetricJaccard jaccard = new SimilarityMetricJaccard();
-
+
public SimilarityJaccardSortedCheckEvaluator(ICopyEvaluatorFactory[] args, IDataOutputProvider output)
throws AlgebricksException {
super(args, output);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedEvaluator.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedEvaluator.java
index 9318bd4..67b9ccc 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedEvaluator.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/SimilarityJaccardSortedEvaluator.java
@@ -24,11 +24,11 @@
public class SimilarityJaccardSortedEvaluator extends SimilarityJaccardEvaluator {
protected final SimilarityMetricJaccard jaccard = new SimilarityMetricJaccard();
-
- public SimilarityJaccardSortedEvaluator(ICopyEvaluatorFactory[] args,
- IDataOutputProvider output) throws AlgebricksException {
- super(args, output);
- }
+
+ public SimilarityJaccardSortedEvaluator(ICopyEvaluatorFactory[] args, IDataOutputProvider output)
+ throws AlgebricksException {
+ super(args, output);
+ }
protected float computeResult(byte[] bytes, int firstStart, int secondStart, ATypeTag argType)
throws AlgebricksException {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/ComparisonEvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/ComparisonEvalFactory.java
index 20327ff..d62db02 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/ComparisonEvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/ComparisonEvalFactory.java
@@ -45,7 +45,7 @@
public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
DataOutput out = output.getDataOutput();
switch (comparisonKind) {
- // Should we do any normalization?
+ // Should we do any normalization?
case EQ: {
return new EqualityComparisonEvaluator(out, evalLeftFactory, evalRightFactory);
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
index 29e4a77..732cd0e 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
@@ -116,7 +116,7 @@
if (intervalStart < 0) {
intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
}
-
+
if (argOut1.getByteArray()[0] == SER_TIME_TYPE_TAG) {
intervalEnd = ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
} else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
@@ -135,12 +135,12 @@
if (intervalEnd < 0) {
intervalEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
}
-
+
if (intervalEnd < intervalStart) {
throw new AlgebricksException(FID.getName()
+ ": interval end must not be less than the interval start.");
}
-
+
aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
intervalSerde.serialize(aInterval, out);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
index 08105a2..7cf588e 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
@@ -16,7 +16,16 @@
import java.io.DataOutput;
import java.io.IOException;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
@@ -32,13 +41,12 @@
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AStringConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
return new AStringConstructorDescriptor();
@@ -52,34 +60,109 @@
@Override
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
- return new ICopyEvaluator() {
+ try {
+ return new ICopyEvaluator() {
- private DataOutput out = output.getDataOutput();
- private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
- private ICopyEvaluator eval = args[0].createEvaluator(outInput);
- private String errorMessage = "This can not be an instance of string";
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
+ private ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ private PrintStream ps = new PrintStream(baaos, false, "UTF-8");
+ private ICopyEvaluator eval = args[0].createEvaluator(outInput);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
- @Override
- public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ try {
+ outInput.reset();
+ eval.evaluate(tuple);
+ byte[] serString = outInput.getByteArray();
- try {
- outInput.reset();
- eval.evaluate(tuple);
- byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- out.write(outInput.getByteArray(), outInput.getStartOffset(), outInput.getLength());
- } else if (serString[0] == SER_NULL_TYPE_TAG)
- nullSerde.serialize(ANull.NULL, out);
- else
- throw new AlgebricksException(errorMessage);
- } catch (IOException e1) {
- throw new AlgebricksException(errorMessage);
+ ATypeTag tt = ATypeTag.VALUE_TYPE_MAPPING[serString[0]];
+ if (tt == ATypeTag.NULL) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (tt == ATypeTag.STRING) {
+ out.write(outInput.getByteArray(), outInput.getStartOffset(), outInput.getLength());
+ } else {
+ baaos.write(0);
+ baaos.write(0);
+ switch (tt) {
+ case INT8: {
+ int i = AInt8SerializerDeserializer.getByte(outInput.getByteArray(), 1);
+ ps.print(i);
+ break;
+ }
+ case INT16: {
+ int i = AInt16SerializerDeserializer.getShort(outInput.getByteArray(), 1);
+ ps.print(i);
+ break;
+ }
+ case INT32: {
+ int i = AInt32SerializerDeserializer.getInt(outInput.getByteArray(), 1);
+ ps.print(i);
+ break;
+ }
+ case INT64: {
+ long l = AInt64SerializerDeserializer.getLong(outInput.getByteArray(), 1);
+ ps.print(l);
+ break;
+ }
+ case DOUBLE: {
+ double d = ADoubleSerializerDeserializer.getDouble(outInput.getByteArray(),
+ 1);
+ ps.print(d);
+ break;
+ }
+ case FLOAT: {
+ float f = AFloatSerializerDeserializer.getFloat(outInput.getByteArray(), 1);
+ ps.print(f);
+ break;
+ }
+ case BOOLEAN: {
+ boolean b = ABooleanSerializerDeserializer.getBoolean(
+ outInput.getByteArray(), 1);
+ ps.print(b);
+ break;
+ }
+
+ // NotYetImplemented
+ case CIRCLE:
+ case DATE:
+ case DATETIME:
+ case LINE:
+ case TIME:
+ case DURATION:
+ case YEARMONTHDURATION:
+ case DAYTIMEDURATION:
+ case INTERVAL:
+ case ORDEREDLIST:
+ case POINT:
+ case POINT3D:
+ case RECTANGLE:
+ case POLYGON:
+ case RECORD:
+ case UNORDEREDLIST:
+ case UUID:
+ default:
+ throw new AlgebricksException("string of " + tt + " not supported");
+ }
+ ps.flush();
+ byte[] tmpStrBytes = baaos.getByteArray();
+ int utfLen = baaos.size() - 2;
+ tmpStrBytes[0] = (byte) ((utfLen >>> 8) & 0xFF);
+ tmpStrBytes[1] = (byte) ((utfLen >>> 0) & 0xFF);
+ out.write(ATypeTag.STRING.serialize());
+ out.write(tmpStrBytes);
+ }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
}
- }
- };
+ };
+ } catch (UnsupportedEncodingException e) {
+ throw new AlgebricksException(e);
+ }
}
};
}
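The rewritten string constructor above builds its UTF payload by reserving a two-byte length header, printing the value, and back-patching the big-endian length once the encoded size is known. A standalone sketch of that trick using only JDK classes (not the Asterix serializers):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public final class Utf8LengthPatch {
        static byte[] encode(Object value) throws Exception {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            PrintStream ps = new PrintStream(baos, false, "UTF-8");
            baos.write(0);                  // placeholder: high byte of the UTF length
            baos.write(0);                  // placeholder: low byte of the UTF length
            ps.print(value);                // the numeric or boolean argument
            ps.flush();
            byte[] bytes = baos.toByteArray();
            int utfLen = bytes.length - 2;
            bytes[0] = (byte) ((utfLen >>> 8) & 0xFF);
            bytes[1] = (byte) (utfLen & 0xFF);
            return bytes;                   // 2-byte length header + UTF-8 payload
        }
    }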
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java
index c1817e5..fb0084c 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastListDescriptor.java
@@ -39,7 +39,6 @@
* The runtime function for casting a list(unordered list or ordered list)
*
* @author yingyib
- *
*/
public class CastListDescriptor extends AbstractScalarFunctionDynamicDescriptor {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CodepointIterator.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CodepointIterator.java
index 925e4d4..77799cb 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CodepointIterator.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CodepointIterator.java
@@ -17,14 +17,16 @@
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
public class CodepointIterator {
- public void reset(byte [] buf, int startPos) {
+ public void reset(byte[] buf, int startPos) {
this.buf = buf;
this.curPos = startPos + 2;
this.startPos = startPos;
- len = UTF8StringPointable.getUTFLength(buf, startPos);
+ len = UTF8StringPointable.getUTFLength(buf, startPos);
}
-
- public int size() { return len; }
+
+ public int size() {
+ return len;
+ }
private byte[] buf;
private int curPos = 0;
@@ -40,66 +42,54 @@
// 1 byte
return b[s];
} else if ((b[s] & 0xe0) == 0xc0) { /*
- * 0xe0 = 0b1110000
- */
+ * 0xe0 = 0b11100000
+ */
// 2 bytes
- return ((int) (b[s] & 0x1f)) << 6
- | /*
- * 0x3f = 0b00111111
- */ ((int) (b[s + 1] & 0x3f));
+ return ((int) (b[s] & 0x1f)) << 6 | /*
+ * 0x3f = 0b00111111
+ */((int) (b[s + 1] & 0x3f));
} else if ((b[s] & 0xf0) == 0xe0) {
// 3bytes
- return ((int) (b[s] & 0xf)) << 12
- | ((int) (b[s + 1] & 0x3f)) << 6
- | ((int) (b[s + 2] & 0x3f));
+ return ((int) (b[s] & 0xf)) << 12 | ((int) (b[s + 1] & 0x3f)) << 6 | ((int) (b[s + 2] & 0x3f));
} else if ((b[s] & 0xf8) == 0xf0) {
// 4bytes
- return ((int) (b[s] & 0x7)) << 18
- | ((int) (b[s + 1] & 0x3f)) << 12
- | ((int) (b[s + 2] & 0x3f)) << 6
+ return ((int) (b[s] & 0x7)) << 18 | ((int) (b[s + 1] & 0x3f)) << 12 | ((int) (b[s + 2] & 0x3f)) << 6
| ((int) (b[s + 3] & 0x3f));
} else if ((b[s] & 0xfc) == 0xf8) {
// 5bytes
- return ((int) (b[s] & 0x3)) << 24
- | ((int) (b[s + 1] & 0x3f)) << 18
- | ((int) (b[s + 2] & 0x3f)) << 12
- | ((int) (b[s + 3] & 0x3f)) << 6
- | ((int) (b[s + 4] & 0x3f));
+ return ((int) (b[s] & 0x3)) << 24 | ((int) (b[s + 1] & 0x3f)) << 18 | ((int) (b[s + 2] & 0x3f)) << 12
+ | ((int) (b[s + 3] & 0x3f)) << 6 | ((int) (b[s + 4] & 0x3f));
} else if ((b[s] & 0xfe) == 0xfc) {
// 6bytes
- return ((int) (b[s] & 0x1)) << 30
- | ((int) (b[s + 1] & 0x3f)) << 24
- | ((int) (b[s + 2] & 0x3f)) << 18
- | ((int) (b[s + 3] & 0x3f)) << 12
- | ((int) (b[s + 4] & 0x3f)) << 6
- | ((int) (b[s + 5] & 0x3f));
+ return ((int) (b[s] & 0x1)) << 30 | ((int) (b[s + 1] & 0x3f)) << 24 | ((int) (b[s + 2] & 0x3f)) << 18
+ | ((int) (b[s + 3] & 0x3f)) << 12 | ((int) (b[s + 4] & 0x3f)) << 6 | ((int) (b[s + 5] & 0x3f));
}
return 0;
}
public void next() {
int step = UTF8StringPointable.charSize(buf, curPos);
- if(step + curPos < len + 2 + startPos)
+ if (step + curPos < len + 2 + startPos)
curPos += step;
}
public boolean hasNext() {
int step = UTF8StringPointable.charSize(buf, curPos);
- if(step + curPos < len + 2 + startPos)
- return true;
+ if (step + curPos < len + 2 + startPos)
+ return true;
return false;
}
-
+
public static int compare(CodepointIterator ls, CodepointIterator rs) {
CodepointIterator shortString = ls.size() < rs.size() ? ls : rs;
-
+
while (true) {
int c1 = ls.getCodePoint();
int c2 = rs.getCodePoint();
if (c1 != c2) {
return c1 - c2;
}
- if(shortString.hasNext()) {
+ if (shortString.hasNext()) {
ls.next();
rs.next();
} else {
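The reflowed bit arithmetic above decodes UTF-8 sequences by hand. For reference, a standalone sketch of the one- to three-byte cases; the four- to six-byte branches in the hunk follow the same pattern:

    public final class Utf8CodePoint {
        static int codePointAt(byte[] b, int s) {
            if ((b[s] & 0x80) == 0) {
                return b[s];                                     // 0xxxxxxx: one byte
            } else if ((b[s] & 0xe0) == 0xc0) {
                return ((b[s] & 0x1f) << 6) | (b[s + 1] & 0x3f); // 110xxxxx 10xxxxxx
            } else if ((b[s] & 0xf0) == 0xe0) {
                return ((b[s] & 0x0f) << 12) | ((b[s + 1] & 0x3f) << 6)
                        | (b[s + 2] & 0x3f);                     // 1110xxxx 10xxxxxx 10xxxxxx
            }
            return 0;                                            // 4-6 byte forms elided here
        }
    }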
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePointDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
index 24a68cd..46ee22b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
@@ -20,6 +20,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AMutablePoint;
+import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.APoint;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
@@ -43,6 +44,7 @@
// allowed input type
private static final byte SER_DOUBLE_TYPE_TAG = ATypeTag.DOUBLE.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -78,8 +80,8 @@
eval1.evaluate(tuple);
// type-check: (double, double)
- if (outInput0.getByteArray()[0] != SER_DOUBLE_TYPE_TAG
- || outInput1.getByteArray()[0] != SER_DOUBLE_TYPE_TAG) {
+ if ((outInput0.getByteArray()[0] != SER_DOUBLE_TYPE_TAG && outInput0.getByteArray()[0] != SER_NULL_TYPE_TAG)
+ || (outInput1.getByteArray()[0] != SER_DOUBLE_TYPE_TAG && outInput1.getByteArray()[0] != SER_NULL_TYPE_TAG)) {
throw new AlgebricksException(AsterixBuiltinFunctions.CREATE_POINT.getName()
+ ": expects input type: (DOUBLE, DOUBLE) but got ("
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput0.getByteArray()[0])
@@ -89,10 +91,15 @@
}
try {
- aPoint.setValue(ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(), 1),
- ADoubleSerializerDeserializer.getDouble(outInput1.getByteArray(), 1));
- pointSerde.serialize(aPoint, out);
-
+ if (outInput0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || outInput1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL)
+ .serialize(ANull.NULL, out);
+ } else {
+ aPoint.setValue(ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(), 1),
+ ADoubleSerializerDeserializer.getDouble(outInput1.getByteArray(), 1));
+ pointSerde.serialize(aPoint, out);
+ }
} catch (IOException e1) {
throw new AlgebricksException(e1);
}
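The change above widens create-point's type check to admit NULL in either argument and short-circuits to a serialized NULL instead of raising an error. A standalone sketch of the pattern; the tag constants are illustrative, not the real serialized tag values:

    import java.nio.ByteBuffer;

    public final class CreatePointSketch {
        static final byte DOUBLE_TAG = 1, NULL_TAG = 2;          // illustrative tags only

        static double[] createPoint(byte[] arg0, byte[] arg1) {
            boolean null0 = arg0[0] == NULL_TAG, null1 = arg1[0] == NULL_TAG;
            if ((arg0[0] != DOUBLE_TAG && !null0) || (arg1[0] != DOUBLE_TAG && !null1)) {
                throw new IllegalArgumentException("expects input type: (DOUBLE, DOUBLE)");
            }
            if (null0 || null1) {
                return null;                                     // propagate NULL, do not fail
            }
            return new double[] { ByteBuffer.wrap(arg0, 1, 8).getDouble(),
                    ByteBuffer.wrap(arg1, 1, 8).getDouble() };
        }
    }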
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
index 8a35f87..438f856 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
@@ -17,15 +17,24 @@
import java.io.DataOutput;
import java.io.IOException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.common.AsterixListAccessor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
@@ -33,6 +42,10 @@
public class CreatePolygonDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
+
+ private static final byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
+ private final static byte SER_POLYGON_TYPE_TAG = ATypeTag.POLYGON.serialize();
+
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
return new CreatePolygonDescriptor();
@@ -40,45 +53,79 @@
};
@Override
- public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
return new ICopyEvaluatorFactory() {
- private static final long serialVersionUID = 1L;
- private DataOutput out;
- private ArrayBackedValueStorage outInput;
+ private static final long serialVersionUID = 1L;
@Override
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
-
- final ICopyEvaluator[] argEvals = new ICopyEvaluator[args.length];
- out = output.getDataOutput();
-
- outInput = new ArrayBackedValueStorage();
-
- for (int i = 0; i < args.length; i++) {
- argEvals[i] = args[i].createEvaluator(outInput);
- }
-
return new ICopyEvaluator() {
+ private final AsterixListAccessor listAccessor = new AsterixListAccessor();
+ private final DataOutput out = output.getDataOutput();
+ private final ICopyEvaluatorFactory listEvalFactory = args[0];
+ private final ArrayBackedValueStorage outInputList = new ArrayBackedValueStorage();
+ private final ICopyEvaluator evalList = listEvalFactory.createEvaluator(outInputList);
+ @SuppressWarnings("unchecked")
+ private final ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
@Override
public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
try {
- out.writeByte(ATypeTag.POLYGON.serialize());
- out.writeShort(args.length);
- } catch (IOException e) {
- throw new AlgebricksException(e);
- }
-
- for (int i = 0; i < argEvals.length; i++) {
- outInput.reset();
- argEvals[i].evaluate(tuple);
+ outInputList.reset();
+ evalList.evaluate(tuple);
+ byte[] listBytes = outInputList.getByteArray();
+ if (listBytes[0] != SER_ORDEREDLIST_TYPE_TAG) {
+ throw new AlgebricksException(AsterixBuiltinFunctions.CREATE_POLYGON.getName()
+ + ": expects input type ORDEREDLIST, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[0]));
+ }
try {
- out.write(outInput.getByteArray(), outInput.getStartOffset() + 1,
- outInput.getLength() - 1);
- } catch (IOException e) {
+ listAccessor.reset(listBytes, 0);
+ } catch (AsterixException e) {
throw new AlgebricksException(e);
}
+ try {
+ // First, check that the list consists of valid items
+ for (int i = 0; i < listAccessor.size(); i++) {
+ int itemOffset = listAccessor.getItemOffset(i);
+ ATypeTag itemType = listAccessor.getItemType(itemOffset);
+ if (itemType != ATypeTag.DOUBLE) {
+ if (itemType == ATypeTag.NULL) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ }
+ throw new AlgebricksException(AsterixBuiltinFunctions.CREATE_POLYGON.getName()
+ + ": expects type DOUBLE/NULL for the list item but got " + itemType);
+ }
+
+ }
+ if (listAccessor.size() < 6) {
+ throw new AlgebricksException(
+ "A polygon instance must consists of at least 3 points");
+ } else if (listAccessor.size() % 2 != 0) {
+ throw new AlgebricksException(
+ "There must be an even number of double values in the list to form a polygon");
+ }
+ out.writeByte(SER_POLYGON_TYPE_TAG);
+ out.writeShort(listAccessor.size() / 2);
+
+ for (int i = 0; i < listAccessor.size() / 2; i++) {
+ int firstDoubleOffset = listAccessor.getItemOffset(i * 2);
+ int secondDoubleOffset = listAccessor.getItemOffset((i * 2) + 1);
+
+ APointSerializerDeserializer
+ .serialize(ADoubleSerializerDeserializer.getDouble(listBytes,
+ firstDoubleOffset), ADoubleSerializerDeserializer.getDouble(
+ listBytes, secondDoubleOffset), out);
+ }
+ } catch (AsterixException ex) {
+ throw new AlgebricksException(ex);
+ }
+ } catch (IOException e1) {
+ throw new AlgebricksException(e1.getMessage());
}
}
};
@@ -90,5 +137,4 @@
public FunctionIdentifier getIdentifier() {
return AsterixBuiltinFunctions.CREATE_POLYGON;
}
-
-}
\ No newline at end of file
+}
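The rewritten create-polygon evaluator takes a single ordered list of doubles instead of a variadic argument list, and validates it before serializing. Its validation rules, as a standalone sketch:

    public final class PolygonListCheck {
        static int pointCount(int listSize) {
            if (listSize < 6) {             // three points require six doubles
                throw new IllegalArgumentException(
                        "A polygon instance must consist of at least 3 points");
            }
            if (listSize % 2 != 0) {        // doubles pair into (x, y) coordinates
                throw new IllegalArgumentException(
                        "There must be an even number of double values in the list to form a polygon");
            }
            return listSize / 2;            // written as the point count after the type tag
        }
    }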
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java
new file mode 100644
index 0000000..fdb7361
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java
@@ -0,0 +1,64 @@
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableUUID;
+import edu.uci.ics.asterix.om.base.AUUID;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class CreateUUIDDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new CreateUUIDDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @SuppressWarnings("unchecked")
+ private final ISerializerDeserializer<AUUID> uuidSerDe = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AUUID);
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+ final AMutableUUID uuid = new AMutableUUID(0, 0);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ uuid.nextUUID();
+ try {
+ uuidSerDe.serialize(uuid, output.getDataOutput());
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return AsterixBuiltinFunctions.CREATE_UUID;
+ }
+
+}
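The new create-uuid descriptor reuses one evaluator (and one AMutableUUID) across tuples, advancing it with nextUUID() on every call. A rough JDK-only approximation of that per-tuple behavior; the real code mutates a single instance rather than allocating:

    import java.util.UUID;

    // Every evaluate() call must yield a fresh value for the current tuple.
    static UUID evaluatePerTuple() {
        return UUID.randomUUID();
    }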
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/FlowRecordDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/FlowRecordDescriptor.java
new file mode 100644
index 0000000..81c23cd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/FlowRecordDescriptor.java
@@ -0,0 +1,74 @@
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class FlowRecordDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new FlowRecordDescriptor();
+ }
+ };
+
+ private static final long serialVersionUID = 1L;
+ private ARecordType inputType;
+
+ public void reset(ARecordType inputType) {
+ this.inputType = inputType;
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return AsterixBuiltinFunctions.FLOW_RECORD;
+ }
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ final ICopyEvaluatorFactory recordEvalFactory = args[0];
+
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ final DataOutput out = output.getDataOutput();
+ final ArrayBackedValueStorage recordBuffer = new ArrayBackedValueStorage();
+ final ICopyEvaluator recEvaluator = recordEvalFactory.createEvaluator(recordBuffer);
+
+ return new ICopyEvaluator() {
+ // pointable allocator
+ private PointableAllocator allocator = new PointableAllocator();
+ final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ try {
+ recordBuffer.reset();
+ recEvaluator.evaluate(tuple);
+ recAccessor.set(recordBuffer);
+ out.write(recAccessor.getByteArray(), recAccessor.getStartOffset(), recAccessor.getLength());
+ } catch (Exception ioe) {
+ throw new AlgebricksException(ioe);
+ }
+ }
+ };
+ }
+ };
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
index 8439edd..a4d998b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
@@ -143,7 +143,8 @@
aDouble.setValue(Math.floor(val));
serde.serialize(aDouble, out);
} else {
- throw new NotImplementedException(AsterixBuiltinFunctions.NUMERIC_FLOOR.getName() + ": not implemented for "
+ throw new NotImplementedException(AsterixBuiltinFunctions.NUMERIC_FLOOR.getName()
+ + ": not implemented for "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut.getByteArray()[0]));
}
} catch (HyracksDataException e) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OrderedListConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OrderedListConstructorDescriptor.java
index b66b30b..31d3090 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OrderedListConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OrderedListConstructorDescriptor.java
@@ -108,7 +108,7 @@
try {
for (int i = 0; i < argEvals.length; i++) {
inputVal.reset();
- argEvals[i].evaluate(tuple);
+ argEvals[i].evaluate(tuple);
builder.addItem(inputVal);
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/RecordMergeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/RecordMergeDescriptor.java
new file mode 100644
index 0000000..9aa6057
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/RecordMergeDescriptor.java
@@ -0,0 +1,156 @@
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import java.io.IOException;
+
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.typecomputer.impl.RecordMergeTypeComputer;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+public class RecordMergeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new RecordMergeDescriptor();
+ }
+ };
+
+ private ARecordType outRecType;
+ private ARecordType inRecType0;
+ private ARecordType inRecType1;
+
+ public void reset(IAType outType, IAType inType0, IAType inType1) {
+ outRecType = RecordMergeTypeComputer.extractRecordType(outType);
+ inRecType0 = RecordMergeTypeComputer.extractRecordType(inType0);
+ inRecType1 = RecordMergeTypeComputer.extractRecordType(inType1);
+ }
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @SuppressWarnings("unchecked")
+ private final ISerializerDeserializer<ANull> nullSerDe = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ final ARecordType recType;
+ try {
+ recType = new ARecordType(outRecType.getTypeName(), outRecType.getFieldNames(),
+ outRecType.getFieldTypes(), outRecType.isOpen());
+ } catch (AsterixException e) {
+ throw new IllegalStateException();
+ }
+
+ final PointableAllocator pa = new PointableAllocator();
+ final IVisitablePointable vp0 = pa.allocateRecordValue(inRecType0);
+ final IVisitablePointable vp1 = pa.allocateRecordValue(inRecType1);
+
+ final ArrayBackedValueStorage abvs0 = new ArrayBackedValueStorage();
+ final ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
+ final ICopyEvaluator eval0 = args[0].createEvaluator(abvs0);
+ final ICopyEvaluator eval1 = args[1].createEvaluator(abvs1);
+
+ final RecordBuilder rb = new RecordBuilder();
+ rb.reset(recType);
+
+ return new ICopyEvaluator() {
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ abvs0.reset();
+ abvs1.reset();
+ rb.init();
+
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ if (abvs0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || abvs1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ try {
+ nullSerDe.serialize(ANull.NULL, output.getDataOutput());
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ return;
+ }
+
+ vp0.set(abvs0);
+ vp1.set(abvs1);
+
+ ARecordPointable rp0 = (ARecordPointable) vp0;
+ ARecordPointable rp1 = (ARecordPointable) vp1;
+ ArrayBackedValueStorage fnvs = new ArrayBackedValueStorage();
+ UTF8StringPointable fnp = (UTF8StringPointable) UTF8StringPointable.FACTORY.createPointable();
+ try {
+ for (String fieldName : recType.getFieldNames()) {
+ fnvs.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(fieldName, fnvs.getDataOutput());
+ fnp.set(fnvs);
+ if (!addFieldFromRecord(rp1, fieldName, fnp)) {
+ addFieldFromRecord(rp0, fieldName, fnp);
+ }
+ }
+ rb.write(output.getDataOutput(), true);
+ } catch (IOException | AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
+ private boolean addFieldFromRecord(ARecordPointable rp, String fieldName, UTF8StringPointable fnp)
+ throws IOException, AsterixException {
+ for (int i = 0; i < rp.getFieldNames().size(); ++i) {
+ IVisitablePointable fp = rp.getFieldNames().get(i);
+ IVisitablePointable fv = rp.getFieldValues().get(i);
+ if (fnp.compareTo(fp.getByteArray(), fp.getStartOffset() + 1, fp.getLength() - 1) == 0) {
+ if (recType.isClosedField(fieldName)) {
+ int pos = recType.findFieldPosition(fieldName);
+ rb.addField(pos, fv);
+ } else {
+ rb.addField(fp, fv);
+ }
+ return true;
+ }
+ }
+ return false;
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return AsterixBuiltinFunctions.RECORD_MERGE;
+ }
+}
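In the new record-merge evaluator above, each output field is looked up in the second record first and only falls back to the first record, and a NULL in either input yields NULL. That precedence, as a standalone map-based sketch:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public final class RecordMergeSketch {
        static Map<String, Object> merge(Map<String, Object> rec0, Map<String, Object> rec1) {
            if (rec0 == null || rec1 == null) {
                return null;                // NULL in either input yields NULL
            }
            Map<String, Object> merged = new LinkedHashMap<>(rec0);
            merged.putAll(rec1);            // the second record wins on field-name collisions
            return merged;
        }
    }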
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/StringEvaluatorUtils.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/StringEvaluatorUtils.java
index 98a70b4..f593a4f 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/StringEvaluatorUtils.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/StringEvaluatorUtils.java
@@ -45,5 +45,5 @@
public final static char[] reservedRegexChars = new char[] { '$', '(', ')', '*', '.', '[', '\\', ']', '^', '{',
'|', '}' };
-
+
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
index 4fa03dd..1b8edff 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
@@ -86,7 +86,7 @@
int posPattern = 3;
int offset = 0;
- while (posSrc - 3 < srcLen - patternLen) {
+ while (posSrc - 3 < srcLen - patternLen) {
while (posPattern + offset - 3 < patternLen && posSrc + offset - 3 < srcLen) {
char c1 = UTF8StringPointable.charAt(src, posSrc + offset);
char c2 = UTF8StringPointable.charAt(pattern, posPattern + offset);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
index 14e16c6..122649e 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
@@ -91,7 +91,8 @@
}
if (argOut0.getByteArray()[0] != SER_DAY_TIME_DURATION_TYPE_TAG) {
- throw new AlgebricksException(FID.getName() + ": expects NULL/DAY-TIME-DURATION, but got "
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/DAY-TIME-DURATION, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
index 89cfbe5..ae3a8e0 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
@@ -135,7 +135,7 @@
if (!processSuccessfully) {
throw new HyracksDataException(
- "parse-date: Failed to match with any given format string!");
+ "parse-datetime: Failed to match with any given format string!");
}
aDateTime.setValue(chronon);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
index 8ee76e7..6db8453 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
@@ -110,7 +110,7 @@
int length1 = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
long chronon = 0;
-
+
int formatStart = 3;
int formatLength = 0;
boolean processSuccessfully = false;
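The loop above starts from the two-byte UTF length that follows each string argument's type tag. Reading that big-endian header in isolation (a sketch of the layout these evaluators assume, masking each byte before shifting):

    static int utfLength(byte[] taggedString) {
        // offset 0 is the type tag; offsets 1 and 2 hold the big-endian length
        return ((taggedString[1] & 0xff) << 8) | (taggedString[2] & 0xff);
    }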
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
index 22ab96c..c3f6379 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
@@ -46,7 +46,7 @@
// allowed input types
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
+
private final boolean isGreaterThan;
private YearMonthDurationComparatorDecriptor(boolean isGreaterThan) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 566aa98..ea75c77 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -143,6 +143,7 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.CreatePointDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.CreatePolygonDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.CreateRectangleDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.CreateUUIDDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.EditDistanceCheckDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.EditDistanceDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.EditDistanceListIsFilterable;
@@ -151,6 +152,7 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.EndsWithDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.FieldAccessByIndexDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.FieldAccessByNameDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.FlowRecordDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.FuzzyEqDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.GetItemDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.GramTokensDescriptor;
@@ -174,12 +176,12 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericRoundHalfToEven2Descriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericRoundHalfToEvenDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericSubDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.NumericSubtractDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericUnaryMinusDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.OpenRecordConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.OrDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.OrderedListConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.PrefixLenJaccardDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.RecordMergeDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.RegExpDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.SimilarityJaccardCheckDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.SimilarityJaccardDescriptor;
@@ -238,13 +240,13 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndsDecriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MillisecondsFromDayTimeDurationDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MonthsFromYearMonthDurationDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartedByDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MillisecondsFromDayTimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MonthsFromYearMonthDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.ParseDateDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.ParseDateTimeDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.ParseTimeDescriptor;
@@ -447,6 +449,8 @@
temp.add(AYearMonthDurationConstructorDescriptor.FACTORY);
temp.add(ADayTimeDurationConstructorDescriptor.FACTORY);
+ temp.add(CreateUUIDDescriptor.FACTORY);
+
// Spatial
temp.add(CreatePointDescriptor.FACTORY);
temp.add(CreateLineDescriptor.FACTORY);
@@ -489,11 +493,13 @@
temp.add(SimilarityJaccardPrefixDescriptor.FACTORY);
temp.add(SimilarityJaccardPrefixCheckDescriptor.FACTORY);
+ temp.add(RecordMergeDescriptor.FACTORY);
temp.add(SwitchCaseDescriptor.FACTORY);
temp.add(RegExpDescriptor.FACTORY);
temp.add(InjectFailureDescriptor.FACTORY);
temp.add(CastListDescriptor.FACTORY);
temp.add(CastRecordDescriptor.FACTORY);
+ temp.add(FlowRecordDescriptor.FACTORY);
temp.add(NotNullDescriptor.FACTORY);
// Spatial and temporal type accessors
@@ -723,6 +729,13 @@
((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(itemType, null));
}
}
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.RECORD_MERGE)) {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+ IAType outType = (IAType) context.getType(expr);
+ IAType type0 = (IAType) context.getType(f.getArguments().get(0).getValue());
+ IAType type1 = (IAType) context.getType(f.getArguments().get(1).getValue());
+ ((RecordMergeDescriptor) fd).reset(outType, type0, type1);
+ }
if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
ARecordType rt = (ARecordType) TypeComputerUtilities.getRequiredType(funcExpr);
@@ -741,6 +754,10 @@
}
((CastListDescriptor) fd).reset(rt, (AbstractCollectionType) it);
}
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.FLOW_RECORD)) {
+ ARecordType it = (ARecordType) TypeComputerUtilities.getInputType((AbstractFunctionCallExpression) expr);
+ ((FlowRecordDescriptor) fd).reset(it);
+ }
if (fd.getIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
ARecordType rt = (ARecordType) context.getType(expr);
((OpenRecordConstructorDescriptor) fd).reset(rt,
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
index 7e51ae6..2dd1bc6 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
@@ -22,8 +22,6 @@
import java.util.List;
import java.util.Queue;
-import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexer;
-import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexerException;
import edu.uci.ics.asterix.builders.IARecordBuilder;
import edu.uci.ics.asterix.builders.IAsterixListBuilder;
import edu.uci.ics.asterix.builders.OrderedListBuilder;
@@ -51,7 +49,10 @@
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.AUnorderedListType;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexer;
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexerException;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -416,13 +417,14 @@
return true;
if (aObjectType.getTypeTag() != ATypeTag.UNION) {
- if (expectedTypeTag == aObjectType.getTypeTag())
- return true;
+ return ATypeHierarchy.canPromote(expectedTypeTag, aObjectType.getTypeTag());
} else { // union
unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == expectedTypeTag)
+ for (IAType t : unionList) {
+ if (ATypeHierarchy.canPromote(t.getTypeTag(), expectedTypeTag)) {
return true;
+ }
+ }
}
return false;
}
@@ -847,7 +849,7 @@
} catch (Exception e) {
throw new AsterixException(e);
}
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName() + ". Got " + typeTag + " instead.");
}
private void parseBoolean(String bool, DataOutput out) throws AsterixException {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
index aaa38d3..34bc1f9 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
@@ -40,42 +40,42 @@
*/
public abstract class AbstractDataParser implements IDataParser {
- protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
- protected AMutableInt32 aInt32 = new AMutableInt32(0);
- protected AMutableInt64 aInt64 = new AMutableInt64(0);
- protected AMutableDouble aDouble = new AMutableDouble(0);
- protected AMutableFloat aFloat = new AMutableFloat(0);
- protected AMutableString aString = new AMutableString("");
- protected AMutableString aStringFieldName = new AMutableString("");
+ protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
+ protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
+ protected AMutableInt32 aInt32 = new AMutableInt32(0);
+ protected AMutableInt64 aInt64 = new AMutableInt64(0);
+ protected AMutableDouble aDouble = new AMutableDouble(0);
+ protected AMutableFloat aFloat = new AMutableFloat(0);
+ protected AMutableString aString = new AMutableString("");
+ protected AMutableString aStringFieldName = new AMutableString("");
- // Serializers
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ // Serializers
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AFLOAT);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT8);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT16);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT64);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
index 2322338..ab52939 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
@@ -18,6 +18,8 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -36,6 +38,8 @@
*/
public abstract class AbstractTupleParser implements ITupleParser {
+ protected static Logger LOGGER = Logger.getLogger(AbstractTupleParser.class.getName());
+
protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
protected DataOutput dos = tb.getDataOutput();
protected final FrameTupleAppender appender;
@@ -68,6 +72,7 @@
addTupleToFrame(writer);
}
if (appender.getTupleCount() > 0) {
+
FrameUtils.flushFrame(frame, writer);
}
} catch (AsterixException ae) {
diff --git a/asterix-server/pom.xml b/asterix-server/pom.xml
index edc7872..08ecb14 100644
--- a/asterix-server/pom.xml
+++ b/asterix-server/pom.xml
@@ -19,7 +19,7 @@
<parent>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<build>
@@ -28,33 +28,66 @@
<groupId>org.codehaus.mojo</groupId>
<artifactId>appassembler-maven-plugin</artifactId>
<version>1.3</version>
+ <configuration>
+ <assembleDirectory>
+ ${project.build.directory}/appassembler
+ </assembleDirectory>
+ <repositoryLayout>flat</repositoryLayout>
+ <programs>
+ <program>
+ <platforms>
+ <platform>unix</platform>
+ </platforms>
+ <name>asterixcc</name>
+ <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+ <commandLineArguments>
+ <commandLineArgument>-app-cc-main-class</commandLineArgument>
+ <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint</commandLineArgument>
+ </commandLineArguments>
+ </program>
+ <program>
+ <platforms>
+ <platform>unix</platform>
+ </platforms>
+ <name>asterixnc</name>
+ <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+ <commandLineArguments>
+ <commandLineArgument>-app-nc-main-class</commandLineArgument>
+ <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint</commandLineArgument>
+ </commandLineArguments>
+ </program>
+ </programs>
+ <daemons>
+ <daemon>
+ <id>asterixcc</id>
+ <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+ <platforms>
+ <platform>booter-windows</platform>
+ </platforms>
+ <commandLineArguments>
+ <commandLineArgument>-app-cc-main-class</commandLineArgument>
+ <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint</commandLineArgument>
+ </commandLineArguments>
+ </daemon>
+ <daemon>
+ <id>asterixnc</id>
+ <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+ <platforms>
+ <platform>booter-windows</platform>
+ </platforms>
+ <commandLineArguments>
+ <commandLineArgument>-app-nc-main-class</commandLineArgument>
+ <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint</commandLineArgument>
+ </commandLineArguments>
+ </daemon>
+ </daemons>
+ </configuration>
<executions>
<execution>
- <configuration>
- <programs>
- <program>
- <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
- <name>asterixcc</name>
- <commandLineArguments>
- <commandLineArgument>-app-cc-main-class</commandLineArgument>
- <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint</commandLineArgument>
- </commandLineArguments>
- </program>
- <program>
- <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
- <name>asterixnc</name>
- <commandLineArguments>
- <commandLineArgument>-app-nc-main-class</commandLineArgument>
- <commandLineArgument>edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint</commandLineArgument>
- </commandLineArguments>
- </program>
- </programs>
- <repositoryLayout>flat</repositoryLayout>
- <repositoryName>lib</repositoryName>
- </configuration>
- <phase>package</phase>
<goals>
<goal>assemble</goal>
+ <goal>generate-daemons</goal>
+ <goal>create-repository</goal>
</goals>
</execution>
</executions>
@@ -94,7 +127,12 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-app</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.mojo.appassembler</groupId>
+ <artifactId>appassembler-booter</artifactId>
+ <version>1.3.1</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterix-server/src/main/assembly/binary-assembly.xml b/asterix-server/src/main/assembly/binary-assembly.xml
index ae362ca..7b763e6 100644
--- a/asterix-server/src/main/assembly/binary-assembly.xml
+++ b/asterix-server/src/main/assembly/binary-assembly.xml
@@ -21,17 +21,27 @@
<includeBaseDirectory>false</includeBaseDirectory>
<fileSets>
<fileSet>
+ <directory>target/appassembler/repo</directory>
+ <outputDirectory>repo</outputDirectory>
+ </fileSet>
+ <fileSet>
<directory>target/appassembler/bin</directory>
<outputDirectory>bin</outputDirectory>
<fileMode>0755</fileMode>
</fileSet>
<fileSet>
- <directory>target/appassembler/lib</directory>
- <outputDirectory>lib</outputDirectory>
+ <directory>target/generated-resources/appassembler/booter-windows/etc</directory>
+ <outputDirectory>bin</outputDirectory>
+ <includes>
+ <include>asterixcc.xml</include>
+ <include>asterixnc.xml</include>
+ </includes>
</fileSet>
+
<fileSet>
- <directory>docs</directory>
- <outputDirectory>docs</outputDirectory>
+ <directory>target/generated-resources/appassembler/booter-windows/bin</directory>
+ <outputDirectory>bin</outputDirectory>
+ <fileMode>0755</fileMode>
</fileSet>
</fileSets>
</assembly>
diff --git a/asterix-test-framework/.gitignore b/asterix-test-framework/.gitignore
index ea8c4bf..19f2e00 100644
--- a/asterix-test-framework/.gitignore
+++ b/asterix-test-framework/.gitignore
@@ -1 +1,2 @@
/target
+/target
diff --git a/asterix-test-framework/pom.xml b/asterix-test-framework/pom.xml
old mode 100755
new mode 100644
index 5aa51d9..4329a9c
--- a/asterix-test-framework/pom.xml
+++ b/asterix-test-framework/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-test-framework</artifactId>
<build>
diff --git a/asterix-test-framework/src/main/resources/Catalog.xsd b/asterix-test-framework/src/main/resources/Catalog.xsd
old mode 100755
new mode 100644
diff --git a/asterix-tools/data/uscensus/dist.all.first.cleaned b/asterix-tools/data/uscensus/dist.all.first.cleaned
old mode 100755
new mode 100644
diff --git a/asterix-tools/data/uscensus/dist.all.last.cleaned b/asterix-tools/data/uscensus/dist.all.last.cleaned
old mode 100755
new mode 100644
diff --git a/asterix-tools/data/uscensus/dist.female.first.cleaned b/asterix-tools/data/uscensus/dist.female.first.cleaned
old mode 100755
new mode 100644
diff --git a/asterix-tools/data/uscensus/dist.male.first.cleaned b/asterix-tools/data/uscensus/dist.male.first.cleaned
old mode 100755
new mode 100644
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 6c7db41..04edf26 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-tools</artifactId>
@@ -138,13 +138,25 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-algebra</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-external-data</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-metadata</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/AdmDataGen.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/AdmDataGen.java
index 333d4a3..eb85bb9 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/AdmDataGen.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/AdmDataGen.java
@@ -49,8 +49,9 @@
import edu.uci.ics.asterix.common.annotations.RecordDataGenAnnotation;
import edu.uci.ics.asterix.common.annotations.TypeDataGen;
import edu.uci.ics.asterix.common.annotations.UndeclaredFieldsDataGen;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -61,7 +62,6 @@
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.types.TypeSignature;
import edu.uci.ics.asterix.tools.translator.ADGenDmlTranslator;
-import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.algebricks.data.utils.WriteValueTools;
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/CustOrdDataGen.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/CustOrdDataGen.java
index 0d697c0..bb42819 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/CustOrdDataGen.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/datagen/CustOrdDataGen.java
@@ -48,7 +48,7 @@
private String[] INTERESTS = { "Bass", "Music", "Databases", "Fishing", "Tennis", "Squash", "Computers", "Books",
"Movies", "Cigars", "Wine", "Running", "Walking", "Skiing", "Basketball", "Video Games", "Cooking",
"Coffee", "Base Jumping", "Puzzles", "Chess", "Programming", "Reddit", "Soccer", "Hockey", "Money",
- "Dancing", "Brewing", "Gardening", "Hacking", "Reading"};
+ "Dancing", "Brewing", "Gardening", "Hacking", "Reading" };
private static final int MIN_CHILD_AGE = 0;
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java
new file mode 100644
index 0000000..ce22887
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java
@@ -0,0 +1,1162 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.nio.CharBuffer;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+
+public class DataGenerator {
+
+ private RandomDateGenerator randDateGen;
+
+ private RandomNameGenerator randNameGen;
+
+ private RandomMessageGenerator randMessageGen;
+
+ private RandomLocationGenerator randLocationGen;
+
+ private Random random = new Random();
+
+ private TwitterUser twUser = new TwitterUser();
+
+ private TweetMessage twMessage = new TweetMessage();
+
+ public DataGenerator(InitializationInfo info) {
+ initialize(info);
+ }
+
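+    // emits freshly generated tweets until the configured number of wall-clock seconds has elapsed;
+    // reuses a single TweetMessage instance to avoid per-tweet allocation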
+ public class TweetMessageIterator implements Iterator<TweetMessage> {
+
+ private final int duration;
+ private final GULongIDGenerator idGen;
+ private long startTime = 0;
+
+ public TweetMessageIterator(int duration, GULongIDGenerator idGen) {
+ this.duration = duration;
+ this.idGen = idGen;
+ this.startTime = System.currentTimeMillis();
+ }
+
+ @Override
+ public boolean hasNext() {
+            // duration is in seconds; use a long literal so the multiplication cannot overflow int
+            return System.currentTimeMillis() - startTime <= duration * 1000L;
+ }
+
+ @Override
+ public TweetMessage next() {
+ TweetMessage msg = null;
+ getTwitterUser(null);
+ Message message = randMessageGen.getNextRandomMessage();
+ Point location = randLocationGen.getRandomPoint();
+ DateTime sendTime = randDateGen.getNextRandomDatetime();
+ twMessage.reset(idGen.getNextULong(), twUser, location, sendTime, message.getReferredTopics(), message);
+ msg = twMessage;
+ return msg;
+ }
+
+ @Override
+ public void remove() {
+            // removal is not meaningful for a generated stream; intentionally a no-op
+ }
+
+ }
+
+ public static class InitializationInfo {
+ public Date startDate = new Date(1, 1, 2005);
+ public Date endDate = new Date(8, 20, 2012);
+ public String[] lastNames = DataGenerator.lastNames;
+ public String[] firstNames = DataGenerator.firstNames;
+ public String[] vendors = DataGenerator.vendors;
+ public String[] jargon = DataGenerator.jargon;
+ public String[] org_list = DataGenerator.org_list;
+ }
+
+ public void initialize(InitializationInfo info) {
+ randDateGen = new RandomDateGenerator(info.startDate, info.endDate);
+ randNameGen = new RandomNameGenerator(info.firstNames, info.lastNames);
+ randLocationGen = new RandomLocationGenerator(24, 49, 66, 98);
+ randMessageGen = new RandomMessageGenerator(info.vendors, info.jargon);
+ }
+
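+    // note: despite the name, this fills the shared twUser instance in place rather than returning a new user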
+ public void getTwitterUser(String usernameSuffix) {
+ String suggestedName = randNameGen.getRandomName();
+ String[] nameComponents = suggestedName.split(" ");
+ String screenName = nameComponents[0] + nameComponents[1] + randNameGen.getRandomNameSuffix();
+ String name = suggestedName;
+ if (usernameSuffix != null) {
+ name = name + usernameSuffix;
+ }
+        int numFriends = random.nextInt(100); // uniform for now; TODO: draw from a Zipfian distribution
+        int statusesCount = random.nextInt(500); // uniform for now; TODO: draw from a Zipfian distribution
+        int followersCount = random.nextInt(200);
+ twUser.reset(screenName, numFriends, statusesCount, name, followersCount);
+ }
+
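+    // draws random dates/datetimes uniformly between startDate and endDate; days are capped at 28 so every month is valid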
+ public static class RandomDateGenerator {
+
+ private final Date startDate;
+ private final Date endDate;
+ private final Random random = new Random();
+ private final int yearDifference;
+ private Date workingDate;
+ private Date recentDate;
+ private DateTime dateTime;
+
+ public RandomDateGenerator(Date startDate, Date endDate) {
+ this.startDate = startDate;
+ this.endDate = endDate;
+ this.yearDifference = endDate.getYear() - startDate.getYear() + 1;
+ this.workingDate = new Date();
+ this.recentDate = new Date();
+ this.dateTime = new DateTime();
+ }
+
+ public Date getStartDate() {
+ return startDate;
+ }
+
+ public Date getEndDate() {
+ return endDate;
+ }
+
+ public Date getNextRandomDate() {
+ int year = random.nextInt(yearDifference) + startDate.getYear();
+ int month;
+ int day;
+ if (year == endDate.getYear()) {
+ month = random.nextInt(endDate.getMonth()) + 1;
+ if (month == endDate.getMonth()) {
+ day = random.nextInt(endDate.getDay()) + 1;
+ } else {
+ day = random.nextInt(28) + 1;
+ }
+ } else {
+ month = random.nextInt(12) + 1;
+ day = random.nextInt(28) + 1;
+ }
+ workingDate.reset(month, day, year);
+ return workingDate;
+ }
+
+ public DateTime getNextRandomDatetime() {
+ Date randomDate = getNextRandomDate();
+ dateTime.reset(randomDate);
+ return dateTime;
+ }
+
+        public Date getNextRecentDate(Date date) {
+            int year = date.getYear();
+            if (year != endDate.getYear()) {
+                year += random.nextInt(endDate.getYear() - date.getYear());
+            }
+            int month;
+            int day;
+            if (year != endDate.getYear()) {
+                month = random.nextInt(12) + 1;
+                day = random.nextInt(28) + 1;
+            } else {
+                month = (date.getMonth() == endDate.getMonth()) ? endDate.getMonth()
+                        : date.getMonth() + random.nextInt(endDate.getMonth() - date.getMonth());
+                if (month != endDate.getMonth()) {
+                    day = random.nextInt(28) + 1;
+                } else {
+                    day = (date.getDay() == endDate.getDay()) ? endDate.getDay()
+                            : date.getDay() + random.nextInt(endDate.getDay() - date.getDay());
+                }
+            }
+            recentDate.reset(month, day, year);
+            return recentDate;
+        }
+
+ }
+
+ public static class DateTime extends Date {
+
+ private String hour = "10";
+ private String min = "10";
+ private String sec = "00";
+
+ public DateTime(int month, int day, int year, String hour, String min, String sec) {
+ super(month, day, year);
+ this.hour = hour;
+ this.min = min;
+ this.sec = sec;
+ }
+
+ public DateTime() {
+ }
+
+ public void reset(int month, int day, int year, String hour, String min, String sec) {
+            super.setMonth(month);
+ super.setDay(day);
+ super.setYear(year);
+ this.hour = hour;
+ this.min = min;
+ this.sec = sec;
+ }
+
+ public DateTime(Date date) {
+ super(date.getMonth(), date.getDay(), date.getYear());
+ }
+
+ public void reset(Date date) {
+ reset(date.getMonth(), date.getDay(), date.getYear());
+ }
+
+ public DateTime(Date date, int hour, int min, int sec) {
+ super(date.getMonth(), date.getDay(), date.getYear());
+            this.hour = (hour < 10) ? "0" + hour : "" + hour;
+            this.min = (min < 10) ? "0" + min : "" + min;
+            this.sec = (sec < 10) ? "0" + sec : "" + sec;
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("datetime");
+ builder.append("(\"");
+ builder.append(super.getYear());
+ builder.append("-");
+ builder.append(super.getMonth() < 10 ? "0" + super.getMonth() : super.getMonth());
+ builder.append("-");
+ builder.append(super.getDay() < 10 ? "0" + super.getDay() : super.getDay());
+ builder.append("T");
+ builder.append(hour + ":" + min + ":" + sec);
+ builder.append("\")");
+ return builder.toString();
+ }
+ }
+
+ public static class Message {
+
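+        // reusable fixed-capacity buffer; the constructor and reset() assume a message never exceeds 500 chars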
+ private char[] message = new char[500];
+ private List<String> referredTopics;
+ private int length;
+
+ public Message(char[] m, List<String> referredTopics) {
+ System.arraycopy(m, 0, message, 0, m.length);
+ length = m.length;
+ this.referredTopics = referredTopics;
+ }
+
+ public Message() {
+ referredTopics = new ArrayList<String>();
+ length = 0;
+ }
+
+ public List<String> getReferredTopics() {
+ return referredTopics;
+ }
+
+ public void reset(char[] m, int offset, int length, List<String> referredTopics) {
+ System.arraycopy(m, offset, message, 0, length);
+ this.length = length;
+ this.referredTopics = referredTopics;
+ }
+
+ public int getLength() {
+ return length;
+ }
+
+ public char charAt(int index) {
+ return message[index];
+ }
+
+ }
+
+ public static class Point {
+
+ private float latitude;
+ private float longitude;
+
+ public float getLatitude() {
+ return latitude;
+ }
+
+ public float getLongitude() {
+ return longitude;
+ }
+
+ public Point(float latitude, float longitude) {
+ this.latitude = latitude;
+ this.longitude = longitude;
+ }
+
+ public void reset(float latitude, float longitude) {
+ this.latitude = latitude;
+ this.longitude = longitude;
+ }
+
+ public Point() {
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("point(\"" + latitude + "," + longitude + "\")");
+ return builder.toString();
+ }
+ }
+
+ public static class RandomNameGenerator {
+
+ private String[] firstNames;
+ private String[] lastNames;
+
+ private final Random random = new Random();
+
+ private final String[] connectors = new String[] { "_", "#", "$", "@" };
+
+ public RandomNameGenerator(String[] firstNames, String[] lastNames) {
+ this.firstNames = firstNames;
+ this.lastNames = lastNames;
+ }
+
+        public String getRandomName() {
+            return getSuggestedName();
+        }
+
+ private String getSuggestedName() {
+ int firstNameIndex = random.nextInt(firstNames.length);
+ int lastNameIndex = random.nextInt(lastNames.length);
+ String suggestedName = firstNames[firstNameIndex] + " " + lastNames[lastNameIndex];
+ return suggestedName;
+ }
+
+ public String getRandomNameSuffix() {
+ return connectors[random.nextInt(connectors.length)] + random.nextInt(1000);
+ }
+ }
+
+ public static class RandomMessageGenerator {
+
+ private final MessageTemplate messageTemplate;
+
+ public RandomMessageGenerator(String[] vendors, String[] jargon) {
+ List<String> vendorList = new ArrayList<String>();
+ for (String v : vendors) {
+ vendorList.add(v);
+ }
+ List<String> jargonList = new ArrayList<String>();
+ for (String j : jargon) {
+ jargonList.add(j);
+ }
+ this.messageTemplate = new MessageTemplate(vendorList, jargonList);
+ }
+
+ public Message getNextRandomMessage() {
+ return messageTemplate.getNextMessage();
+ }
+ }
+
+ public static class AbstractMessageTemplate {
+
+ protected final Random random = new Random();
+
+ protected String[] positiveVerbs = new String[] { "like", "love" };
+ protected String[] negativeVerbs = new String[] { "dislike", "hate", "can't stand" };
+
+ protected String[] negativeAdjectives = new String[] { "horrible", "bad", "terrible", "OMG" };
+        protected String[] positiveAdjectives = new String[] { "good", "awesome", "amazing", "mind-blowing" };
+
+ protected String[] otherWords = new String[] { "the", "its" };
+ }
+
+ public static class MessageTemplate extends AbstractMessageTemplate {
+
+ private List<String> vendors;
+ private List<String> jargon;
+ private CharBuffer buffer;
+ private List<String> referredTopics;
+ private Message message = new Message();
+
+ public MessageTemplate(List<String> vendors, List<String> jargon) {
+ this.vendors = vendors;
+ this.jargon = jargon;
+ buffer = CharBuffer.allocate(2500);
+ referredTopics = new ArrayList<String>();
+ }
+
+ public Message getNextMessage() {
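+            // template: "<verb> <vendor> <the|its> <jargon> is <adjective> [:)|:(]", drawn from the word pools above;
+            // the vendor and jargon terms picked here double as the message's referred topics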
+ buffer.position(0);
+ buffer.limit(2500);
+ referredTopics.clear();
+ boolean isPositive = random.nextBoolean();
+ String[] verbArray = isPositive ? positiveVerbs : negativeVerbs;
+            String[] adjectiveArray = isPositive ? positiveAdjectives : negativeAdjectives;
+ String verb = verbArray[random.nextInt(verbArray.length)];
+ String adjective = adjectiveArray[random.nextInt(adjectiveArray.length)];
+
+ buffer.put(" ");
+ buffer.put(verb);
+ buffer.put(" ");
+ String vendor = vendors.get(random.nextInt(vendors.size()));
+ referredTopics.add(vendor);
+ buffer.append(vendor);
+ buffer.append(" ");
+ buffer.append(otherWords[random.nextInt(otherWords.length)]);
+ buffer.append(" ");
+ String jargonTerm = jargon.get(random.nextInt(jargon.size()));
+ referredTopics.add(jargonTerm);
+ buffer.append(jargonTerm);
+ buffer.append(" is ");
+ buffer.append(adjective);
+ if (random.nextBoolean()) {
+ buffer.append(isPositive ? ":)" : ":(");
+ }
+
+ buffer.flip();
+ message.reset(buffer.array(), 0, buffer.limit(), referredTopics);
+ return message;
+ }
+ }
+
+ public static class RandomUtil {
+
+ public static Random random = new Random();
+
+ public static int[] getKFromN(int k, int n) {
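+            // rejection-sample k distinct ints from [0, n]; nextInt(n + 1) makes the upper bound inclusive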
+ int[] result = new int[k];
+ int cnt = 0;
+ HashSet<Integer> values = new HashSet<Integer>();
+ while (cnt < k) {
+ int val = random.nextInt(n + 1);
+ if (values.contains(val)) {
+ continue;
+ }
+
+ result[cnt++] = val;
+ values.add(val);
+ }
+ return result;
+ }
+ }
+
+ public static class RandomLocationGenerator {
+
+ private Random random = new Random();
+
+ private final int beginLat;
+ private final int endLat;
+ private final int beginLong;
+ private final int endLong;
+
+ private Point point;
+
+ public RandomLocationGenerator(int beginLat, int endLat, int beginLong, int endLong) {
+ this.beginLat = beginLat;
+ this.endLat = endLat;
+ this.beginLong = beginLong;
+ this.endLong = endLong;
+ this.point = new Point();
+ }
+
+ public Point getRandomPoint() {
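+            // uniform point inside the configured bounding box; the minor part adds two decimal places of precision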
+ int latMajor = beginLat + random.nextInt(endLat - beginLat);
+ int latMinor = random.nextInt(100);
+ float latitude = latMajor + ((float) latMinor) / 100;
+
+ int longMajor = beginLong + random.nextInt(endLong - beginLong);
+ int longMinor = random.nextInt(100);
+ float longitude = longMajor + ((float) longMinor) / 100;
+
+ point.reset(latitude, longitude);
+ return point;
+ }
+
+ }
+
+ public static class TweetMessage {
+
+ private long tweetid;
+ private TwitterUser user;
+ private Point senderLocation;
+ private DateTime sendTime;
+ private List<String> referredTopics;
+ private Message messageText;
+
+ public TweetMessage() {
+ }
+
+ public TweetMessage(long tweetid, TwitterUser user, Point senderLocation, DateTime sendTime,
+ List<String> referredTopics, Message messageText) {
+ this.tweetid = tweetid;
+ this.user = user;
+ this.senderLocation = senderLocation;
+ this.sendTime = sendTime;
+ this.referredTopics = referredTopics;
+ this.messageText = messageText;
+ }
+
+ public void reset(long tweetid, TwitterUser user, Point senderLocation, DateTime sendTime,
+ List<String> referredTopics, Message messageText) {
+ this.tweetid = tweetid;
+ this.user = user;
+ this.senderLocation = senderLocation;
+ this.sendTime = sendTime;
+ this.referredTopics = referredTopics;
+ this.messageText = messageText;
+ }
+
+ public String toString() {
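+            // serialize as an ADM record literal: int64 id, user record, point, datetime,
+            // a {{...}} bag of referred topics, and the message text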
+ StringBuilder builder = new StringBuilder();
+ builder.append("{");
+ builder.append("\"tweetid\":");
+ builder.append("int64(\"" + tweetid + "\")");
+ builder.append(",");
+ builder.append("\"user\":");
+ builder.append(user);
+ builder.append(",");
+ builder.append("\"sender-location\":");
+ builder.append(senderLocation);
+ builder.append(",");
+ builder.append("\"send-time\":");
+ builder.append(sendTime);
+ builder.append(",");
+ builder.append("\"referred-topics\":");
+ builder.append("{{");
+ for (String topic : referredTopics) {
+ builder.append("\"" + topic + "\"");
+ builder.append(",");
+ }
+ if (referredTopics.size() > 0) {
+ builder.deleteCharAt(builder.lastIndexOf(","));
+ }
+ builder.append("}}");
+ builder.append(",");
+ builder.append("\"message-text\":");
+ builder.append("\"");
+ for (int i = 0; i < messageText.getLength(); i++) {
+ builder.append(messageText.charAt(i));
+ }
+ builder.append("\"");
+ builder.append("}");
+            return builder.toString();
+ }
+
+ public long getTweetid() {
+ return tweetid;
+ }
+
+ public void setTweetid(long tweetid) {
+ this.tweetid = tweetid;
+ }
+
+ public TwitterUser getUser() {
+ return user;
+ }
+
+ public void setUser(TwitterUser user) {
+ this.user = user;
+ }
+
+ public Point getSenderLocation() {
+ return senderLocation;
+ }
+
+ public void setSenderLocation(Point senderLocation) {
+ this.senderLocation = senderLocation;
+ }
+
+ public DateTime getSendTime() {
+ return sendTime;
+ }
+
+ public void setSendTime(DateTime sendTime) {
+ this.sendTime = sendTime;
+ }
+
+ public List<String> getReferredTopics() {
+ return referredTopics;
+ }
+
+ public void setReferredTopics(List<String> referredTopics) {
+ this.referredTopics = referredTopics;
+ }
+
+ public Message getMessageText() {
+ return messageText;
+ }
+
+ public void setMessageText(Message messageText) {
+ this.messageText = messageText;
+ }
+
+ }
+
+ public static class TwitterUser {
+
+ private String screenName;
+ private String lang = "en";
+ private int friendsCount;
+ private int statusesCount;
+ private String name;
+ private int followersCount;
+
+ public TwitterUser() {
+
+ }
+
+ public TwitterUser(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
+ this.screenName = screenName;
+ this.friendsCount = friendsCount;
+ this.statusesCount = statusesCount;
+ this.name = name;
+ this.followersCount = followersCount;
+ }
+
+ public void reset(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
+ this.screenName = screenName;
+ this.friendsCount = friendsCount;
+ this.statusesCount = statusesCount;
+ this.name = name;
+ this.followersCount = followersCount;
+ }
+
+ public String getScreenName() {
+ return screenName;
+ }
+
+ public int getFriendsCount() {
+ return friendsCount;
+ }
+
+ public int getStatusesCount() {
+ return statusesCount;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public int getFollowersCount() {
+ return followersCount;
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("{");
+ builder.append("\"screen-name\":" + "\"" + screenName + "\"");
+ builder.append(",");
+ builder.append("\"lang\":" + "\"" + lang + "\"");
+ builder.append(",");
+ builder.append("\"friends_count\":" + friendsCount);
+ builder.append(",");
+ builder.append("\"statuses_count\":" + statusesCount);
+ builder.append(",");
+ builder.append("\"name\":" + "\"" + name + "\"");
+ builder.append(",");
+ builder.append("\"followers_count\":" + followersCount);
+ builder.append("}");
+ return builder.toString();
+ }
+
+ }
+
+ public static class Date {
+
+ private int day;
+ private int month;
+ private int year;
+
+ public Date(int month, int day, int year) {
+ this.month = month;
+ this.day = day;
+ this.year = year;
+ }
+
+ public void reset(int month, int day, int year) {
+ this.month = month;
+ this.day = day;
+ this.year = year;
+ }
+
+ public int getDay() {
+ return day;
+ }
+
+ public int getMonth() {
+ return month;
+ }
+
+ public int getYear() {
+ return year;
+ }
+
+ public Date() {
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("date");
+ builder.append("(\"");
+ builder.append(year);
+ builder.append("-");
+ builder.append(month < 10 ? "0" + month : "" + month);
+ builder.append("-");
+ builder.append(day < 10 ? "0" + day : "" + day);
+ builder.append("\")");
+ return builder.toString();
+ }
+
+ public void setDay(int day) {
+ this.day = day;
+ }
+
+ public void setMonth(int month) {
+ this.month = month;
+ }
+
+ public void setYear(int year) {
+ this.year = year;
+ }
+ }
+
+ public static String[] lastNames = { "Hoopengarner", "Harrow", "Gardner", "Blyant", "Best", "Buttermore", "Gronko",
+ "Mayers", "Countryman", "Neely", "Ruhl", "Taggart", "Bash", "Cason", "Hil", "Zalack", "Mingle", "Carr",
+ "Rohtin", "Wardle", "Pullman", "Wire", "Kellogg", "Hiles", "Keppel", "Bratton", "Sutton", "Wickes",
+ "Muller", "Friedline", "Llora", "Elizabeth", "Anderson", "Gaskins", "Rifler", "Vinsant", "Stanfield",
+ "Black", "Guest", "Hujsak", "Carter", "Weidemann", "Hays", "Patton", "Hayhurst", "Paynter", "Cressman",
+ "Fiddler", "Evans", "Sherlock", "Woodworth", "Jackson", "Bloise", "Schneider", "Ring", "Kepplinger",
+ "James", "Moon", "Bennett", "Bashline", "Ryals", "Zeal", "Christman", "Milliron", "Nash", "Ewing", "Camp",
+ "Mason", "Richardson", "Bowchiew", "Hahn", "Wilson", "Wood", "Toyley", "Williamson", "Lafortune", "Errett",
+ "Saltser", "Hirleman", "Brindle", "Newbiggin", "Ulery", "Lambert", "Shick", "Kuster", "Moore", "Finck",
+ "Powell", "Jolce", "Townsend", "Sauter", "Cowher", "Wolfe", "Cavalet", "Porter", "Laborde", "Ballou",
+ "Murray", "Stoddard", "Pycroft", "Milne", "King", "Todd", "Staymates", "Hall", "Romanoff", "Keilbach",
+ "Sandford", "Hamilton", "Fye", "Kline", "Weeks", "Mcelroy", "Mccullough", "Bryant", "Hill", "Moore",
+ "Ledgerwood", "Prevatt", "Eckert", "Read", "Hastings", "Doverspike", "Allshouse", "Bryan", "Mccallum",
+ "Lombardi", "Mckendrick", "Cattley", "Barkley", "Steiner", "Finlay", "Priebe", "Armitage", "Hall", "Elder",
+ "Erskine", "Hatcher", "Walker", "Pearsall", "Dunkle", "Haile", "Adams", "Miller", "Newbern", "Basinger",
+ "Fuhrer", "Brinigh", "Mench", "Blackburn", "Bastion", "Mccune", "Bridger", "Hynes", "Quinn", "Courtney",
+ "Geddinge", "Field", "Seelig", "Cable", "Earhart", "Harshman", "Roby", "Beals", "Berry", "Reed", "Hector",
+ "Pittman", "Haverrman", "Kalp", "Briner", "Joghs", "Cowart", "Close", "Wynne", "Harden", "Weldy",
+ "Stephenson", "Hildyard", "Moberly", "Wells", "Mackendoerfer", "Fisher", "Oppie", "Oneal", "Churchill",
+ "Keister", "Alice", "Tavoularis", "Fisher", "Hair", "Burns", "Veith", "Wile", "Fuller", "Fields", "Clark",
+ "Randolph", "Stone", "Mcclymonds", "Holtzer", "Donkin", "Wilkinson", "Rosensteel", "Albright", "Stahl",
+ "Fox", "Kadel", "Houser", "Hanseu", "Henderson", "Davis", "Bicknell", "Swain", "Mercer", "Holdeman",
+ "Enderly", "Caesar", "Margaret", "Munshower", "Elless", "Lucy", "Feufer", "Schofield", "Graham",
+ "Blatenberger", "Benford", "Akers", "Campbell", "Ann", "Sadley", "Ling", "Gongaware", "Schmidt", "Endsley",
+ "Groah", "Flanders", "Reichard", "Lowstetter", "Sandblom", "Griffis", "Basmanoff", "Coveney", "Hawker",
+ "Archibald", "Hutton", "Barnes", "Diegel", "Raybould", "Focell", "Breitenstein", "Murray", "Chauvin",
+ "Busk", "Pheleps", "Teagarden", "Northey", "Baumgartner", "Fleming", "Harris", "Parkinson", "Carpenter",
+ "Whirlow", "Bonner", "Wortman", "Rogers", "Scott", "Lowe", "Mckee", "Huston", "Bullard", "Throckmorton",
+ "Rummel", "Mathews", "Dull", "Saline", "Tue", "Woolery", "Lalty", "Schrader", "Ramsey", "Eisenmann",
+ "Philbrick", "Sybilla", "Wallace", "Fonblanque", "Paul", "Orbell", "Higgens", "Casteel", "Franks",
+ "Demuth", "Eisenman", "Hay", "Robinson", "Fischer", "Hincken", "Wylie", "Leichter", "Bousum",
+ "Littlefield", "Mcdonald", "Greif", "Rhodes", "Wall", "Steele", "Baldwin", "Smith", "Stewart", "Schere",
+ "Mary", "Aultman", "Emrick", "Guess", "Mitchell", "Painter", "Aft", "Hasely", "Weldi", "Loewentsein",
+ "Poorbaugh", "Kepple", "Noton", "Judge", "Jackson", "Style", "Adcock", "Diller", "Marriman", "Johnston",
+ "Children", "Monahan", "Ehret", "Shaw", "Congdon", "Pinney", "Millard", "Crissman", "Tanner", "Rose",
+ "Knisely", "Cypret", "Sommer", "Poehl", "Hardie", "Bender", "Overholt", "Gottwine", "Beach", "Leslie",
+ "Trevithick", "Langston", "Magor", "Shotts", "Howe", "Hunter", "Cross", "Kistler", "Dealtry", "Christner",
+ "Pennington", "Thorley", "Eckhardstein", "Van", "Stroh", "Stough", "Stall", "Beedell", "Shea", "Garland",
+ "Mays", "Pritchard", "Frankenberger", "Rowley", "Lane", "Baum", "Alliman", "Park", "Jardine", "Butler",
+ "Cherry", "Kooser", "Baxter", "Billimek", "Downing", "Hurst", "Wood", "Baird", "Watkins", "Edwards",
+ "Kemerer", "Harding", "Owens", "Eiford", "Keener", "Garneis", "Fiscina", "Mang", "Draudy", "Mills",
+ "Gibson", "Reese", "Todd", "Ramos", "Levett", "Wilks", "Ward", "Mosser", "Dunlap", "Kifer", "Christopher",
+ "Ashbaugh", "Wynter", "Rawls", "Cribbs", "Haynes", "Thigpen", "Schreckengost", "Bishop", "Linton",
+ "Chapman", "James", "Jerome", "Hook", "Omara", "Houston", "Maclagan", "Sandys", "Pickering", "Blois",
+ "Dickson", "Kemble", "Duncan", "Woodward", "Southern", "Henley", "Treeby", "Cram", "Elsas", "Driggers",
+ "Warrick", "Overstreet", "Hindman", "Buck", "Sulyard", "Wentzel", "Swink", "Butt", "Schaeffer",
+ "Hoffhants", "Bould", "Willcox", "Lotherington", "Bagley", "Graff", "White", "Wheeler", "Sloan",
+ "Rodacker", "Hanford", "Jowers", "Kunkle", "Cass", "Powers", "Gilman", "Mcmichaels", "Hobbs", "Herndon",
+ "Prescott", "Smail", "Mcdonald", "Biery", "Orner", "Richards", "Mueller", "Isaman", "Bruxner", "Goodman",
+ "Barth", "Turzanski", "Vorrasi", "Stainforth", "Nehling", "Rahl", "Erschoff", "Greene", "Mckinnon",
+ "Reade", "Smith", "Pery", "Roose", "Greenwood", "Weisgarber", "Curry", "Holts", "Zadovsky", "Parrish",
+ "Putnam", "Munson", "Mcindoe", "Nickolson", "Brooks", "Bollinger", "Stroble", "Siegrist", "Fulton",
+ "Tomey", "Zoucks", "Roberts", "Otis", "Clarke", "Easter", "Johnson", "Fylbrigg", "Taylor", "Swartzbaugh",
+ "Weinstein", "Gadow", "Sayre", "Marcotte", "Wise", "Atweeke", "Mcfall", "Napier", "Eisenhart", "Canham",
+ "Sealis", "Baughman", "Gertraht", "Losey", "Laurence", "Eva", "Pershing", "Kern", "Pirl", "Rega",
+ "Sanborn", "Kanaga", "Sanders", "Anderson", "Dickinson", "Osteen", "Gettemy", "Crom", "Snyder", "Reed",
+ "Laurenzi", "Riggle", "Tillson", "Fowler", "Raub", "Jenner", "Koepple", "Soames", "Goldvogel", "Dimsdale",
+ "Zimmer", "Giesen", "Baker", "Beail", "Mortland", "Bard", "Sanner", "Knopsnider", "Jenkins", "Bailey",
+ "Werner", "Barrett", "Faust", "Agg", "Tomlinson", "Williams", "Little", "Greenawalt", "Wells", "Wilkins",
+ "Gisiko", "Bauerle", "Harrold", "Prechtl", "Polson", "Faast", "Winton", "Garneys", "Peters", "Potter",
+ "Porter", "Tennant", "Eve", "Dugger", "Jones", "Burch", "Cowper", "Whittier" };
+
+ public static String[] firstNames = { "Albert", "Jacquelin", "Dona", "Alia", "Mayme", "Genoveva", "Emma", "Lena",
+ "Melody", "Vilma", "Katelyn", "Jeremy", "Coral", "Leann", "Lita", "Gilda", "Kayla", "Alvina", "Maranda",
+ "Verlie", "Khadijah", "Karey", "Patrice", "Kallie", "Corey", "Mollie", "Daisy", "Melanie", "Sarita",
+ "Nichole", "Pricilla", "Terresa", "Berneice", "Arianne", "Brianne", "Lavinia", "Ulrike", "Lesha", "Adell",
+ "Ardelle", "Marisha", "Laquita", "Karyl", "Maryjane", "Kendall", "Isobel", "Raeann", "Heike", "Barbera",
+ "Norman", "Yasmine", "Nevada", "Mariam", "Edith", "Eugena", "Lovie", "Maren", "Bennie", "Lennie", "Tamera",
+ "Crystal", "Randi", "Anamaria", "Chantal", "Jesenia", "Avis", "Shela", "Randy", "Laurena", "Sharron",
+ "Christiane", "Lorie", "Mario", "Elizabeth", "Reina", "Adria", "Lakisha", "Brittni", "Azzie", "Dori",
+ "Shaneka", "Asuncion", "Katheryn", "Laurice", "Sharita", "Krystal", "Reva", "Inger", "Alpha", "Makeda",
+ "Anabel", "Loni", "Tiara", "Meda", "Latashia", "Leola", "Chin", "Daisey", "Ivory", "Amalia", "Logan",
+ "Tyler", "Kyong", "Carolann", "Maryetta", "Eufemia", "Anya", "Doreatha", "Lorna", "Rutha", "Ehtel",
+ "Debbie", "Chassidy", "Sang", "Christa", "Lottie", "Chun", "Karine", "Peggie", "Amina", "Melany", "Alayna",
+ "Scott", "Romana", "Naomi", "Christiana", "Salena", "Taunya", "Mitsue", "Regina", "Chelsie", "Charity",
+ "Dacia", "Aletha", "Latosha", "Lia", "Tamica", "Chery", "Bianca", "Shu", "Georgianne", "Myriam", "Austin",
+ "Wan", "Mallory", "Jana", "Georgie", "Jenell", "Kori", "Vicki", "Delfina", "June", "Mellisa", "Catherina",
+ "Claudie", "Tynisha", "Dayle", "Enriqueta", "Belen", "Pia", "Sarai", "Rosy", "Renay", "Kacie", "Frieda",
+ "Cayla", "Elissa", "Claribel", "Sabina", "Mackenzie", "Raina", "Cira", "Mitzie", "Aubrey", "Serafina",
+ "Maria", "Katharine", "Esperanza", "Sung", "Daria", "Billye", "Stefanie", "Kasha", "Holly", "Suzanne",
+ "Inga", "Flora", "Andria", "Genevie", "Eladia", "Janet", "Erline", "Renna", "Georgeanna", "Delorse",
+ "Elnora", "Rudy", "Rima", "Leanora", "Letisha", "Love", "Alverta", "Pinkie", "Domonique", "Jeannie",
+ "Jose", "Jacqueline", "Tara", "Lily", "Erna", "Tennille", "Galina", "Tamala", "Kirby", "Nichelle",
+ "Myesha", "Farah", "Santa", "Ludie", "Kenia", "Yee", "Micheline", "Maryann", "Elaina", "Ethelyn",
+ "Emmaline", "Shanell", "Marina", "Nila", "Alane", "Shakira", "Dorris", "Belinda", "Elois", "Barbie",
+ "Carita", "Gisela", "Lura", "Fransisca", "Helga", "Peg", "Leonarda", "Earlie", "Deetta", "Jacquetta",
+ "Blossom", "Kayleigh", "Deloras", "Keshia", "Christinia", "Dulce", "Bernie", "Sheba", "Lashanda", "Tula",
+ "Claretta", "Kary", "Jeanette", "Lupita", "Lenora", "Hisako", "Sherise", "Glynda", "Adela", "Chia",
+ "Sudie", "Mindy", "Caroyln", "Lindsey", "Xiomara", "Mercedes", "Onie", "Loan", "Alexis", "Tommie",
+ "Donette", "Monica", "Soo", "Camellia", "Lavera", "Valery", "Ariana", "Sophia", "Loris", "Ginette",
+ "Marielle", "Tari", "Julissa", "Alesia", "Suzanna", "Emelda", "Erin", "Ladawn", "Sherilyn", "Candice",
+ "Nereida", "Fairy", "Carl", "Joel", "Marilee", "Gracia", "Cordie", "So", "Shanita", "Drew", "Cassie",
+ "Sherie", "Marget", "Norma", "Delois", "Debera", "Chanelle", "Catarina", "Aracely", "Carlene", "Tricia",
+ "Aleen", "Katharina", "Marguerita", "Guadalupe", "Margorie", "Mandie", "Kathe", "Chong", "Sage", "Faith",
+ "Maryrose", "Stephany", "Ivy", "Pauline", "Susie", "Cristen", "Jenifer", "Annette", "Debi", "Karmen",
+ "Luci", "Shayla", "Hope", "Ocie", "Sharie", "Tami", "Breana", "Kerry", "Rubye", "Lashay", "Sondra",
+ "Katrice", "Brunilda", "Cortney", "Yan", "Zenobia", "Penni", "Addie", "Lavona", "Noel", "Anika",
+ "Herlinda", "Valencia", "Bunny", "Tory", "Victoria", "Carrie", "Mikaela", "Wilhelmina", "Chung",
+ "Hortencia", "Gerda", "Wen", "Ilana", "Sibyl", "Candida", "Victorina", "Chantell", "Casie", "Emeline",
+ "Dominica", "Cecila", "Delora", "Miesha", "Nova", "Sally", "Ronald", "Charlette", "Francisca", "Mina",
+ "Jenna", "Loraine", "Felisa", "Lulu", "Page", "Lyda", "Babara", "Flor", "Walter", "Chan", "Sherika",
+ "Kala", "Luna", "Vada", "Syreeta", "Slyvia", "Karin", "Renata", "Robbi", "Glenda", "Delsie", "Lizzie",
+ "Genia", "Caitlin", "Bebe", "Cory", "Sam", "Leslee", "Elva", "Caren", "Kasie", "Leticia", "Shannan",
+ "Vickey", "Sandie", "Kyle", "Chang", "Terrilyn", "Sandra", "Elida", "Marketta", "Elsy", "Tu", "Carman",
+ "Ashlie", "Vernia", "Albertine", "Vivian", "Elba", "Bong", "Margy", "Janetta", "Xiao", "Teofila", "Danyel",
+ "Nickole", "Aleisha", "Tera", "Cleotilde", "Dara", "Paulita", "Isela", "Maricela", "Rozella", "Marivel",
+ "Aurora", "Melissa", "Carylon", "Delinda", "Marvella", "Candelaria", "Deidre", "Tawanna", "Myrtie",
+ "Milagro", "Emilie", "Coretta", "Ivette", "Suzann", "Ammie", "Lucina", "Lory", "Tena", "Eleanor",
+ "Cherlyn", "Tiana", "Brianna", "Myra", "Flo", "Carisa", "Kandi", "Erlinda", "Jacqulyn", "Fermina", "Riva",
+ "Palmira", "Lindsay", "Annmarie", "Tamiko", "Carline", "Amelia", "Quiana", "Lashawna", "Veola", "Belva",
+ "Marsha", "Verlene", "Alex", "Leisha", "Camila", "Mirtha", "Melva", "Lina", "Arla", "Cythia", "Towanda",
+ "Aracelis", "Tasia", "Aurore", "Trinity", "Bernadine", "Farrah", "Deneen", "Ines", "Betty", "Lorretta",
+ "Dorethea", "Hertha", "Rochelle", "Juli", "Shenika", "Yung", "Lavon", "Deeanna", "Nakia", "Lynnette",
+ "Dinorah", "Nery", "Elene", "Carolee", "Mira", "Franchesca", "Lavonda", "Leida", "Paulette", "Dorine",
+ "Allegra", "Keva", "Jeffrey", "Bernardina", "Maryln", "Yoko", "Faviola", "Jayne", "Lucilla", "Charita",
+ "Ewa", "Ella", "Maggie", "Ivey", "Bettie", "Jerri", "Marni", "Bibi", "Sabrina", "Sarah", "Marleen",
+ "Katherin", "Remona", "Jamika", "Antonina", "Oliva", "Lajuana", "Fonda", "Sigrid", "Yael", "Billi",
+ "Verona", "Arminda", "Mirna", "Tesha", "Katheleen", "Bonita", "Kamilah", "Patrica", "Julio", "Shaina",
+ "Mellie", "Denyse", "Deandrea", "Alena", "Meg", "Kizzie", "Krissy", "Karly", "Alleen", "Yahaira", "Lucie",
+ "Karena", "Elaine", "Eloise", "Buena", "Marianela", "Renee", "Nan", "Carolynn", "Windy", "Avril", "Jane",
+ "Vida", "Thea", "Marvel", "Rosaline", "Tifany", "Robena", "Azucena", "Carlota", "Mindi", "Andera", "Jenny",
+ "Courtney", "Lyndsey", "Willette", "Kristie", "Shaniqua", "Tabatha", "Ngoc", "Una", "Marlena", "Louetta",
+ "Vernie", "Brandy", "Jacquelyne", "Jenelle", "Elna", "Erminia", "Ida", "Audie", "Louis", "Marisol",
+ "Shawana", "Harriette", "Karol", "Kitty", "Esmeralda", "Vivienne", "Eloisa", "Iris", "Jeanice", "Cammie",
+ "Jacinda", "Shena", "Floy", "Theda", "Lourdes", "Jayna", "Marg", "Kati", "Tanna", "Rosalyn", "Maxima",
+ "Soon", "Angelika", "Shonna", "Merle", "Kassandra", "Deedee", "Heidi", "Marti", "Renae", "Arleen",
+ "Alfredia", "Jewell", "Carley", "Pennie", "Corina", "Tonisha", "Natividad", "Lilliana", "Darcie", "Shawna",
+ "Angel", "Piedad", "Josefa", "Rebbeca", "Natacha", "Nenita", "Petrina", "Carmon", "Chasidy", "Temika",
+ "Dennise", "Renetta", "Augusta", "Shirlee", "Valeri", "Casimira", "Janay", "Berniece", "Deborah", "Yaeko",
+ "Mimi", "Digna", "Irish", "Cher", "Yong", "Lucila", "Jimmie", "Junko", "Lezlie", "Waneta", "Sandee",
+ "Marquita", "Eura", "Freeda", "Annabell", "Laree", "Jaye", "Wendy", "Toshia", "Kylee", "Aleta", "Emiko",
+ "Clorinda", "Sixta", "Audrea", "Juanita", "Birdie", "Reita", "Latanya", "Nia", "Leora", "Laurine",
+ "Krysten", "Jerrie", "Chantel", "Ira", "Sena", "Andre", "Jann", "Marla", "Precious", "Katy", "Gabrielle",
+ "Yvette", "Brook", "Shirlene", "Eldora", "Laura", "Milda", "Euna", "Jettie", "Debora", "Lise", "Edythe",
+ "Leandra", "Shandi", "Araceli", "Johanne", "Nieves", "Denese", "Carmelita", "Nohemi", "Annice", "Natalie",
+ "Yolande", "Jeffie", "Vashti", "Vickie", "Obdulia", "Youlanda", "Lupe", "Tomoko", "Monserrate", "Domitila",
+ "Etsuko", "Adrienne", "Lakesha", "Melissia", "Odessa", "Meagan", "Veronika", "Jolyn", "Isabelle", "Leah",
+ "Rhiannon", "Gianna", "Audra", "Sommer", "Renate", "Perla", "Thao", "Myong", "Lavette", "Mark", "Emilia",
+ "Ariane", "Karl", "Dorie", "Jacquie", "Mia", "Malka", "Shenita", "Tashina", "Christine", "Cherri", "Roni",
+ "Fran", "Mildred", "Sara", "Clarissa", "Fredia", "Elease", "Samuel", "Earlene", "Vernita", "Mae", "Concha",
+ "Renea", "Tamekia", "Hye", "Ingeborg", "Tessa", "Kelly", "Kristin", "Tam", "Sacha", "Kanisha", "Jillian",
+ "Tiffanie", "Ashlee", "Madelyn", "Donya", "Clementine", "Mickie", "My", "Zena", "Terrie", "Samatha",
+ "Gertie", "Tarra", "Natalia", "Sharlene", "Evie", "Shalon", "Rosalee", "Numbers", "Jodi", "Hattie",
+ "Naoma", "Valene", "Whitley", "Claude", "Alline", "Jeanne", "Camie", "Maragret", "Viola", "Kris", "Marlo",
+ "Arcelia", "Shari", "Jalisa", "Corrie", "Eleonor", "Angelyn", "Merry", "Lauren", "Melita", "Gita",
+ "Elenor", "Aurelia", "Janae", "Lyndia", "Margeret", "Shawanda", "Rolande", "Shirl", "Madeleine", "Celinda",
+ "Jaleesa", "Shemika", "Joye", "Tisa", "Trudie", "Kathrine", "Clarita", "Dinah", "Georgia", "Antoinette",
+ "Janis", "Suzette", "Sherri", "Herta", "Arie", "Hedy", "Cassi", "Audrie", "Caryl", "Jazmine", "Jessica",
+ "Beverly", "Elizbeth", "Marylee", "Londa", "Fredericka", "Argelia", "Nana", "Donnette", "Damaris",
+ "Hailey", "Jamee", "Kathlene", "Glayds", "Lydia", "Apryl", "Verla", "Adam", "Concepcion", "Zelda",
+ "Shonta", "Vernice", "Detra", "Meghann", "Sherley", "Sheri", "Kiyoko", "Margarita", "Adaline", "Mariela",
+ "Velda", "Ailene", "Juliane", "Aiko", "Edyth", "Cecelia", "Shavon", "Florance", "Madeline", "Rheba",
+ "Deann", "Ignacia", "Odelia", "Heide", "Mica", "Jennette", "Maricruz", "Ouida", "Darcy", "Laure",
+ "Justina", "Amada", "Laine", "Cruz", "Sunny", "Francene", "Roxanna", "Nam", "Nancie", "Deanna", "Letty",
+ "Britni", "Kazuko", "Lacresha", "Simon", "Caleb", "Milton", "Colton", "Travis", "Miles", "Jonathan",
+ "Logan", "Rolf", "Emilio", "Roberto", "Marcus", "Tim", "Delmar", "Devon", "Kurt", "Edward", "Jeffrey",
+ "Elvis", "Alfonso", "Blair", "Wm", "Sheldon", "Leonel", "Michal", "Federico", "Jacques", "Leslie",
+ "Augustine", "Hugh", "Brant", "Hong", "Sal", "Modesto", "Curtis", "Jefferey", "Adam", "John", "Glenn",
+ "Vance", "Alejandro", "Refugio", "Lucio", "Demarcus", "Chang", "Huey", "Neville", "Preston", "Bert",
+ "Abram", "Foster", "Jamison", "Kirby", "Erich", "Manual", "Dustin", "Derrick", "Donnie", "Jospeh", "Chris",
+ "Josue", "Stevie", "Russ", "Stanley", "Nicolas", "Samuel", "Waldo", "Jake", "Max", "Ernest", "Reinaldo",
+ "Rene", "Gale", "Morris", "Nathan", "Maximo", "Courtney", "Theodore", "Octavio", "Otha", "Delmer",
+ "Graham", "Dean", "Lowell", "Myles", "Colby", "Boyd", "Adolph", "Jarrod", "Nick", "Mark", "Clinton", "Kim",
+ "Sonny", "Dalton", "Tyler", "Jody", "Orville", "Luther", "Rubin", "Hollis", "Rashad", "Barton", "Vicente",
+ "Ted", "Rick", "Carmine", "Clifton", "Gayle", "Christopher", "Jessie", "Bradley", "Clay", "Theo", "Josh",
+ "Mitchell", "Boyce", "Chung", "Eugenio", "August", "Norbert", "Sammie", "Jerry", "Adan", "Edmundo",
+ "Homer", "Hilton", "Tod", "Kirk", "Emmett", "Milan", "Quincy", "Jewell", "Herb", "Steve", "Carmen",
+ "Bobby", "Odis", "Daron", "Jeremy", "Carl", "Hunter", "Tuan", "Thurman", "Asa", "Brenton", "Shane",
+ "Donny", "Andreas", "Teddy", "Dario", "Cyril", "Hoyt", "Teodoro", "Vincenzo", "Hilario", "Daren",
+ "Agustin", "Marquis", "Ezekiel", "Brendan", "Johnson", "Alden", "Richie", "Granville", "Chad", "Joseph",
+ "Lamont", "Jordon", "Gilberto", "Chong", "Rosendo", "Eddy", "Rob", "Dewitt", "Andre", "Titus", "Russell",
+ "Rigoberto", "Dick", "Garland", "Gabriel", "Hank", "Darius", "Ignacio", "Lazaro", "Johnie", "Mauro",
+ "Edmund", "Trent", "Harris", "Osvaldo", "Marvin", "Judson", "Rodney", "Randall", "Renato", "Richard",
+ "Denny", "Jon", "Doyle", "Cristopher", "Wilson", "Christian", "Jamie", "Roland", "Ken", "Tad", "Romeo",
+ "Seth", "Quinton", "Byron", "Ruben", "Darrel", "Deandre", "Broderick", "Harold", "Ty", "Monroe", "Landon",
+ "Mohammed", "Angel", "Arlen", "Elias", "Andres", "Carlton", "Numbers", "Tony", "Thaddeus", "Issac",
+ "Elmer", "Antoine", "Ned", "Fermin", "Grover", "Benito", "Abdul", "Cortez", "Eric", "Maxwell", "Coy",
+ "Gavin", "Rich", "Andy", "Del", "Giovanni", "Major", "Efren", "Horacio", "Joaquin", "Charles", "Noah",
+ "Deon", "Pasquale", "Reed", "Fausto", "Jermaine", "Irvin", "Ray", "Tobias", "Carter", "Yong", "Jorge",
+ "Brent", "Daniel", "Zane", "Walker", "Thad", "Shaun", "Jaime", "Mckinley", "Bradford", "Nathanial",
+ "Jerald", "Aubrey", "Virgil", "Abel", "Philip", "Chester", "Chadwick", "Dominick", "Britt", "Emmitt",
+ "Ferdinand", "Julian", "Reid", "Santos", "Dwain", "Morgan", "James", "Marion", "Micheal", "Eddie", "Brett",
+ "Stacy", "Kerry", "Dale", "Nicholas", "Darrick", "Freeman", "Scott", "Newton", "Sherman", "Felton",
+ "Cedrick", "Winfred", "Brad", "Fredric", "Dewayne", "Virgilio", "Reggie", "Edgar", "Heriberto", "Shad",
+ "Timmy", "Javier", "Nestor", "Royal", "Lynn", "Irwin", "Ismael", "Jonas", "Wiley", "Austin", "Kieth",
+ "Gonzalo", "Paris", "Earnest", "Arron", "Jarred", "Todd", "Erik", "Maria", "Chauncey", "Neil", "Conrad",
+ "Maurice", "Roosevelt", "Jacob", "Sydney", "Lee", "Basil", "Louis", "Rodolfo", "Rodger", "Roman", "Corey",
+ "Ambrose", "Cristobal", "Sylvester", "Benton", "Franklin", "Marcelo", "Guillermo", "Toby", "Jeramy",
+ "Donn", "Danny", "Dwight", "Clifford", "Valentine", "Matt", "Jules", "Kareem", "Ronny", "Lonny", "Son",
+ "Leopoldo", "Dannie", "Gregg", "Dillon", "Orlando", "Weston", "Kermit", "Damian", "Abraham", "Walton",
+ "Adrian", "Rudolf", "Will", "Les", "Norberto", "Fred", "Tyrone", "Ariel", "Terry", "Emmanuel", "Anderson",
+ "Elton", "Otis", "Derek", "Frankie", "Gino", "Lavern", "Jarod", "Kenny", "Dane", "Keenan", "Bryant",
+ "Eusebio", "Dorian", "Ali", "Lucas", "Wilford", "Jeremiah", "Warner", "Woodrow", "Galen", "Bob",
+ "Johnathon", "Amado", "Michel", "Harry", "Zachery", "Taylor", "Booker", "Hershel", "Mohammad", "Darrell",
+ "Kyle", "Stuart", "Marlin", "Hyman", "Jeffery", "Sidney", "Merrill", "Roy", "Garrett", "Porter", "Kenton",
+ "Giuseppe", "Terrance", "Trey", "Felix", "Buster", "Von", "Jackie", "Linwood", "Darron", "Francisco",
+ "Bernie", "Diego", "Brendon", "Cody", "Marco", "Ahmed", "Antonio", "Vince", "Brooks", "Kendrick", "Ross",
+ "Mohamed", "Jim", "Benny", "Gerald", "Pablo", "Charlie", "Antony", "Werner", "Hipolito", "Minh", "Mel",
+ "Derick", "Armand", "Fidel", "Lewis", "Donnell", "Desmond", "Vaughn", "Guadalupe", "Keneth", "Rodrick",
+ "Spencer", "Chas", "Gus", "Harlan", "Wes", "Carmelo", "Jefferson", "Gerard", "Jarvis", "Haywood", "Hayden",
+ "Sergio", "Gene", "Edgardo", "Colin", "Horace", "Dominic", "Aldo", "Adolfo", "Juan", "Man", "Lenard",
+ "Clement", "Everett", "Hal", "Bryon", "Mason", "Emerson", "Earle", "Laurence", "Columbus", "Lamar",
+ "Douglas", "Ian", "Fredrick", "Marc", "Loren", "Wallace", "Randell", "Noble", "Ricardo", "Rory", "Lindsey",
+ "Boris", "Bill", "Carlos", "Domingo", "Grant", "Craig", "Ezra", "Matthew", "Van", "Rudy", "Danial",
+ "Brock", "Maynard", "Vincent", "Cole", "Damion", "Ellsworth", "Marcel", "Markus", "Rueben", "Tanner",
+ "Reyes", "Hung", "Kennith", "Lindsay", "Howard", "Ralph", "Jed", "Monte", "Garfield", "Avery", "Bernardo",
+ "Malcolm", "Sterling", "Ezequiel", "Kristofer", "Luciano", "Casey", "Rosario", "Ellis", "Quintin",
+ "Trevor", "Miquel", "Jordan", "Arthur", "Carson", "Tyron", "Grady", "Walter", "Jonathon", "Ricky",
+ "Bennie", "Terrence", "Dion", "Dusty", "Roderick", "Isaac", "Rodrigo", "Harrison", "Zack", "Dee", "Devin",
+ "Rey", "Ulysses", "Clint", "Greg", "Dino", "Frances", "Wade", "Franklyn", "Jude", "Bradly", "Salvador",
+ "Rocky", "Weldon", "Lloyd", "Milford", "Clarence", "Alec", "Allan", "Bobbie", "Oswaldo", "Wilfred",
+ "Raleigh", "Shelby", "Willy", "Alphonso", "Arnoldo", "Robbie", "Truman", "Nicky", "Quinn", "Damien",
+ "Lacy", "Marcos", "Parker", "Burt", "Carroll", "Denver", "Buck", "Dong", "Normand", "Billie", "Edwin",
+ "Troy", "Arden", "Rusty", "Tommy", "Kenneth", "Leo", "Claud", "Joel", "Kendall", "Dante", "Milo", "Cruz",
+ "Lucien", "Ramon", "Jarrett", "Scottie", "Deshawn", "Ronnie", "Pete", "Alonzo", "Whitney", "Stefan",
+ "Sebastian", "Edmond", "Enrique", "Branden", "Leonard", "Loyd", "Olin", "Ron", "Rhett", "Frederic",
+ "Orval", "Tyrell", "Gail", "Eli", "Antonia", "Malcom", "Sandy", "Stacey", "Nickolas", "Hosea", "Santo",
+ "Oscar", "Fletcher", "Dave", "Patrick", "Dewey", "Bo", "Vito", "Blaine", "Randy", "Robin", "Winston",
+ "Sammy", "Edwardo", "Manuel", "Valentin", "Stanford", "Filiberto", "Buddy", "Zachariah", "Johnnie",
+ "Elbert", "Paul", "Isreal", "Jerrold", "Leif", "Owen", "Sung", "Junior", "Raphael", "Josef", "Donte",
+ "Allen", "Florencio", "Raymond", "Lauren", "Collin", "Eliseo", "Bruno", "Martin", "Lyndon", "Kurtis",
+ "Salvatore", "Erwin", "Michael", "Sean", "Davis", "Alberto", "King", "Rolland", "Joe", "Tory", "Chase",
+ "Dallas", "Vernon", "Beau", "Terrell", "Reynaldo", "Monty", "Jame", "Dirk", "Florentino", "Reuben", "Saul",
+ "Emory", "Esteban", "Michale", "Claudio", "Jacinto", "Kelley", "Levi", "Andrea", "Lanny", "Wendell",
+ "Elwood", "Joan", "Felipe", "Palmer", "Elmo", "Lawrence", "Hubert", "Rudolph", "Duane", "Cordell",
+ "Everette", "Mack", "Alan", "Efrain", "Trenton", "Bryan", "Tom", "Wilmer", "Clyde", "Chance", "Lou",
+ "Brain", "Justin", "Phil", "Jerrod", "George", "Kris", "Cyrus", "Emery", "Rickey", "Lincoln", "Renaldo",
+ "Mathew", "Luke", "Dwayne", "Alexis", "Jackson", "Gil", "Marty", "Burton", "Emil", "Glen", "Willian",
+ "Clemente", "Keven", "Barney", "Odell", "Reginald", "Aurelio", "Damon", "Ward", "Gustavo", "Harley",
+ "Peter", "Anibal", "Arlie", "Nigel", "Oren", "Zachary", "Scot", "Bud", "Wilbert", "Bart", "Josiah",
+ "Marlon", "Eldon", "Darryl", "Roger", "Anthony", "Omer", "Francis", "Patricia", "Moises", "Chuck",
+ "Waylon", "Hector", "Jamaal", "Cesar", "Julius", "Rex", "Norris", "Ollie", "Isaias", "Quentin", "Graig",
+ "Lyle", "Jeffry", "Karl", "Lester", "Danilo", "Mike", "Dylan", "Carlo", "Ryan", "Leon", "Percy", "Lucius",
+ "Jamel", "Lesley", "Joey", "Cornelius", "Rico", "Arnulfo", "Chet", "Margarito", "Ernie", "Nathanael",
+ "Amos", "Cleveland", "Luigi", "Alfonzo", "Phillip", "Clair", "Elroy", "Alva", "Hans", "Shon", "Gary",
+ "Jesus", "Cary", "Silas", "Keith", "Israel", "Willard", "Randolph", "Dan", "Adalberto", "Claude",
+ "Delbert", "Garry", "Mary", "Larry", "Riley", "Robt", "Darwin", "Barrett", "Steven", "Kelly", "Herschel",
+ "Darnell", "Scotty", "Armando", "Miguel", "Lawerence", "Wesley", "Garth", "Carol", "Micah", "Alvin",
+ "Billy", "Earl", "Pat", "Brady", "Cory", "Carey", "Bernard", "Jayson", "Nathaniel", "Gaylord", "Archie",
+ "Dorsey", "Erasmo", "Angelo", "Elisha", "Long", "Augustus", "Hobert", "Drew", "Stan", "Sherwood",
+ "Lorenzo", "Forrest", "Shawn", "Leigh", "Hiram", "Leonardo", "Gerry", "Myron", "Hugo", "Alvaro", "Leland",
+ "Genaro", "Jamey", "Stewart", "Elden", "Irving", "Olen", "Antone", "Freddy", "Lupe", "Joshua", "Gregory",
+ "Andrew", "Sang", "Wilbur", "Gerardo", "Merlin", "Williams", "Johnny", "Alex", "Tommie", "Jimmy",
+ "Donovan", "Dexter", "Gaston", "Tracy", "Jeff", "Stephen", "Berry", "Anton", "Darell", "Fritz", "Willis",
+ "Noel", "Mariano", "Crawford", "Zoey", "Alex", "Brianna", "Carlie", "Lloyd", "Cal", "Astor", "Randolf",
+ "Magdalene", "Trevelyan", "Terance", "Roy", "Kermit", "Harriett", "Crystal", "Laurinda", "Kiersten",
+ "Phyllida", "Liz", "Bettie", "Rena", "Colten", "Berenice", "Sindy", "Wilma", "Amos", "Candi", "Ritchie",
+ "Dirk", "Kathlyn", "Callista", "Anona", "Flossie", "Sterling", "Calista", "Regan", "Erica", "Jeana",
+ "Keaton", "York", "Nolan", "Daniel", "Benton", "Tommie", "Serenity", "Deanna", "Chas", "Heron", "Marlyn",
+ "Xylia", "Tristin", "Lyndon", "Andriana", "Madelaine", "Maddison", "Leila", "Chantelle", "Audrey",
+ "Connor", "Daley", "Tracee", "Tilda", "Eliot", "Merle", "Linwood", "Kathryn", "Silas", "Alvina",
+ "Phinehas", "Janis", "Alvena", "Zubin", "Gwendolen", "Caitlyn", "Bertram", "Hailee", "Idelle", "Homer",
+ "Jannah", "Delbert", "Rhianna", "Cy", "Jefferson", "Wayland", "Nona", "Tempest", "Reed", "Jenifer",
+ "Ellery", "Nicolina", "Aldous", "Prince", "Lexia", "Vinnie", "Doug", "Alberic", "Kayleen", "Woody",
+ "Rosanne", "Ysabel", "Skyler", "Twyla", "Geordie", "Leta", "Clive", "Aaron", "Scottie", "Celeste", "Chuck",
+ "Erle", "Lallie", "Jaycob", "Ray", "Carrie", "Laurita", "Noreen", "Meaghan", "Ulysses", "Andy", "Drogo",
+ "Dina", "Yasmin", "Mya", "Luvenia", "Urban", "Jacob", "Laetitia", "Sherry", "Love", "Michaela", "Deonne",
+ "Summer", "Brendon", "Sheena", "Mason", "Jayson", "Linden", "Salal", "Darrell", "Diana", "Hudson",
+ "Lennon", "Isador", "Charley", "April", "Ralph", "James", "Mina", "Jolyon", "Laurine", "Monna", "Carita",
+ "Munro", "Elsdon", "Everette", "Radclyffe", "Darrin", "Herbert", "Gawain", "Sheree", "Trudy", "Emmaline",
+ "Kassandra", "Rebecca", "Basil", "Jen", "Don", "Osborne", "Lilith", "Hannah", "Fox", "Rupert", "Paulene",
+ "Darius", "Wally", "Baptist", "Sapphire", "Tia", "Sondra", "Kylee", "Ashton", "Jepson", "Joetta", "Val",
+ "Adela", "Zacharias", "Zola", "Marmaduke", "Shannah", "Posie", "Oralie", "Brittany", "Ernesta", "Raymund",
+ "Denzil", "Daren", "Roosevelt", "Nelson", "Fortune", "Mariel", "Nick", "Jaden", "Upton", "Oz", "Margaux",
+ "Precious", "Albert", "Bridger", "Jimmy", "Nicola", "Rosalynne", "Keith", "Walt", "Della", "Joanna",
+ "Xenia", "Esmeralda", "Major", "Simon", "Rexana", "Stacy", "Calanthe", "Sherley", "Kaitlyn", "Graham",
+ "Ramsey", "Abbey", "Madlyn", "Kelvin", "Bill", "Rue", "Monica", "Caileigh", "Laraine", "Booker", "Jayna",
+ "Greta", "Jervis", "Sherman", "Kendrick", "Tommy", "Iris", "Geffrey", "Kaelea", "Kerr", "Garrick", "Jep",
+ "Audley", "Nic", "Bronte", "Beulah", "Patricia", "Jewell", "Deidra", "Cory", "Everett", "Harper",
+ "Charity", "Godfrey", "Jaime", "Sinclair", "Talbot", "Dayna", "Cooper", "Rosaline", "Jennie", "Eileen",
+ "Latanya", "Corinna", "Roxie", "Caesar", "Charles", "Pollie", "Lindsey", "Sorrel", "Dwight", "Jocelyn",
+ "Weston", "Shyla", "Valorie", "Bessie", "Josh", "Lessie", "Dayton", "Kathi", "Chasity", "Wilton", "Adam",
+ "William", "Ash", "Angela", "Ivor", "Ria", "Jazmine", "Hailey", "Jo", "Silvestra", "Ernie", "Clifford",
+ "Levi", "Matilda", "Quincey", "Camilla", "Delicia", "Phemie", "Laurena", "Bambi", "Lourdes", "Royston",
+ "Chastity", "Lynwood", "Elle", "Brenda", "Phoebe", "Timothy", "Raschelle", "Lilly", "Burt", "Rina",
+ "Rodney", "Maris", "Jaron", "Wilf", "Harlan", "Audra", "Vincent", "Elwyn", "Drew", "Wynter", "Ora",
+ "Lissa", "Virgil", "Xavier", "Chad", "Ollie", "Leyton", "Karolyn", "Skye", "Roni", "Gladys", "Dinah",
+ "Penny", "August", "Osmund", "Whitaker", "Brande", "Cornell", "Phil", "Zara", "Kilie", "Gavin", "Coty",
+ "Randy", "Teri", "Keira", "Pru", "Clemency", "Kelcey", "Nevil", "Poppy", "Gareth", "Christabel", "Bastian",
+ "Wynonna", "Roselyn", "Goddard", "Collin", "Trace", "Neal", "Effie", "Denys", "Virginia", "Richard",
+ "Isiah", "Harrietta", "Gaylord", "Diamond", "Trudi", "Elaine", "Jemmy", "Gage", "Annabel", "Quincy", "Syd",
+ "Marianna", "Philomena", "Aubree", "Kathie", "Jacki", "Kelley", "Bess", "Cecil", "Maryvonne", "Kassidy",
+ "Anselm", "Dona", "Darby", "Jamison", "Daryl", "Darell", "Teal", "Lennie", "Bartholomew", "Katie",
+ "Maybelline", "Kimball", "Elvis", "Les", "Flick", "Harley", "Beth", "Bidelia", "Montague", "Helen", "Ozzy",
+ "Stef", "Debra", "Maxene", "Stefanie", "Russ", "Avril", "Johnathan", "Orson", "Chelsey", "Josephine",
+ "Deshaun", "Wendell", "Lula", "Ferdinanda", "Greg", "Brad", "Kynaston", "Dena", "Russel", "Robertina",
+ "Misti", "Leon", "Anjelica", "Bryana", "Myles", "Judi", "Curtis", "Davin", "Kristia", "Chrysanta",
+ "Hayleigh", "Hector", "Osbert", "Eustace", "Cary", "Tansy", "Cayley", "Maryann", "Alissa", "Ike",
+ "Tranter", "Reina", "Alwilda", "Sidony", "Columbine", "Astra", "Jillie", "Stephania", "Jonah", "Kennedy",
+ "Ferdinand", "Allegria", "Donella", "Kelleigh", "Darian", "Eldreda", "Jayden", "Herbie", "Jake", "Winston",
+ "Vi", "Annie", "Cherice", "Hugo", "Tricia", "Haydee", "Cassarah", "Darden", "Mallory", "Alton", "Hadley",
+ "Romayne", "Lacey", "Ern", "Alayna", "Cecilia", "Seward", "Tilly", "Edgar", "Concordia", "Ibbie", "Dahlia",
+ "Oswin", "Stu", "Brett", "Maralyn", "Kristeen", "Dotty", "Robyn", "Nessa", "Tresha", "Guinevere",
+ "Emerson", "Haze", "Lyn", "Henderson", "Lexa", "Jaylen", "Gail", "Lizette", "Tiara", "Robbie", "Destiny",
+ "Alice", "Livia", "Rosy", "Leah", "Jan", "Zach", "Vita", "Gia", "Micheal", "Rowina", "Alysha", "Bobbi",
+ "Delores", "Osmond", "Karaugh", "Wilbur", "Kasandra", "Renae", "Kaety", "Dora", "Gaye", "Amaryllis",
+ "Katelyn", "Dacre", "Prudence", "Ebony", "Camron", "Jerrold", "Vivyan", "Randall", "Donna", "Misty",
+ "Damon", "Selby", "Esmund", "Rian", "Garry", "Julius", "Raelene", "Clement", "Dom", "Tibby", "Moss",
+ "Millicent", "Gwendoline", "Berry", "Ashleigh", "Lilac", "Quin", "Vere", "Creighton", "Harriet", "Malvina",
+ "Lianne", "Pearle", "Kizzie", "Kara", "Petula", "Jeanie", "Maria", "Pacey", "Victoria", "Huey", "Toni",
+ "Rose", "Wallis", "Diggory", "Josiah", "Delma", "Keysha", "Channing", "Prue", "Lee", "Ryan", "Sidney",
+ "Valerie", "Clancy", "Ezra", "Gilbert", "Clare", "Laz", "Crofton", "Mike", "Annabella", "Tara", "Eldred",
+ "Arthur", "Jaylon", "Peronel", "Paden", "Dot", "Marian", "Amyas", "Alexus", "Esmond", "Abbie", "Stanley",
+ "Brittani", "Vickie", "Errol", "Kimberlee", "Uland", "Ebenezer", "Howie", "Eveline", "Andrea", "Trish",
+ "Hopkin", "Bryanna", "Temperance", "Valarie", "Femie", "Alix", "Terrell", "Lewin", "Lorrin", "Happy",
+ "Micah", "Rachyl", "Sloan", "Gertrude", "Elizabeth", "Dorris", "Andra", "Bram", "Gary", "Jeannine",
+ "Maurene", "Irene", "Yolonda", "Jonty", "Coleen", "Cecelia", "Chantal", "Stuart", "Caris", "Ros",
+ "Kaleigh", "Mirabelle", "Kolby", "Primrose", "Susannah", "Ginny", "Jinny", "Dolly", "Lettice", "Sonny",
+ "Melva", "Ernest", "Garret", "Reagan", "Trenton", "Gallagher", "Edwin", "Nikolas", "Corrie", "Lynette",
+ "Ettie", "Sly", "Debbi", "Eudora", "Brittney", "Tacey", "Marius", "Anima", "Gordon", "Olivia", "Kortney",
+ "Shantel", "Kolleen", "Nevaeh", "Buck", "Sera", "Liliana", "Aric", "Kalyn", "Mick", "Libby", "Ingram",
+ "Alexandria", "Darleen", "Jacklyn", "Hughie", "Tyler", "Aida", "Ronda", "Deemer", "Taryn", "Laureen",
+ "Samantha", "Dave", "Hardy", "Baldric", "Montgomery", "Gus", "Ellis", "Titania", "Luke", "Chase", "Haidee",
+ "Mayra", "Isabell", "Trinity", "Milo", "Abigail", "Tacita", "Meg", "Hervey", "Natasha", "Sadie", "Holden",
+ "Dee", "Mansel", "Perry", "Randi", "Frederica", "Georgina", "Kolour", "Debbie", "Seraphina", "Elspet",
+ "Julyan", "Raven", "Zavia", "Jarvis", "Jaymes", "Grover", "Cairo", "Alea", "Jordon", "Braxton", "Donny",
+ "Rhoda", "Tonya", "Bee", "Alyssia", "Ashlyn", "Reanna", "Lonny", "Arlene", "Deb", "Jane", "Nikole",
+ "Bettina", "Harrison", "Tamzen", "Arielle", "Adelaide", "Faith", "Bridie", "Wilburn", "Fern", "Nan",
+ "Shaw", "Zeke", "Alan", "Dene", "Gina", "Alexa", "Bailey", "Sal", "Tammy", "Maximillian", "America",
+ "Sylvana", "Fitz", "Mo", "Marissa", "Cass", "Eldon", "Wilfrid", "Tel", "Joann", "Kendra", "Tolly",
+ "Leanne", "Ferdie", "Haven", "Lucas", "Marlee", "Cyrilla", "Red", "Phoenix", "Jazmin", "Carin", "Gena",
+ "Lashonda", "Tucker", "Genette", "Kizzy", "Winifred", "Melody", "Keely", "Kaylyn", "Radcliff", "Lettie",
+ "Foster", "Lyndsey", "Nicholas", "Farley", "Louisa", "Dana", "Dortha", "Francine", "Doran", "Bonita",
+ "Hal", "Sawyer", "Reginald", "Aislin", "Nathan", "Baylee", "Abilene", "Ladonna", "Maurine", "Shelly",
+ "Deandre", "Jasmin", "Roderic", "Tiffany", "Amanda", "Verity", "Wilford", "Gayelord", "Whitney", "Demelza",
+ "Kenton", "Alberta", "Kyra", "Tabitha", "Sampson", "Korey", "Lillian", "Edison", "Clayton", "Steph",
+ "Maya", "Dusty", "Jim", "Ronny", "Adrianne", "Bernard", "Harris", "Kiley", "Alexander", "Kisha", "Ethalyn",
+ "Patience", "Briony", "Indigo", "Aureole", "Makenzie", "Molly", "Sherilyn", "Barry", "Laverne", "Hunter",
+ "Rocky", "Tyreek", "Madalyn", "Phyliss", "Chet", "Beatrice", "Faye", "Lavina", "Madelyn", "Tracey",
+ "Gyles", "Patti", "Carlyn", "Stephanie", "Jackalyn", "Larrie", "Kimmy", "Isolda", "Emelina", "Lis",
+ "Zillah", "Cody", "Sheard", "Rufus", "Paget", "Mae", "Rexanne", "Luvinia", "Tamsen", "Rosanna", "Greig",
+ "Stacia", "Mabelle", "Quianna", "Lotus", "Delice", "Bradford", "Angus", "Cosmo", "Earlene", "Adrian",
+ "Arlie", "Noelle", "Sabella", "Isa", "Adelle", "Innocent", "Kirby", "Trixie", "Kenelm", "Nelda", "Melia",
+ "Kendal", "Dorinda", "Placid", "Linette", "Kam", "Sherisse", "Evan", "Ewart", "Janice", "Linton",
+ "Jacaline", "Charissa", "Douglas", "Aileen", "Kemp", "Oli", "Amethyst", "Rosie", "Nigella", "Sherill",
+ "Anderson", "Alanna", "Eric", "Claudia", "Jennifer", "Boniface", "Harriet", "Vernon", "Lucy", "Shawnee",
+ "Gerard", "Cecily", "Romey", "Randall", "Wade", "Lux", "Dawson", "Gregg", "Kade", "Roxanne", "Melinda",
+ "Rolland", "Rowanne", "Fannie", "Isidore", "Melia", "Harvie", "Salal", "Eleonor", "Jacquette", "Lavone",
+ "Shanika", "Tarquin", "Janet", "Josslyn", "Maegan", "Augusta", "Aubree", "Francene", "Martie", "Marisa",
+ "Tyreek", "Tatianna", "Caleb", "Sheridan", "Nellie", "Barbara", "Wat", "Jayla", "Esmaralda", "Graeme",
+ "Lavena", "Jemima", "Nikolas", "Triston", "Portia", "Kyla", "Marcus", "Raeburn", "Jamison", "Earl", "Wren",
+ "Leighton", "Lagina", "Lucasta", "Dina", "Amaranta", "Jessika", "Claud", "Bernard", "Winifred", "Ebba",
+ "Sammi", "Gall", "Chloe", "Ottoline", "Herbert", "Janice", "Gareth", "Channing", "Caleigh", "Kailee",
+ "Ralphie", "Tamzen", "Quincy", "Beaumont", "Albert", "Jadyn", "Violet", "Luanna", "Moriah", "Humbert",
+ "Jed", "Leona", "Hale", "Mitch", "Marlin", "Nivek", "Darwin", "Dirk", "Liliana", "Meadow", "Bernadine",
+ "Jorie", "Peyton", "Astra", "Roscoe", "Gina", "Lovell", "Jewel", "Romayne", "Rosy", "Imogene",
+ "Margaretta", "Lorinda", "Hopkin", "Bobby", "Flossie", "Bennie", "Horatio", "Jonah", "Lyn", "Deana",
+ "Juliana", "Blanch", "Wright", "Kendal", "Woodrow", "Tania", "Austyn", "Val", "Mona", "Charla", "Rudyard",
+ "Pamela", "Raven", "Zena", "Nicola", "Kaelea", "Conor", "Virgil", "Sonnie", "Goodwin", "Christianne",
+ "Linford", "Myron", "Denton", "Charita", "Brody", "Ginnie", "Harrison", "Jeanine", "Quin", "Isolda",
+ "Zoie", "Pearce", "Margie", "Larrie", "Angelina", "Marcia", "Jessamine", "Delilah", "Dick", "Luana",
+ "Delicia", "Lake", "Luvenia", "Vaughan", "Concordia", "Gayelord", "Cheyenne", "Felix", "Dorris", "Pen",
+ "Kristeen", "Parris", "Everitt", "Josephina", "Amy", "Tommie", "Adrian", "April", "Rosaline", "Zachery",
+ "Trace", "Phoebe", "Jenelle", "Kameron", "Katharine", "Media", "Colton", "Tad", "Quianna", "Kerenza",
+ "Greta", "Luvinia", "Pete", "Tonya", "Beckah", "Barbra", "Jon", "Tetty", "Corey", "Sylvana", "Kizzy",
+ "Korey", "Trey", "Haydee", "Penny", "Mandy", "Panda", "Coline", "Ramsey", "Sukie", "Annabel", "Sarina",
+ "Corbin", "Suzanna", "Rob", "Duana", "Shell", "Jason", "Eddy", "Rube", "Roseann", "Celia", "Brianne",
+ "Nerissa", "Jera", "Humphry", "Ashlynn", "Terrence", "Philippina", "Coreen", "Kolour", "Indiana", "Paget",
+ "Marlyn", "Hester", "Isbel", "Ocean", "Harris", "Leslie", "Vere", "Monroe", "Isabelle", "Bertie", "Clitus",
+ "Dave", "Alethea", "Lessie", "Louiza", "Madlyn", "Garland", "Wolf", "Lalo", "Donny", "Amabel", "Tianna",
+ "Louie", "Susie", "Mackenzie", "Renie", "Tess", "Marmaduke", "Gwendolen", "Bettina", "Beatrix", "Esmund",
+ "Minnie", "Carlie", "Barnabas", "Ruthie", "Honour", "Haylie", "Xavior", "Freddie", "Ericka", "Aretha",
+ "Edie", "Madelina", "Anson", "Tabby", "Derrick", "Jocosa", "Deirdre", "Aislin", "Chastity", "Abigail",
+ "Wynonna", "Zo", "Eldon", "Krystine", "Ghislaine", "Zavia", "Nolene", "Marigold", "Kelley", "Sylvester",
+ "Odell", "George", "Laurene", "Franklyn", "Clarice", "Mo", "Dustin", "Debbi", "Lina", "Tony", "Acacia",
+ "Hettie", "Natalee", "Marcie", "Brittany", "Elnora", "Rachel", "Dawn", "Basil", "Christal", "Anjelica",
+ "Fran", "Tawny", "Delroy", "Tameka", "Lillie", "Ceara", "Deanna", "Deshaun", "Ken", "Bradford", "Justina",
+ "Merle", "Draven", "Gretta", "Harriette", "Webster", "Nathaniel", "Anemone", "Coleen", "Ruth", "Chryssa",
+ "Hortensia", "Saffie", "Deonne", "Leopold", "Harlan", "Lea", "Eppie", "Lucinda", "Tilda", "Fanny", "Titty",
+ "Lockie", "Jepson", "Sherisse", "Maralyn", "Ethel", "Sly", "Ebenezer", "Canute", "Ella", "Freeman",
+ "Reuben", "Olivette", "Nona", "Rik", "Amice", "Kristine", "Kathie", "Jayne", "Jeri", "Mckenna", "Bertram",
+ "Kaylee", "Livia", "Gil", "Wallace", "Maryann", "Keeleigh", "Laurinda", "Doran", "Khloe", "Dakota",
+ "Yaron", "Kimberleigh", "Gytha", "Doris", "Marylyn", "Benton", "Linnette", "Esther", "Jakki", "Rowina",
+ "Marian", "Roselyn", "Norbert", "Maggie", "Caesar", "Phinehas", "Jerry", "Jasmine", "Antonette", "Miriam",
+ "Monna", "Maryvonne", "Jacquetta", "Bernetta", "Napier", "Annie", "Gladwin", "Sheldon", "Aric", "Elouise",
+ "Gawain", "Kristia", "Gabe", "Kyra", "Red", "Tod", "Dudley", "Lorraine", "Ryley", "Sabina", "Poppy",
+ "Leland", "Aileen", "Eglantine", "Alicia", "Jeni", "Addy", "Tiffany", "Geffrey", "Lavina", "Collin",
+ "Clover", "Vin", "Jerome", "Doug", "Vincent", "Florence", "Scarlet", "Celeste", "Desdemona", "Tiphanie",
+ "Kassandra", "Ashton", "Madison", "Art", "Magdalene", "Iona", "Josepha", "Anise", "Ferne", "Derek",
+ "Huffie", "Qiana", "Ysabel", "Tami", "Shannah", "Xavier", "Willard", "Winthrop", "Vickie", "Maura",
+ "Placid", "Tiara", "Reggie", "Elissa", "Isa", "Chrysanta", "Jeff", "Bessie", "Terri", "Amilia", "Brett",
+ "Daniella", "Damion", "Carolina", "Maximillian", "Travers", "Benjamin", "Oprah", "Darcy", "Yolanda",
+ "Nicolina", "Crofton", "Jarrett", "Kaitlin", "Shauna", "Keren", "Bevis", "Kalysta", "Sharron", "Alyssa",
+ "Blythe", "Zelma", "Caelie", "Norwood", "Billie", "Patrick", "Gary", "Cambria", "Tylar", "Mason", "Helen",
+ "Melyssa", "Gene", "Gilberta", "Carter", "Herbie", "Harmonie", "Leola", "Eugenia", "Clint", "Pauletta",
+ "Edwyna", "Georgina", "Teal", "Harper", "Izzy", "Dillon", "Kezia", "Evangeline", "Colene", "Madelaine",
+ "Zilla", "Rudy", "Dottie", "Caris", "Morton", "Marge", "Tacey", "Parker", "Troy", "Liza", "Lewin",
+ "Tracie", "Justine", "Dallas", "Linden", "Ray", "Loretta", "Teri", "Elvis", "Diane", "Julianna", "Manfred",
+ "Denise", "Eireen", "Ann", "Kenith", "Linwood", "Kathlyn", "Bernice", "Shelley", "Oswald", "Amedeus",
+ "Homer", "Tanzi", "Ted", "Ralphina", "Hyacinth", "Lotus", "Matthias", "Arlette", "Clark", "Cecil",
+ "Elspeth", "Alvena", "Noah", "Millard", "Brenden", "Cole", "Philipa", "Nina", "Thelma", "Iantha", "Reid",
+ "Jefferson", "Meg", "Elsie", "Shirlee", "Nathan", "Nancy", "Simona", "Racheal", "Carin", "Emory", "Delice",
+ "Kristi", "Karaugh", "Kaety", "Tilly", "Em", "Alanis", "Darrin", "Jerrie", "Hollis", "Cary", "Marly",
+ "Carita", "Jody", "Farley", "Hervey", "Rosalin", "Cuthbert", "Stewart", "Jodene", "Caileigh", "Briscoe",
+ "Dolores", "Sheree", "Eustace", "Nigel", "Detta", "Barret", "Rowland", "Kenny", "Githa", "Zoey", "Adela",
+ "Petronella", "Opal", "Coleman", "Niles", "Cyril", "Dona", "Alberic", "Allannah", "Jules", "Avalon",
+ "Hadley", "Thomas", "Renita", "Calanthe", "Heron", "Shawnda", "Chet", "Malina", "Manny", "Rina", "Frieda",
+ "Eveleen", "Deshawn", "Amos", "Raelene", "Paige", "Molly", "Nannie", "Ileen", "Brendon", "Milford",
+ "Unice", "Rebeccah", "Caedmon", "Gae", "Doreen", "Vivian", "Louis", "Raphael", "Vergil", "Lise", "Glenn",
+ "Karyn", "Terance", "Reina", "Jake", "Gordon", "Wisdom", "Isiah", "Gervase", "Fern", "Marylou", "Roddy",
+ "Justy", "Derick", "Shantelle", "Adam", "Chantel", "Madoline", "Emmerson", "Lexie", "Mickey", "Stephen",
+ "Dane", "Stacee", "Elwin", "Tracey", "Alexandra", "Ricky", "Ian", "Kasey", "Rita", "Alanna", "Georgene",
+ "Deon", "Zavier", "Ophelia", "Deforest", "Lowell", "Zubin", "Hardy", "Osmund", "Tabatha", "Debby",
+ "Katlyn", "Tallulah", "Priscilla", "Braden", "Wil", "Keziah", "Jen", "Aggie", "Korbin", "Lemoine",
+ "Barnaby", "Tranter", "Goldie", "Roderick", "Trina", "Emery", "Pris", "Sidony", "Adelle", "Tate", "Wilf",
+ "Zola", "Brande", "Chris", "Calanthia", "Lilly", "Kaycee", "Lashonda", "Jasmin", "Elijah", "Shantel",
+ "Simon", "Rosalind", "Jarod", "Kaylie", "Corrine", "Joselyn", "Archibald", "Mariabella", "Winton",
+ "Merlin", "Chad", "Ursula", "Kristopher", "Hewie", "Adrianna", "Lyndsay", "Jasmyn", "Tim", "Evette",
+ "Margaret", "Samson", "Bronte", "Terence", "Leila", "Candice", "Tori", "Jamey", "Coriander", "Conrad",
+ "Floyd", "Karen", "Lorin", "Maximilian", "Cairo", "Emily", "Yasmin", "Karolyn", "Bryan", "Lanny",
+ "Kimberly", "Rick", "Chaz", "Krystle", "Lyric", "Laura", "Garrick", "Flip", "Monty", "Brendan",
+ "Ermintrude", "Rayner", "Merla", "Titus", "Marva", "Patricia", "Leone", "Tracy", "Jaqueline", "Hallam",
+ "Delores", "Cressida", "Carlyle", "Leann", "Kelcey", "Laurence", "Ryan", "Reynold", "Mark", "Collyn",
+ "Audie", "Sammy", "Ellery", "Sallie", "Pamelia", "Adolph", "Lydia", "Titania", "Ron", "Bridger", "Aline",
+ "Read", "Kelleigh", "Weldon", "Irving", "Garey", "Diggory", "Evander", "Kylee", "Deidre", "Ormond",
+ "Laurine", "Reannon", "Arline", "Pat"
+
+ };
+
+ public static String[] jargon = { "wireless", "signal", "network", "3G", "plan", "touch-screen",
+ "customer-service", "reachability", "voice-command", "shortcut-menu", "customization", "platform", "speed",
+ "voice-clarity", "voicemail-service" };
+
+ public static String[] vendors = { "at&t", "verizon", "t-mobile", "sprint", "motorola", "samsung", "iphone" };
+
+ public static String[] org_list = { "Latsonity", "ganjalax", "Zuncan", "Lexitechno", "Hot-tech", "subtam",
+ "Coneflex", "Ganjatax", "physcane", "Tranzap", "Qvohouse", "Zununoing", "jaydax", "Keytech", "goldendexon",
+ "Villa-tech", "Trustbam", "Newcom", "Voltlane", "Ontohothex", "Ranhotfan", "Alphadax", "Transhigh",
+ "kin-ron", "Doublezone", "Solophase", "Vivaace", "silfind", "Basecone", "sonstreet", "Freshfix",
+ "Techitechi", "Kanelectrics", "linedexon", "Goldcity", "Newfase", "Technohow", "Zimcone", "Salthex",
+ "U-ron", "Solfix", "whitestreet", "Xx-technology", "Hexviafind", "over-it", "Strongtone", "Tripplelane",
+ "geomedia", "Scotcity", "Inchex", "Vaiatech", "Striptaxon", "Hatcom", "tresline", "Sanjodax", "freshdox",
+ "Sumlane", "Quadlane", "Newphase", "overtech", "Voltbam", "Icerunin", "Fixdintex", "Hexsanhex", "Statcode",
+ "Greencare", "U-electrics", "Zamcorporation", "Ontotanin", "Tanzimcare", "Groovetex", "Ganjastrip",
+ "Redelectronics", "Dandamace", "Whitemedia", "strongex", "Streettax", "highfax", "Mathtech", "Xx-drill",
+ "Sublamdox", "Unijobam", "Rungozoom", "Fixelectrics", "Villa-dox", "Ransaofan", "Plexlane", "itlab",
+ "Lexicone", "Fax-fax", "Viatechi", "Inchdox", "Kongreen", "Doncare", "Y-geohex", "Opeelectronics",
+ "Medflex", "Dancode", "Roundhex", "Labzatron", "Newhotplus", "Sancone", "Ronholdings", "Quoline",
+ "zoomplus", "Fix-touch", "Codetechno", "Tanzumbam", "Indiex", "Canline" };
+
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GULongIDGenerator.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GULongIDGenerator.java
new file mode 100644
index 0000000..0832613
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GULongIDGenerator.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicLong;
+
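+/**
+ * Generates globally unique long ids for a given (seed, partition) pair.
+ * A minimal usage sketch (illustrative values):
+ *
+ *   GULongIDGenerator gen = new GULongIDGenerator(0, (byte) 1); // partition 0, seed 1
+ *   long id = gen.getNextULong();
+ *
+ * Ids produced by generators with distinct (seed, partition) pairs do not
+ * collide while the low 48-bit counter space is not exhausted.
+ */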
+public class GULongIDGenerator {
+
+ private final int partition;
+ private final long baseValue;
+ private final AtomicLong nextValue;
+
+ public GULongIDGenerator(int partition, byte seed) {
+ this.partition = partition;
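+ // Pack the 8-byte base value (big-endian): seed byte, partition byte, then six
+ // zero bytes. The high 16 bits identify this generator; the low 48 bits act as
+ // a monotonically increasing counter.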
+ ByteBuffer buffer = ByteBuffer.allocate(8);
+ buffer.put(seed);
+ buffer.put((byte) partition);
+ buffer.putInt(0);
+ buffer.putShort((short) 0);
+ buffer.flip();
+ this.baseValue = buffer.getLong();
+ this.nextValue = new AtomicLong(baseValue);
+ }
+
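+ /** Returns the next id; unique as long as the 48-bit counter does not overflow. */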
+ public long getNextULong() {
+ return nextValue.incrementAndGet();
+ }
+
+ public int getPartition() {
+ return partition;
+ }
+
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java
new file mode 100644
index 0000000..20b9be1
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java
@@ -0,0 +1,93 @@
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.util.logging.Level;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.dataset.adapter.StreamBasedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
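+/**
+ * An adapter for ingesting a feed over a socket. It opens a server socket at
+ * the configured port, accepts a client connection, and hands the connection's
+ * input stream to the tuple parser.
+ */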
+public class GenericSocketFeedAdapter extends StreamBasedAdapter implements IFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ private SocketFeedServer socketFeedServer;
+
+ public GenericSocketFeedAdapter(ITupleParserFactory parserFactory, ARecordType outputtype, int port,
+ IHyracksTaskContext ctx) throws AsterixException, IOException {
+ super(parserFactory, outputtype, ctx);
+ this.socketFeedServer = new SocketFeedServer(outputtype, port);
+ }
+
+ @Override
+ public void start(int partition, IFrameWriter writer) throws Exception {
+ super.start(partition, writer);
+ }
+
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ return socketFeedServer.getInputStream();
+ }
+
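+ /** Listens at the configured port and exposes the accepted client's input stream. */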
+ private static class SocketFeedServer {
+ private ServerSocket serverSocket;
+ private InputStream inputStream;
+
+ public SocketFeedServer(ARecordType outputtype, int port) throws IOException, AsterixException {
+ try {
+ serverSocket = new ServerSocket(port);
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("port: " + port + " unusable");
+ }
+ throw new IOException("unable to bind feed server to port " + port, e);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Feed server configured to use port: " + port);
+ }
+ }
+
+ public InputStream getInputStream() {
+ Socket socket;
+ try {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("waiting for client at " + serverSocket.getLocalPort());
+ }
+ socket = serverSocket.accept();
+ inputStream = socket.getInputStream();
+ } catch (IOException e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.severe("Unable to create input stream required for feed ingestion");
+ }
+ }
+ return inputStream;
+ }
+
+ public void stop() throws IOException {
+ try {
+ serverSocket.close();
+ } catch (IOException ioe) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to close socket at " + serverSocket.getLocalPort());
+ }
+ }
+ }
+
+ }
+
+ @Override
+ public void stop() throws Exception {
+ socketFeedServer.stop();
+ }
+
+ @Override
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PUSH;
+ }
+
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java
new file mode 100644
index 0000000..ce14bb2
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+
+import edu.uci.ics.asterix.external.adapter.factory.StreamBasedAdapterFactory;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+/**
+ * Factory class for creating an instance of @see{GenericSocketFeedAdapter}.
+ * The adapter listens at a port, receiving data from the external world.
+ * Received data is parsed into the Asterix Data Model (ADM) format.
+ */
+public class GenericSocketFeedAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private ARecordType outputType;
+
+ private List<Pair<String, Integer>> sockets;
+
+ private Mode mode = Mode.IP;
+
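+ /**
+ * Expected value: a comma-separated list of host:port pairs, e.g.
+ * "127.0.0.1:10001,127.0.0.1:10002" (illustrative values). Each pair is
+ * assigned to one partition of the feed.
+ */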
+ public static final String KEY_SOCKETS = "sockets";
+
+ public static final String KEY_MODE = "address-type";
+
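+ /** How the host part of each socket is interpreted: a node controller id (NC) or an IP address (IP). */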
+ public static enum Mode {
+ NC,
+ IP
+ }
+
+ @Override
+ public String getName() {
+ return "socket_adaptor";
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.GENERIC;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ public List<Pair<String, Integer>> getSockets() {
+ return sockets;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
+ this.configuration = configuration;
+ this.outputType = outputType;
+ this.configureFormat(outputType);
+ this.configureSockets(configuration);
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ List<String> locations = new ArrayList<String>();
+ for (Pair<String, Integer> socket : sockets) {
+ locations.add(socket.first);
+ }
+ return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
+ }
+
+ @Override
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ Pair<String, Integer> socket = sockets.get(partition);
+ return new GenericSocketFeedAdapter(parserFactory, outputType, socket.second, ctx);
+ }
+
+ private void configureSockets(Map<String, String> configuration) throws Exception {
+ sockets = new ArrayList<Pair<String, Integer>>();
+ String modeValue = configuration.get(KEY_MODE);
+ if (modeValue != null) {
+ mode = Mode.valueOf(modeValue.trim().toUpperCase());
+ }
+ String socketsValue = configuration.get(KEY_SOCKETS);
+ if (socketsValue == null) {
+ throw new IllegalArgumentException("'sockets' parameter not specified as part of adapter configuration");
+ }
+ Map<String, Set<String>> ncMap = AsterixRuntimeUtil.getNodeControllerMap();
+ List<String> ncs = AsterixRuntimeUtil.getAllNodeControllers();
+ String[] socketsArray = socketsValue.split(",");
+ Random random = new Random();
+ for (String socket : socketsArray) {
+ String[] socketTokens = socket.split(":");
+ String host = socketTokens[0];
+ int port = Integer.parseInt(socketTokens[1]);
+ Pair<String, Integer> p = null;
+ switch (mode) {
+ case IP:
+ Set<String> ncsOnIp = ncMap.get(host);
+ if (ncsOnIp == null || ncsOnIp.isEmpty()) {
+ throw new IllegalArgumentException("Invalid host " + host
+ + " as it is not part of the AsterixDB cluster. Valid choices are "
+ + StringUtils.join(ncMap.keySet(), ", "));
+ }
+ String[] ncArray = ncsOnIp.toArray(new String[] {});
+ String nc = ncArray[random.nextInt(ncArray.length)];
+ p = new Pair<String, Integer>(nc, port);
+ break;
+
+ case NC:
+ if (!ncs.contains(host)) {
+ throw new IllegalArgumentException("Invalid NC " + host
+ + " as it is not part of the AsterixDB cluster. Valid choices are "
+ + StringUtils.join(ncs, ", "));
+ }
+ p = new Pair<String, Integer>(host, port);
+ break;
+ }
+ sockets.add(p);
+ }
+ }
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
index e037ec6..8f169f2 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
@@ -18,43 +18,28 @@
import java.io.InputStream;
import java.util.Map;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
-import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
-import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
-import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
/**
- * An adapter that simulates a feed from the contents of a source file. The file can be on the local file
- * system or on HDFS. The feed ends when the content of the source file has been ingested.
+ * An adapter that simulates a feed from the contents of a source file. The file
+ * can be on the local file system or on HDFS. The feed ends when the content of
+ * the source file has been ingested.
*/
-public class RateControlledFileSystemBasedAdapter extends FileSystemBasedAdapter implements ITypedDatasourceAdapter,
- IManagedFeedAdapter {
+
+public class RateControlledFileSystemBasedAdapter extends FileSystemBasedAdapter implements IFeedAdapter {
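+ // Ingestion is paced via the "tuple-interval" property (milliseconds slept
+ // between successive tuples); see RateControlledTupleParser in the factory.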
private static final long serialVersionUID = 1L;
private FileSystemBasedAdapter coreAdapter;
- private String format;
- public RateControlledFileSystemBasedAdapter(ARecordType atype, Map<String, Object> configuration,
- FileSystemBasedAdapter coreAdapter, String format) throws Exception {
- super(atype);
- this.configuration = configuration;
+ public RateControlledFileSystemBasedAdapter(ARecordType atype, Map<String, String> configuration,
+ FileSystemBasedAdapter coreAdapter, String format, ITupleParserFactory parserFactory,
+ IHyracksTaskContext ctx) throws Exception {
+ super(parserFactory, atype, ctx);
this.coreAdapter = coreAdapter;
- this.format = format;
}
@Override
@@ -63,184 +48,13 @@
}
@Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- coreAdapter.initialize(ctx);
- this.ctx = ctx;
- }
-
- @Override
- public void configure(Map<String, Object> arguments) throws Exception {
- coreAdapter.configure(arguments);
- }
-
- @Override
- public AdapterType getAdapterType() {
- return coreAdapter.getAdapterType();
- }
-
- @Override
- protected ITupleParser getTupleParser() throws Exception {
- ITupleParser parser = null;
- if (format.equals(FORMAT_DELIMITED_TEXT)) {
- parser = getRateControlledDelimitedDataTupleParser((ARecordType) atype);
- } else if (format.equals(FORMAT_ADM)) {
- parser = getRateControlledADMDataTupleParser((ARecordType) atype);
- } else {
- throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
- }
- return parser;
-
- }
-
- protected ITupleParser getRateControlledDelimitedDataTupleParser(ARecordType recordType) throws AsterixException,
- HyracksDataException {
- ITupleParser parser;
- int n = recordType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
-
- }
- String delimiterValue = (String) configuration.get(KEY_DELIMITER);
- if (delimiterValue != null && delimiterValue.length() > 1) {
- throw new AsterixException("improper delimiter");
- }
-
- Character delimiter = delimiterValue.charAt(0);
- parser = new RateControlledTupleParserFactory(recordType, fieldParserFactories, delimiter, configuration)
- .createTupleParser(ctx);
- return parser;
- }
-
- protected ITupleParser getRateControlledADMDataTupleParser(ARecordType recordType) throws AsterixException {
- ITupleParser parser = null;
- try {
- parser = new RateControlledTupleParserFactory(recordType, configuration).createTupleParser(ctx);
- return parser;
- } catch (Exception e) {
- throw new AsterixException(e);
- }
-
- }
-
- @Override
- public ARecordType getAdapterOutputType() {
- return (ARecordType) atype;
- }
-
- @Override
- public void alter(Map<String, String> properties) {
- ((RateControlledTupleParser) parser).setInterTupleInterval(Long.parseLong(properties
- .get(RateControlledTupleParser.INTER_TUPLE_INTERVAL)));
- }
-
- @Override
public void stop() {
- ((RateControlledTupleParser) parser).stop();
+ ((RateControlledTupleParser) tupleParser).stop();
}
-
+
@Override
- public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
- return coreAdapter.getPartitionConstraint();
- }
-}
-
-class RateControlledTupleParserFactory implements ITupleParserFactory {
-
- private static final long serialVersionUID = 1L;
-
- private final ARecordType recordType;
- private final IDataParser dataParser;
- private final Map<String, Object> configuration;
-
- public RateControlledTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
- char fieldDelimiter, Map<String, Object> configuration) {
- this.recordType = recordType;
- dataParser = new DelimitedDataParser(recordType, valueParserFactories, fieldDelimiter);
- this.configuration = configuration;
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PULL;
}
- public RateControlledTupleParserFactory(ARecordType recordType, Map<String, Object> configuration) {
- this.recordType = recordType;
- dataParser = new ADMDataParser();
- this.configuration = configuration;
- }
-
- @Override
- public ITupleParser createTupleParser(IHyracksTaskContext ctx) throws HyracksDataException {
- return new RateControlledTupleParser(ctx, recordType, dataParser, configuration);
- }
-
-}
-
-class RateControlledTupleParser extends AbstractTupleParser {
-
- private final IDataParser dataParser;
- private long interTupleInterval;
- private boolean delayConfigured;
- private boolean continueIngestion = true;
-
- public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
-
- public RateControlledTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
- Map<String, Object> configuration) throws HyracksDataException {
- super(ctx, recType);
- this.dataParser = dataParser;
- String propValue = (String) configuration.get(INTER_TUPLE_INTERVAL);
- if (propValue != null) {
- interTupleInterval = Long.parseLong(propValue);
- } else {
- interTupleInterval = 0;
- }
- delayConfigured = interTupleInterval != 0;
- }
-
- public void setInterTupleInterval(long val) {
- this.interTupleInterval = val;
- this.delayConfigured = val > 0;
- }
-
- public void stop() {
- continueIngestion = false;
- }
-
- @Override
- public IDataParser getDataParser() {
- return dataParser;
- }
-
- @Override
- public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
-
- appender.reset(frame, true);
- IDataParser parser = getDataParser();
- try {
- parser.initialize(in, recType, true);
- while (continueIngestion) {
- tb.reset();
- if (!parser.parse(tb.getDataOutput())) {
- break;
- }
- tb.addFieldEndOffset();
- if (delayConfigured) {
- Thread.sleep(interTupleInterval);
- }
- addTupleToFrame(writer);
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (AsterixException ae) {
- throw new HyracksDataException(ae);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
- } catch (InterruptedException ie) {
- throw new HyracksDataException(ie);
- }
- }
}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
index bf3c086..dc558d7 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
@@ -14,14 +14,32 @@
*/
package edu.uci.ics.asterix.tools.external.data;
+import java.io.IOException;
+import java.io.InputStream;
import java.util.Map;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.StreamBasedAdapterFactory;
import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
+import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
/**
 * Factory class for creating @see{RateControlledFileSystemBasedAdapter}. The
@@ -29,38 +47,25 @@
* on the local file system or on HDFS. The feed ends when the content of the
* source file has been ingested.
*/
-public class RateControlledFileSystemBasedAdapterFactory implements IGenericDatasetAdapterFactory {
+public class RateControlledFileSystemBasedAdapterFactory extends StreamBasedAdapterFactory implements
+ IGenericAdapterFactory {
private static final long serialVersionUID = 1L;
-
+
public static final String KEY_FILE_SYSTEM = "fs";
public static final String LOCAL_FS = "localfs";
public static final String HDFS = "hdfs";
public static final String KEY_PATH = "path";
public static final String KEY_FORMAT = "format";
- private IGenericDatasetAdapterFactory adapterFactory;
+ private IGenericAdapterFactory adapterFactory;
private String format;
- private boolean setup = false;
+ private Map<String, String> configuration;
+ private ARecordType atype;
@Override
- public IDatasourceAdapter createAdapter(Map<String, Object> configuration, IAType type) throws Exception {
- if (!setup) {
- checkRequiredArgs(configuration);
- String fileSystem = (String) configuration.get(KEY_FILE_SYSTEM);
- String adapterFactoryClass = null;
- if (fileSystem.equalsIgnoreCase(LOCAL_FS)) {
- adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory";
- } else if (fileSystem.equals(HDFS)) {
- adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory";
- } else {
- throw new AsterixException("Unsupported file system type " + fileSystem);
- }
- format = (String) configuration.get(KEY_FORMAT);
- adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClass).newInstance();
- setup = true;
- }
- return new RateControlledFileSystemBasedAdapter((ARecordType) type, configuration,
- (FileSystemBasedAdapter) adapterFactory.createAdapter(configuration, type), format);
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ FileSystemBasedAdapter coreAdapter = (FileSystemBasedAdapter) adapterFactory.createAdapter(ctx, partition);
+ return new RateControlledFileSystemBasedAdapter(atype, configuration, coreAdapter, format, parserFactory, ctx);
}
@Override
@@ -68,11 +73,11 @@
return "file_feed";
}
- private void checkRequiredArgs(Map<String, Object> configuration) throws Exception {
+ private void checkRequiredArgs(Map<String, String> configuration) throws Exception {
if (configuration.get(KEY_FILE_SYSTEM) == null) {
throw new Exception("File system type not specified. (fs=?) File system could be 'localfs' or 'hdfs'");
}
- if (configuration.get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME) == null) {
+ if (configuration.get(IGenericAdapterFactory.KEY_TYPE_NAME) == null) {
throw new Exception("Record type not specified (output-type-name=?)");
}
if (configuration.get(KEY_PATH) == null) {
@@ -83,4 +88,186 @@
}
}
-}
\ No newline at end of file
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.GENERIC;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration, ARecordType recordType) throws Exception {
+ this.configuration = configuration;
+ checkRequiredArgs(configuration);
+ String fileSystem = configuration.get(KEY_FILE_SYSTEM);
+ String adapterFactoryClass = null;
+ if (fileSystem.equalsIgnoreCase(LOCAL_FS)) {
+ adapterFactoryClass = NCFileSystemAdapterFactory.class.getName();
+ } else if (fileSystem.equals(HDFS)) {
+ adapterFactoryClass = HDFSAdapterFactory.class.getName();
+ } else {
+ throw new AsterixException("Unsupported file system type " + fileSystem);
+ }
+ format = configuration.get(KEY_FORMAT);
+ adapterFactory = (IGenericAdapterFactory) Class.forName(adapterFactoryClass).newInstance();
+ adapterFactory.configure(configuration, recordType);
+
+ atype = (ARecordType) recordType;
+ configureFormat();
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return adapterFactory.getPartitionConstraint();
+ }
+
+ private void configureFormat() throws AsterixException {
+ switch (format) {
+ case FORMAT_ADM:
+ parserFactory = new RateControlledTupleParserFactory(atype, configuration);
+ break;
+
+ case FORMAT_DELIMITED_TEXT:
+ String delimiterValue = configuration.get(KEY_DELIMITER);
+ if (delimiterValue == null || delimiterValue.length() != 1) {
+ throw new AsterixException("improper delimiter: a single delimiter character is expected");
+ }
+ IValueParserFactory[] valueParserFactories = getValueParserFactories(atype);
+ parserFactory = new RateControlledTupleParserFactory(atype, valueParserFactories,
+ delimiterValue.charAt(0), configuration);
+ break;
+
+ default:
+ throw new AsterixException("unsupported format: " + format);
+ }
+ }
+
+ protected IValueParserFactory[] getValueParserFactories(ARecordType recordType) throws AsterixException {
+ int n = recordType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
+ }
+ fieldParserFactories[i] = vpf;
+
+ }
+ return fieldParserFactories;
+ }
+
+}
+
+class RateControlledTupleParserFactory implements ITupleParserFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private final ARecordType recordType;
+ private final Map<String, String> configuration;
+ private IValueParserFactory[] valueParserFactories;
+ private char delimiter;
+ private final ParserType parserType;
+
+ public enum ParserType {
+ ADM,
+ DELIMITED_DATA
+ }
+
+ public RateControlledTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
+ char fieldDelimiter, Map<String, String> configuration) {
+ this.recordType = recordType;
+ this.valueParserFactories = valueParserFactories;
+ this.delimiter = fieldDelimiter;
+ this.configuration = configuration;
+ this.parserType = ParserType.DELIMITED_DATA;
+ }
+
+ public RateControlledTupleParserFactory(ARecordType recordType, Map<String, String> configuration) {
+ this.recordType = recordType;
+ this.configuration = configuration;
+ this.parserType = ParserType.ADM;
+ }
+
+ @Override
+ public ITupleParser createTupleParser(IHyracksTaskContext ctx) throws HyracksDataException {
+ IDataParser dataParser = null;
+ switch (parserType) {
+ case ADM:
+ dataParser = new ADMDataParser();
+ break;
+ case DELIMITED_DATA:
+ dataParser = new DelimitedDataParser(recordType, valueParserFactories, delimiter);
+ break;
+ }
+ return new RateControlledTupleParser(ctx, recordType, dataParser, configuration);
+ }
+
+}
+
+class RateControlledTupleParser extends AbstractTupleParser {
+
+ private final IDataParser dataParser;
+ private long interTupleInterval;
+ private boolean delayConfigured;
+ private boolean continueIngestion = true;
+
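+    // property naming the per-tuple delay in milliseconds (0 or absent = no delay)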
+ public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
+
+ public RateControlledTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
+ Map<String, String> configuration) throws HyracksDataException {
+ super(ctx, recType);
+ this.dataParser = dataParser;
+ String propValue = configuration.get(INTER_TUPLE_INTERVAL);
+ if (propValue != null) {
+ interTupleInterval = Long.parseLong(propValue);
+ } else {
+ interTupleInterval = 0;
+ }
+ delayConfigured = interTupleInterval != 0;
+ }
+
+ public void setInterTupleInterval(long val) {
+ this.interTupleInterval = val;
+ this.delayConfigured = val > 0;
+ }
+
+ public void stop() {
+ continueIngestion = false;
+ }
+
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
+
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ while (continueIngestion) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ if (delayConfigured) {
+ Thread.sleep(interTupleInterval);
+ }
+ addTupleToFrame(writer);
+ }
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ } catch (InterruptedException ie) {
+ throw new HyracksDataException(ie);
+ }
+ }
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java
new file mode 100644
index 0000000..4cbc4f1
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.OutputStream;
+import java.net.Socket;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class SocketClientAdapter implements IFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger LOGGER = Logger.getLogger(SocketClientAdapter.class.getName());
+
+ private static final String LOCALHOST = "127.0.0.1";
+
+ private final String localFile;
+
+ private final int port;
+
+ private final IHyracksTaskContext ctx;
+
+ private boolean continueStreaming = true;
+
+ public SocketClientAdapter(Integer port, String localFile, IHyracksTaskContext ctx) {
+ this.localFile = localFile;
+ this.port = port;
+ this.ctx = ctx;
+ }
+
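+    /**
+     * Streams the local file to 127.0.0.1:[port] in 1 KB chunks until the file
+     * is exhausted or stop() is called.
+     */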
+ @Override
+ public void start(int partition, IFrameWriter writer) throws Exception {
+ Socket socket = new Socket(LOCALHOST, port);
+ OutputStream os = socket.getOutputStream();
+ FileInputStream fin = new FileInputStream(new File(localFile));
+ byte[] chunk = new byte[1024];
+ int read;
+ try {
+ while (continueStreaming) {
+ read = fin.read(chunk);
+ if (read > 0) {
+ os.write(chunk, 0, read);
+ } else {
+ break;
+ }
+ }
+ } finally {
+ socket.close();
+ fin.close();
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Finished streaming file " + localFile + " to port [" + port + "]");
+ }
+ }
+
+ @Override
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PUSH;
+ }
+
+ @Override
+ public void stop() throws Exception {
+ continueStreaming = false;
+ }
+
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java
new file mode 100644
index 0000000..f21a740
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class SocketClientAdapterFactory implements ITypedAdapterFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final ARecordType outputType = initOutputType();
+
+ private GenericSocketFeedAdapterFactory genericSocketAdapterFactory;
+
+ private String[] fileSplits;
+
+ public static final String KEY_FILE_SPLITS = "file_splits";
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ private static ARecordType initOutputType() {
+ ARecordType outputType = null;
+ try {
+ String[] userFieldNames = new String[] { "screen-name", "lang", "friends_count", "statuses_count", "name",
+ "followers_count" };
+
+ IAType[] userFieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
+ BuiltinType.AINT32, BuiltinType.ASTRING, BuiltinType.AINT32 };
+ ARecordType userRecordType = new ARecordType("TwitterUserType", userFieldNames, userFieldTypes, false);
+
+ String[] fieldNames = new String[] { "tweetid", "user", "sender-location", "send-time", "referred-topics",
+ "message-text" };
+
+ AUnorderedListType unorderedListType = new AUnorderedListType(BuiltinType.ASTRING, "referred-topics");
+ IAType[] fieldTypes = new IAType[] { BuiltinType.AINT64, userRecordType, BuiltinType.APOINT,
+ BuiltinType.ADATETIME, unorderedListType, BuiltinType.ASTRING };
+ outputType = new ARecordType("TweetMessageType", fieldNames, fieldTypes, false);
+
+ } catch (AsterixException e) {
+            throw new IllegalStateException("Unable to initialize output type", e);
+ }
+ return outputType;
+ }
+
+ @Override
+ public String getName() {
+ return "socket_client";
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return genericSocketAdapterFactory.getPartitionConstraint();
+ }
+
+ @Override
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ Pair<String, Integer> socket = genericSocketAdapterFactory.getSockets().get(partition);
+ return new SocketClientAdapter(socket.second, fileSplits[partition], ctx);
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return outputType;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ String fileSplitsValue = configuration.get(KEY_FILE_SPLITS);
+ if (fileSplitsValue == null) {
+ throw new IllegalArgumentException(
+                    "File splits not specified. Specify file_splits as a comma-separated list of paths");
+ }
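+        // e.g. file_splits=/path/a.adm,/path/b.adm (hypothetical paths):
+        // createAdapter streams split i to the socket assigned to partition i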
+ fileSplits = fileSplitsValue.trim().split(",");
+ genericSocketAdapterFactory = new GenericSocketFeedAdapterFactory();
+ genericSocketAdapterFactory.configure(configuration, outputType);
+ }
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java
new file mode 100644
index 0000000..8f252e6
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.tools.external.data.DataGenerator.InitializationInfo;
+import edu.uci.ics.asterix.tools.external.data.DataGenerator.TweetMessage;
+import edu.uci.ics.asterix.tools.external.data.DataGenerator.TweetMessageIterator;
+
+public class TweetGenerator {
+
+    private static final Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
+
+ public static final String KEY_DURATION = "duration";
+ public static final String KEY_TPS = "tps";
+ public static final String KEY_GUID_SEED = "guid-seed";
+
+ public static final String OUTPUT_FORMAT = "output-format";
+ public static final String OUTPUT_FORMAT_ARECORD = "arecord";
+ public static final String OUTPUT_FORMAT_ADM_STRING = "adm-string";
+
+ private static final int DEFAULT_DURATION = 60; //seconds
+ private static final int DEFAULT_GUID_SEED = 0;
+
+ private int duration;
+ private TweetMessageIterator tweetIterator = null;
+ private int partition;
+ private int tweetCount = 0;
+ private int frameTweetCount = 0;
+ private int numFlushedTweets = 0;
+ private OutputStream os;
+ private DataGenerator dataGenerator = null;
+ private ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
+ private GULongIDGenerator uidGenerator;
+
+ public int getTweetCount() {
+ return tweetCount;
+ }
+
+ public TweetGenerator(Map<String, String> configuration, int partition, String format, OutputStream os)
+ throws Exception {
+ this.partition = partition;
+ String value = configuration.get(KEY_DURATION);
+ this.duration = value != null ? Integer.parseInt(value) : DEFAULT_DURATION;
+ int guidSeed = configuration.get(KEY_GUID_SEED) != null ? Integer.parseInt(configuration.get(KEY_GUID_SEED))
+ : DEFAULT_GUID_SEED;
+ uidGenerator = new GULongIDGenerator(partition, (byte) (guidSeed));
+ dataGenerator = new DataGenerator(new InitializationInfo());
+ tweetIterator = dataGenerator.new TweetMessageIterator(duration, uidGenerator);
+ this.os = os;
+ }
+
+ private void writeTweetString(TweetMessage tweetMessage) throws IOException {
+ String tweet = tweetMessage.toString() + "\n";
+ tweetCount++;
+ byte[] b = tweet.getBytes();
+        if (outputBuffer.position() + b.length > outputBuffer.limit()) {
+            flush();
+            numFlushedTweets += frameTweetCount;
+            frameTweetCount = 0;
+        }
+        outputBuffer.put(b);
+ frameTweetCount++;
+ }
+
+ public int getNumFlushedTweets() {
+ return numFlushedTweets;
+ }
+
+ private void flush() throws IOException {
+ outputBuffer.flip();
+ os.write(outputBuffer.array(), 0, outputBuffer.limit());
+        outputBuffer.clear(); // resets position to 0 and limit to capacity (32 KB)
+ }
+
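+    /**
+     * Writes up to numTweetsInBatch tweets to the output stream.
+     *
+     * @return false once the tweet iterator is exhausted (presumably when the
+     *         configured duration has elapsed), true otherwise
+     */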
+ public boolean setNextRecordBatch(int numTweetsInBatch) throws Exception {
+ boolean moreData = tweetIterator.hasNext();
+ if (!moreData) {
+ if (outputBuffer.position() > 0) {
+ flush();
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Reached end of tweet stream for partition [" + partition + "]; tweet count: " + tweetCount);
+ }
+ return false;
+ } else {
+ int count = 0;
+ while (count < numTweetsInBatch) {
+ writeTweetString(tweetIterator.next());
+ count++;
+ }
+ return true;
+ }
+ }
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java
new file mode 100644
index 0000000..9e3c4dd
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.external.dataset.adapter.StreamBasedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+/**
+ * TPS can be configured between 1 and 20,000
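+ * (via the "tps" property, TweetGenerator.KEY_TPS) when mode=controlled; in the
+ * default aggressive mode, batches of 5000 tweets are pushed without pacing.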
+ *
+ * @author ramang
+ */
+public class TwitterFirehoseFeedAdapter extends StreamBasedAdapter implements IFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger LOGGER = Logger.getLogger(TwitterFirehoseFeedAdapter.class.getName());
+
+ private ExecutorService executorService = Executors.newCachedThreadPool();
+
+ private PipedOutputStream outputStream = new PipedOutputStream();
+
+ private PipedInputStream inputStream = new PipedInputStream(outputStream);
+
+ private final TwitterServer twitterServer;
+
+ public TwitterFirehoseFeedAdapter(Map<String, String> configuration, ITupleParserFactory parserFactory,
+ ARecordType outputtype, int partition, IHyracksTaskContext ctx) throws Exception {
+ super(parserFactory, outputtype, ctx);
+ this.twitterServer = new TwitterServer(configuration, partition, outputtype, outputStream, executorService);
+ }
+
+ @Override
+ public void start(int partition, IFrameWriter writer) throws Exception {
+ twitterServer.start();
+ super.start(partition, writer);
+ }
+
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ return inputStream;
+ }
+
+ public static class TwitterServer {
+ private final DataProvider dataProvider;
+ private final ExecutorService executorService;
+
+ public TwitterServer(Map<String, String> configuration, int partition, ARecordType outputtype, OutputStream os,
+ ExecutorService executorService) throws Exception {
+ dataProvider = new DataProvider(configuration, outputtype, partition, os);
+ this.executorService = executorService;
+ }
+
+ public void stop() throws IOException {
+ dataProvider.stop();
+ }
+
+ public void start() {
+ executorService.execute(dataProvider);
+ }
+
+ }
+
+ public static class DataProvider implements Runnable {
+
+ public static final String KEY_MODE = "mode";
+
+ private TweetGenerator tweetGenerator;
+ private boolean continuePush = true;
+ private int batchSize;
+ private final Mode mode;
+ private final OutputStream os;
+
+ public static enum Mode {
+ AGGRESSIVE,
+ CONTROLLED
+ }
+
+ public DataProvider(Map<String, String> configuration, ARecordType outputtype, int partition, OutputStream os)
+ throws Exception {
+ this.tweetGenerator = new TweetGenerator(configuration, partition, TweetGenerator.OUTPUT_FORMAT_ADM_STRING,
+ os);
+ this.os = os;
+ mode = configuration.get(KEY_MODE) != null ? Mode.valueOf(configuration.get(KEY_MODE).toUpperCase())
+ : Mode.AGGRESSIVE;
+ switch (mode) {
+ case CONTROLLED:
+ String tpsValue = configuration.get(TweetGenerator.KEY_TPS);
+ if (tpsValue == null) {
+                        throw new IllegalArgumentException("TPS value not configured. Use tps=<value>");
+ }
+ batchSize = Integer.parseInt(tpsValue);
+ break;
+ case AGGRESSIVE:
+ batchSize = 5000;
+ break;
+ }
+ }
+
+ @Override
+ public void run() {
+ boolean moreData = true;
+ long startBatch;
+ long endBatch;
+
+ try {
+ while (moreData && continuePush) {
+ switch (mode) {
+ case AGGRESSIVE:
+ moreData = tweetGenerator.setNextRecordBatch(batchSize);
+ break;
+ case CONTROLLED:
+ startBatch = System.currentTimeMillis();
+ moreData = tweetGenerator.setNextRecordBatch(batchSize);
+ endBatch = System.currentTimeMillis();
+ if (endBatch - startBatch < 1000) {
+ Thread.sleep(1000 - (endBatch - startBatch));
+ } else {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Unable to reach the required tps of " + batchSize);
+ }
+ }
+ break;
+ }
+ }
+ os.close();
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in adapter: " + e.getMessage());
+ }
+ }
+ }
+
+ public void stop() {
+ continuePush = false;
+ }
+
+ }
+
+ @Override
+ public void stop() throws Exception {
+ twitterServer.stop();
+ }
+
+ @Override
+ public DataExchangeMode getDataExchangeMode() {
+ return DataExchangeMode.PUSH;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java
new file mode 100644
index 0000000..8f24dec
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.adapter.factory.StreamBasedAdapterFactory;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+/**
+ * Factory class for creating @see{TwitterFirehoseFeedAdapter}.
+ * The adapter simulates a Twitter firehose with tweets being "pushed" into Asterix at a configurable rate
+ * measured in terms of TPS (tweets/second). The stream of tweets lasts for a configurable duration (measured in seconds).
+ */
+public class TwitterFirehoseFeedAdapterFactory extends StreamBasedAdapterFactory implements ITypedAdapterFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ /*
+ * Degree of parallelism for feed ingestion activity. Defaults to 1.
+ * This builds up the count constraint for the ingestion operator.
+ */
+ private static final String KEY_INGESTION_CARDINALITY = "ingestion-cardinality";
+
+ /*
+     * The absolute locations where ingestion operator instances will be placed.
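+     *
+     * A sketch with hypothetical values: ingestion-cardinality=2 together with
+     * ingestion-location=nc1,nc2 places one ingestion operator on each named NC;
+     * without explicit locations, nodes are chosen round-robin from the
+     * cluster's participant nodes.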
+ */
+ private static final String KEY_INGESTION_LOCATIONS = "ingestion-location";
+
+ private static final ARecordType outputType = initOutputType();
+
+ @Override
+ public String getName() {
+ return "twitter_firehose";
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.TYPED;
+ }
+
+ @Override
+ public SupportedOperation getSupportedOperations() {
+ return SupportedOperation.READ;
+ }
+
+ @Override
+ public void configure(Map<String, String> configuration) throws Exception {
+ configuration.put(KEY_FORMAT, FORMAT_ADM);
+ this.configuration = configuration;
+ this.configureFormat(initOutputType());
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ String ingestionCardinalityParam = (String) configuration.get(KEY_INGESTION_CARDINALITY);
+ String ingestionLocationParam = (String) configuration.get(KEY_INGESTION_LOCATIONS);
+ String[] locations = null;
+ if (ingestionLocationParam != null) {
+ locations = ingestionLocationParam.split(",");
+ }
+ int count = locations != null ? locations.length : 1;
+ if (ingestionCardinalityParam != null) {
+ count = Integer.parseInt(ingestionCardinalityParam);
+ }
+
+ List<String> chosenLocations = new ArrayList<String>();
+ String[] availableLocations = locations != null ? locations : AsterixClusterProperties.INSTANCE
+ .getParticipantNodes().toArray(new String[] {});
+ for (int i = 0, k = 0; i < count; i++, k = (k + 1) % availableLocations.length) {
+ chosenLocations.add(availableLocations[k]);
+ }
+ return new AlgebricksAbsolutePartitionConstraint(chosenLocations.toArray(new String[] {}));
+ }
+
+ @Override
+ public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+ return new TwitterFirehoseFeedAdapter(configuration, parserFactory, outputType, partition, ctx);
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return outputType;
+ }
+
+ private static ARecordType initOutputType() {
+ ARecordType outputType = null;
+ try {
+ String[] userFieldNames = new String[] { "screen-name", "lang", "friends_count", "statuses_count", "name",
+ "followers_count" };
+
+ IAType[] userFieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
+ BuiltinType.AINT32, BuiltinType.ASTRING, BuiltinType.AINT32 };
+ ARecordType userRecordType = new ARecordType("TwitterUserType", userFieldNames, userFieldTypes, false);
+
+ String[] fieldNames = new String[] { "tweetid", "user", "sender-location", "send-time", "referred-topics",
+ "message-text" };
+
+ AUnorderedListType unorderedListType = new AUnorderedListType(BuiltinType.ASTRING, "referred-topics");
+ IAType[] fieldTypes = new IAType[] { BuiltinType.AINT64, userRecordType, BuiltinType.APOINT,
+ BuiltinType.ADATETIME, unorderedListType, BuiltinType.ASTRING };
+ outputType = new ARecordType("TweetMessageType", fieldNames, fieldTypes, false);
+
+ } catch (AsterixException e) {
+            throw new IllegalStateException("Unable to initialize output type", e);
+ }
+ return outputType;
+ }
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/resources/test.properties b/asterix-tools/src/main/resources/test.properties
old mode 100755
new mode 100644
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index 3123008..27a1de4 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
</parent>
<artifactId>asterix-transactions</artifactId>
@@ -33,6 +33,47 @@
<fork>true</fork>
</configuration>
</plugin>
+ <plugin>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>record-manager-generator-maven-plugin</artifactId>
+ <version>0.8.4-SNAPSHOT</version>
+ <configuration>
+ <debug>false</debug>
+ <inputFiles>
+ <param>src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Job.json</param>
+ <param>src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Resource.json</param>
+ <param>src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Request.json</param>
+ </inputFiles>
+ <packageName>edu.uci.ics.asterix.transaction.management.service.locking</packageName>
+ </configuration>
+ <executions>
+ <execution>
+ <id>generate-record-manager</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>generate-record-manager</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.build.directory}/generated-sources/java/</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
@@ -57,7 +98,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
index dcc4d40..e16e106 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
@@ -64,7 +64,7 @@
public void complete(ITupleReference tuple) throws HyracksDataException {
int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
- lockManager.unlock(datasetId, pkHash, txnCtx);
+ lockManager.unlock(datasetId, pkHash, LockMode.S, txnCtx);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
index efe1daa..8de7ca5 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
@@ -36,9 +36,8 @@
@Override
public boolean proceed(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
- return lockManager.tryLock(datasetId, pkHash, LockMode.S, txnCtx);
+ return lockManager.tryLock(datasetId, -1, LockMode.S, txnCtx);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
@@ -46,9 +45,8 @@
@Override
public void reconcile(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
- lockManager.lock(datasetId, pkHash, LockMode.S, txnCtx);
+ lockManager.lock(datasetId, -1, LockMode.S, txnCtx);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
@@ -56,12 +54,7 @@
@Override
public void cancel(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
- try {
- lockManager.unlock(datasetId, pkHash, txnCtx);
- } catch (ACIDException e) {
- throw new HyracksDataException(e);
- }
+ //no op
}
@Override
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
index 563e9b7..9a0e49f 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
@@ -56,8 +56,9 @@
try {
ITransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId, false);
- IModificationOperationCallback modCallback = new SecondaryIndexModificationOperationCallback(datasetId, primaryKeyFields, txnCtx,
- txnSubsystem.getLockManager(), txnSubsystem, resourceId, resourceType, indexOp);
+ IModificationOperationCallback modCallback = new SecondaryIndexModificationOperationCallback(datasetId,
+ primaryKeyFields, txnCtx, txnSubsystem.getLockManager(), txnSubsystem, resourceId, resourceType,
+ indexOp);
txnCtx.registerIndexAndCallback(resourceId, index, (AbstractOperationCallback) modCallback, false);
return modCallback;
} catch (ACIDException e) {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceFactory.java
index b2f8ae6..dc85360 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceFactory.java
@@ -19,7 +19,7 @@
import edu.uci.ics.hyracks.storage.common.file.LocalResource;
public class PersistentLocalResourceFactory implements ILocalResourceFactory {
-
+
private final ILocalResourceMetadata localResourceMetadata;
private final int resourceType;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
new file mode 100644
index 0000000..3dc4fa6
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
@@ -0,0 +1,1045 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.transaction.management.service.locking;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.HashMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.transactions.DatasetId;
+import edu.uci.ics.asterix.common.transactions.ILockManager;
+import edu.uci.ics.asterix.common.transactions.ITransactionContext;
+import edu.uci.ics.asterix.common.transactions.ITransactionManager;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
+
+/**
+ * An implementation of the ILockManager interface supporting dataset- and
+ * entity-level locks, intention modes, lock conversion, and deadlock detection.
+ *
+ * @author tillw
+ */
+public class ConcurrentLockManager implements ILockManager, ILifeCycleComponent {
+
+    private static final Logger LOGGER = Logger.getLogger(ConcurrentLockManager.class.getName());
+ private static final Level LVL = Level.FINER;
+
+    public static final boolean DEBUG_MODE = false; // set to true for verbose slot allocation logging
+
+ private TransactionSubsystem txnSubsystem;
+ private ResourceGroupTable table;
+ private ResourceArenaManager resArenaMgr;
+ private RequestArenaManager reqArenaMgr;
+ private JobArenaManager jobArenaMgr;
+ private ConcurrentHashMap<Integer, Long> jobIdSlotMap;
+ private ThreadLocal<DatasetLockCache> dsLockCache;
+ private LockManagerStats stats = new LockManagerStats(10000);
+
+ enum LockAction {
+ ERR(false, false),
+ GET(false, false),
+ UPD(false, true), // version of GET that updates the max lock mode
+ WAIT(true, false),
+ CONV(true, true) // convert (upgrade) a lock (e.g. from S to X)
+ ;
+ boolean wait;
+ boolean modify;
+
+ LockAction(boolean wait, boolean modify) {
+ this.wait = wait;
+ this.modify = modify;
+ }
+ }
+
+ static LockAction[][] ACTION_MATRIX = {
+ // new NL IS IX S X
+ { LockAction.ERR, LockAction.UPD, LockAction.UPD, LockAction.UPD, LockAction.UPD }, // NL
+ { LockAction.ERR, LockAction.GET, LockAction.UPD, LockAction.UPD, LockAction.WAIT }, // IS
+ { LockAction.ERR, LockAction.GET, LockAction.GET, LockAction.WAIT, LockAction.WAIT }, // IX
+ { LockAction.ERR, LockAction.GET, LockAction.WAIT, LockAction.GET, LockAction.WAIT }, // S
+ { LockAction.ERR, LockAction.WAIT, LockAction.WAIT, LockAction.WAIT, LockAction.WAIT } // X
+ };
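+
+    // Reading the matrix (illustrative): rows are the resource's current max mode,
+    // columns the requested mode. E.g. a resource held in S that receives an X
+    // request yields ACTION_MATRIX[S][X] == WAIT; determineLockAction() refines
+    // WAIT to GET or CONV when the requesting job itself already holds the resource.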
+
+ public ConcurrentLockManager(TransactionSubsystem txnSubsystem) throws ACIDException {
+ this.txnSubsystem = txnSubsystem;
+
+ this.table = new ResourceGroupTable();
+
+ final int lockManagerShrinkTimer = txnSubsystem.getTransactionProperties().getLockManagerShrinkTimer();
+
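+        // sizing heuristic; assumption: two arenas per core spreads concurrent
+        // slot allocations across arenas to reduce contention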
+ int noArenas = Runtime.getRuntime().availableProcessors() * 2;
+
+ resArenaMgr = new ResourceArenaManager(noArenas, lockManagerShrinkTimer);
+ reqArenaMgr = new RequestArenaManager(noArenas, lockManagerShrinkTimer);
+ jobArenaMgr = new JobArenaManager(noArenas, lockManagerShrinkTimer);
+ jobIdSlotMap = new ConcurrentHashMap<>();
+ dsLockCache = new ThreadLocal<DatasetLockCache>() {
+ protected DatasetLockCache initialValue() {
+ return new DatasetLockCache();
+ }
+ };
+ }
+
+ public AsterixTransactionProperties getTransactionProperties() {
+ return this.txnSubsystem.getTransactionProperties();
+ }
+
+ @Override
+ public void lock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ log("lock", datasetId.getId(), entityHashValue, lockMode, txnContext);
+ stats.lock();
+
+ final int dsId = datasetId.getId();
+ final int jobId = txnContext.getJobId().getId();
+
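+        // entityHashValue == -1 denotes a dataset-level lock; entity-level requests
+        // first take the matching intention lock (IS/IX) on the dataset, while
+        // dataset-level requests are short-circuited by a per-thread lock cache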
+ if (entityHashValue != -1) {
+ lock(datasetId, -1, LockMode.intentionMode(lockMode), txnContext);
+ } else {
+ if (dsLockCache.get().contains(jobId, dsId, lockMode)) {
+ return;
+ }
+ }
+
+ final long jobSlot = findOrAllocJobSlot(jobId);
+
+ final ResourceGroup group = table.get(dsId, entityHashValue);
+ group.getLatch();
+ try {
+ validateJob(txnContext);
+
+ final long resSlot = findOrAllocResourceSlot(group, dsId, entityHashValue);
+ final long reqSlot = allocRequestSlot(resSlot, jobSlot, lockMode);
+ boolean locked = false;
+ while (!locked) {
+ final LockAction act = determineLockAction(resSlot, jobSlot, lockMode);
+ switch (act) {
+ case UPD:
+ resArenaMgr.setMaxMode(resSlot, lockMode);
+ // no break
+ case GET:
+ addHolder(reqSlot, resSlot, jobSlot);
+ locked = true;
+ break;
+ case WAIT:
+ case CONV:
+ enqueueWaiter(group, reqSlot, resSlot, jobSlot, act, txnContext);
+ break;
+ case ERR:
+ default:
+ throw new IllegalStateException();
+ }
+ }
+ if (entityHashValue == -1) {
+ dsLockCache.get().put(jobId, dsId, lockMode);
+ }
+ } finally {
+ group.releaseLatch();
+ }
+ }
+
+ private void enqueueWaiter(final ResourceGroup group, final long reqSlot, final long resSlot, final long jobSlot,
+ final LockAction act, ITransactionContext txnContext) throws ACIDException {
+ final Queue queue = act.modify ? upgrader : waiter;
+ if (!introducesDeadlock(resSlot, jobSlot)) {
+ queue.add(reqSlot, resSlot, jobSlot);
+ } else {
+ requestAbort(txnContext);
+ }
+ try {
+ group.await(txnContext);
+ } finally {
+ queue.remove(reqSlot, resSlot, jobSlot);
+ }
+ }
+
+ /**
+ * determine if adding a job to the waiters of a resource will introduce a
+ * cycle in the wait-graph where the job waits on itself
+ *
+ * @param resSlot
+ * the slot that contains the information about the resource
+ * @param jobSlot
+ * the slot that contains the information about the job
+ * @return true if a cycle would be introduced, false otherwise
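+     *         (e.g., the resource is held by job A, job A waits on another
+     *         resource, and that resource is held by the requesting job,
+     *         closing a cycle in the wait-for graph)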
+ */
+ private boolean introducesDeadlock(final long resSlot, final long jobSlot) {
+ synchronized (jobArenaMgr) {
+ long reqSlot = resArenaMgr.getLastHolder(resSlot);
+ while (reqSlot >= 0) {
+ long holderJobSlot = reqArenaMgr.getJobSlot(reqSlot);
+ if (holderJobSlot == jobSlot) {
+ return true;
+ }
+ boolean scanWaiters = true;
+ long waiter = jobArenaMgr.getLastWaiter(holderJobSlot);
+ while (waiter >= 0) {
+                    long waitingOnResSlot = reqArenaMgr.getResourceId(waiter);
+                    if (introducesDeadlock(waitingOnResSlot, jobSlot)) {
+ return true;
+ }
+ waiter = reqArenaMgr.getNextJobRequest(waiter);
+ if (waiter < 0 && scanWaiters) {
+ scanWaiters = false;
+ waiter = jobArenaMgr.getLastUpgrader(holderJobSlot);
+ }
+ }
+ reqSlot = reqArenaMgr.getNextRequest(reqSlot);
+ }
+ return false;
+ }
+ }
+
+ @Override
+ public void instantLock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ log("instantLock", datasetId.getId(), entityHashValue, lockMode, txnContext);
+ stats.instantLock();
+
+ final int dsId = datasetId.getId();
+ final int jobId = txnContext.getJobId().getId();
+
+ if (entityHashValue != -1) {
+ lock(datasetId, -1, LockMode.intentionMode(lockMode), txnContext);
+ } else {
+ throw new UnsupportedOperationException("instant locks are not supported on datasets");
+ }
+
+ final ResourceGroup group = table.get(dsId, entityHashValue);
+        if (group.firstResourceIndex.get() == -1L) {
+ validateJob(txnContext);
+ // if we do not have a resource in the group, we know that the
+ // resource that we are looking for is not locked
+ return;
+ }
+
+ // we only allocate a request slot if we actually have to wait
+ long reqSlot = -1;
+
+ group.getLatch();
+ try {
+ validateJob(txnContext);
+
+ final long resSlot = findResourceInGroup(group, dsId, entityHashValue);
+ if (resSlot < 0) {
+ // if we don't find the resource, there are no locks on it.
+ return;
+ }
+
+ final long jobSlot = findOrAllocJobSlot(jobId);
+
+ while (true) {
+ final LockAction act = determineLockAction(resSlot, jobSlot, lockMode);
+ switch (act) {
+ case UPD:
+ case GET:
+ return;
+ case WAIT:
+ case CONV:
+ if (reqSlot == -1) {
+ reqSlot = allocRequestSlot(resSlot, jobSlot, lockMode);
+ }
+ enqueueWaiter(group, reqSlot, resSlot, jobSlot, act, txnContext);
+ break;
+ case ERR:
+ default:
+ throw new IllegalStateException();
+ }
+ }
+ } finally {
+ if (reqSlot != -1) {
+ // deallocate request, if we allocated one earlier
+ if (DEBUG_MODE) LOGGER.finer("del req slot " + TypeUtil.Global.toString(reqSlot));
+ reqArenaMgr.deallocate(reqSlot);
+ }
+ group.releaseLatch();
+ }
+ }
+
+ @Override
+ public boolean tryLock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ log("tryLock", datasetId.getId(), entityHashValue, lockMode, txnContext);
+ stats.tryLock();
+
+ final int dsId = datasetId.getId();
+ final int jobId = txnContext.getJobId().getId();
+
+ if (entityHashValue != -1) {
+ if (! tryLock(datasetId, -1, LockMode.intentionMode(lockMode), txnContext)) {
+ return false;
+ }
+ } else {
+ if (dsLockCache.get().contains(jobId, dsId, lockMode)) {
+ return true;
+ }
+ }
+
+ final long jobSlot = findOrAllocJobSlot(jobId);
+
+ final ResourceGroup group = table.get(dsId, entityHashValue);
+ group.getLatch();
+
+ try {
+ validateJob(txnContext);
+
+ final long resSlot = findOrAllocResourceSlot(group, dsId, entityHashValue);
+ final long reqSlot = allocRequestSlot(resSlot, jobSlot, lockMode);
+
+ final LockAction act = determineLockAction(resSlot, jobSlot, lockMode);
+ switch (act) {
+ case UPD:
+ resArenaMgr.setMaxMode(resSlot, lockMode);
+ // no break
+ case GET:
+ addHolder(reqSlot, resSlot, jobSlot);
+ if (entityHashValue == -1) {
+ dsLockCache.get().put(jobId, dsId, lockMode);
+ }
+ return true;
+ case WAIT:
+ case CONV:
+ return false;
+ default:
+ throw new IllegalStateException();
+ }
+ } finally {
+ group.releaseLatch();
+ }
+
+ // if we did acquire the dataset lock, but not the entity lock, we keep
+ // it anyway and clean it up at the end of the job
+ }
+
+ @Override
+ public boolean instantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
+ ITransactionContext txnContext) throws ACIDException {
+ log("instantTryLock", datasetId.getId(), entityHashValue, lockMode, txnContext);
+ stats.instantTryLock();
+
+ final int dsId = datasetId.getId();
+ final int jobId = txnContext.getJobId().getId();
+
+ if (entityHashValue != -1) {
+ if (! tryLock(datasetId, -1, LockMode.intentionMode(lockMode), txnContext)) {
+ return false;
+ }
+ } else {
+ throw new UnsupportedOperationException("instant locks are not supported on datasets");
+ }
+
+ final ResourceGroup group = table.get(dsId, entityHashValue);
+        if (group.firstResourceIndex.get() == -1L) {
+ validateJob(txnContext);
+ // if we do not have a resource in the group, we know that the
+ // resource that we are looking for is not locked
+ return true;
+ }
+
+ group.getLatch();
+ try {
+ validateJob(txnContext);
+
+ final long resSlot = findResourceInGroup(group, dsId, entityHashValue);
+ if (resSlot < 0) {
+ // if we don't find the resource, there are no locks on it.
+ return true;
+ }
+
+ final long jobSlot = findOrAllocJobSlot(jobId);
+
+ LockAction act = determineLockAction(resSlot, jobSlot, lockMode);
+ switch (act) {
+ case UPD:
+ case GET:
+ return true;
+ case WAIT:
+ case CONV:
+ return false;
+ case ERR:
+ default:
+ throw new IllegalStateException();
+ }
+ } finally {
+ group.releaseLatch();
+ }
+ }
+
+ @Override
+ public void unlock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ log("unlock", datasetId.getId(), entityHashValue, lockMode, txnContext);
+ final int jobId = txnContext.getJobId().getId();
+ final long jobSlot = jobIdSlotMap.get(jobId);
+ final int dsId = datasetId.getId();
+ unlock(dsId, entityHashValue, lockMode, jobSlot);
+ }
+
+ private void unlock(int dsId, int entityHashValue, byte lockMode, long jobSlot) throws ACIDException {
+ log("unlock", dsId, entityHashValue, lockMode, null);
+ stats.unlock();
+
+ ResourceGroup group = table.get(dsId, entityHashValue);
+ group.getLatch();
+ try {
+
+ long resource = findResourceInGroup(group, dsId, entityHashValue);
+ if (resource < 0) {
+ throw new IllegalStateException("resource (" + dsId + ", " + entityHashValue + ") not found");
+ }
+
+ long holder = removeLastHolder(resource, jobSlot, lockMode);
+
+ // deallocate request
+ if (DEBUG_MODE) LOGGER.finer("del req slot " + TypeUtil.Global.toString(holder));
+ reqArenaMgr.deallocate(holder);
+ // deallocate resource or fix max lock mode
+ if (resourceNotUsed(resource)) {
+ long prev = group.firstResourceIndex.get();
+ if (prev == resource) {
+ group.firstResourceIndex.set(resArenaMgr.getNext(resource));
+ } else {
+ while (resArenaMgr.getNext(prev) != resource) {
+ prev = resArenaMgr.getNext(prev);
+ }
+ resArenaMgr.setNext(prev, resArenaMgr.getNext(resource));
+ }
+ if (DEBUG_MODE) LOGGER.finer("del res slot " + TypeUtil.Global.toString(resource));
+ resArenaMgr.deallocate(resource);
+ } else {
+ final int oldMaxMode = resArenaMgr.getMaxMode(resource);
+ final int newMaxMode = determineNewMaxMode(resource, oldMaxMode);
+ resArenaMgr.setMaxMode(resource, newMaxMode);
+ if (oldMaxMode != newMaxMode) {
+                    // the max lock mode changed, so current waiters may now be
+                    // able to acquire the lock and need to be signaled
+ group.wakeUp();
+ }
+ }
+ } finally {
+ group.releaseLatch();
+ }
+
+ // dataset intention locks are cleaned up at the end of the job
+ }
+
+ @Override
+ public void releaseLocks(ITransactionContext txnContext) throws ACIDException {
+ log("releaseLocks", -1, -1, LockMode.ANY, txnContext);
+ stats.releaseLocks();
+
+ int jobId = txnContext.getJobId().getId();
+ Long jobSlot = jobIdSlotMap.get(jobId);
+ if (jobSlot == null) {
+ // we don't know the job, so there are no locks for it - we're done
+ return;
+ }
+ //System.err.println(table.append(new StringBuilder(), true).toString());
+ if (LOGGER.isLoggable(LVL)) {
+ LOGGER.log(LVL, "jobArenaMgr " + jobArenaMgr.addTo(new RecordManagerStats()).toString());
+ LOGGER.log(LVL, "resArenaMgr " + resArenaMgr.addTo(new RecordManagerStats()).toString());
+ LOGGER.log(LVL, "reqArenaMgr " + reqArenaMgr.addTo(new RecordManagerStats()).toString());
+ }
+ long holder;
+ synchronized (jobArenaMgr) {
+ holder = jobArenaMgr.getLastHolder(jobSlot);
+ }
+ while (holder != -1) {
+ long resource = reqArenaMgr.getResourceId(holder);
+ int dsId = resArenaMgr.getDatasetId(resource);
+ int pkHashVal = resArenaMgr.getPkHashVal(resource);
+ unlock(dsId, pkHashVal, LockMode.ANY, jobSlot);
+ synchronized (jobArenaMgr) {
+ holder = jobArenaMgr.getLastHolder(jobSlot);
+ }
+ }
+ if (DEBUG_MODE) LOGGER.finer("del job slot " + TypeUtil.Global.toString(jobSlot));
+ jobArenaMgr.deallocate(jobSlot);
+ jobIdSlotMap.remove(jobId);
+ stats.logCounters(LOGGER, Level.INFO, true);
+ //LOGGER.info(toString());
+ }
+
+ private long findOrAllocJobSlot(int jobId) {
+ Long jobSlot = jobIdSlotMap.get(jobId);
+ if (jobSlot == null) {
+            jobSlot = Long.valueOf(jobArenaMgr.allocate());
+ if (DEBUG_MODE) LOGGER.finer("new job slot " + TypeUtil.Global.toString(jobSlot) + " (" + jobId + ")");
+ jobArenaMgr.setJobId(jobSlot, jobId);
+ Long oldSlot = jobIdSlotMap.putIfAbsent(jobId, jobSlot);
+ if (oldSlot != null) {
+ // if another thread allocated a slot for this jobId between
+ // get(..) and putIfAbsent(..), we'll use that slot and
+ // deallocate the one we allocated
+ if (DEBUG_MODE) LOGGER.finer("del job slot " + TypeUtil.Global.toString(jobSlot) + " due to conflict");
+ jobArenaMgr.deallocate(jobSlot);
+ jobSlot = oldSlot;
+ }
+ }
+ assert (jobSlot >= 0);
+ return jobSlot;
+ }
+
+ private long findOrAllocResourceSlot(ResourceGroup group, int dsId, int entityHashValue) {
+ long resSlot = findResourceInGroup(group, dsId, entityHashValue);
+
+ if (resSlot == -1) {
+ // we don't know about this resource, let's alloc a slot
+ resSlot = resArenaMgr.allocate();
+ resArenaMgr.setDatasetId(resSlot, dsId);
+ resArenaMgr.setPkHashVal(resSlot, entityHashValue);
+ resArenaMgr.setNext(resSlot, group.firstResourceIndex.get());
+ group.firstResourceIndex.set(resSlot);
+ if (DEBUG_MODE) LOGGER.finer("new res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue + ")");
+ } else {
+ if (DEBUG_MODE) LOGGER.finer("fnd res slot " + TypeUtil.Global.toString(resSlot) + " (" + dsId + ", " + entityHashValue + ")");
+ }
+ return resSlot;
+ }
+
+ private long allocRequestSlot(long resSlot, long jobSlot, byte lockMode) {
+ long reqSlot = reqArenaMgr.allocate();
+ reqArenaMgr.setResourceId(reqSlot, resSlot);
+ reqArenaMgr.setLockMode(reqSlot, lockMode); // lock mode is a byte!!
+ reqArenaMgr.setJobSlot(reqSlot, jobSlot);
+ if (DEBUG_MODE) {
+ LOGGER.finer("new req slot " + TypeUtil.Global.toString(reqSlot)
+ + " (" + TypeUtil.Global.toString(resSlot)
+ + ", " + TypeUtil.Global.toString(jobSlot)
+ + ", " + LockMode.toString(lockMode) + ")");
+ }
+ return reqSlot;
+ }
+
+ private LockAction determineLockAction(long resSlot, long jobSlot, byte lockMode) {
+ final int curLockMode = resArenaMgr.getMaxMode(resSlot);
+ final LockAction act = ACTION_MATRIX[curLockMode][lockMode];
+ if (act == LockAction.WAIT) {
+ return updateActionForSameJob(resSlot, jobSlot, lockMode);
+ }
+ return act;
+ }
+
+ /**
+     * When a lock conflict is with a different job, we always have to wait.
+     * If the conflict is with the same job, we either
+     * a) (wait and) convert the lock once conversion becomes viable, or
+     * b) acquire the lock immediately if the job requests the same resource
+     *    with the lock mode it already holds.
+ *
+ * @param resource
+ * the resource slot that's being locked
+ * @param job
+ * the job slot of the job locking the resource
+ * @param lockMode
+ * the lock mode that the resource should be locked with
+     * @return LockAction.GET if the job already holds the resource in the
+     *         requested mode, LockAction.CONV if it holds it in a different
+     *         mode, and LockAction.WAIT otherwise
+     */
+ private LockAction updateActionForSameJob(long resource, long job, byte lockMode) {
+ // TODO we can reduce the number of things we have to look at by
+ // carefully distinguishing the different lock modes
+ long holder = resArenaMgr.getLastHolder(resource);
+ LockAction res = LockAction.WAIT;
+ while (holder != -1) {
+ if (job == reqArenaMgr.getJobSlot(holder)) {
+ if (reqArenaMgr.getLockMode(holder) == lockMode) {
+ return LockAction.GET;
+ } else {
+ res = LockAction.CONV;
+ }
+ }
+ holder = reqArenaMgr.getNextRequest(holder);
+ }
+ return res;
+ }
+
+ private long findResourceInGroup(ResourceGroup group, int dsId, int entityHashValue) {
+ stats.logCounters(LOGGER, Level.INFO, false);
+ long resSlot = group.firstResourceIndex.get();
+ while (resSlot != -1) {
+ // either we already have a lock on this resource or we have a
+ // hash collision
+ if (resArenaMgr.getDatasetId(resSlot) == dsId && resArenaMgr.getPkHashVal(resSlot) == entityHashValue) {
+ return resSlot;
+ } else {
+ resSlot = resArenaMgr.getNext(resSlot);
+ }
+ }
+ return -1;
+ }
+
+ private void addHolder(long request, long resource, long job) {
+ long lastHolder = resArenaMgr.getLastHolder(resource);
+ reqArenaMgr.setNextRequest(request, lastHolder);
+ resArenaMgr.setLastHolder(resource, request);
+
+ synchronized (jobArenaMgr) {
+ long lastJobHolder = jobArenaMgr.getLastHolder(job);
+ insertIntoJobQueue(request, lastJobHolder);
+ jobArenaMgr.setLastHolder(job, request);
+ }
+ }
+
+ private long removeLastHolder(long resource, long jobSlot, byte lockMode) {
+ long holder = resArenaMgr.getLastHolder(resource);
+ if (holder < 0) {
+ throw new IllegalStateException("no holder for resource " + resource);
+ }
+
+ // remove from the list of holders for a resource
+ if (requestMatches(holder, jobSlot, lockMode)) {
+ // if the head of the queue matches, we need to update the resource
+ long next = reqArenaMgr.getNextRequest(holder);
+ resArenaMgr.setLastHolder(resource, next);
+ } else {
+ holder = removeRequestFromQueueForJob(holder, jobSlot, lockMode);
+ }
+
+ synchronized (jobArenaMgr) {
+ // remove from the list of requests for a job
+ long newHead = removeRequestFromJob(jobSlot, holder);
+ jobArenaMgr.setLastHolder(jobSlot, newHead);
+ }
+ return holder;
+ }
+
+ private boolean requestMatches(long holder, long jobSlot, byte lockMode) {
+ return jobSlot == reqArenaMgr.getJobSlot(holder)
+ && (lockMode == LockMode.ANY || lockMode == reqArenaMgr.getLockMode(holder));
+ }
+
+ private long removeRequestFromJob(long jobSlot, long holder) {
+ long prevForJob = reqArenaMgr.getPrevJobRequest(holder);
+ long nextForJob = reqArenaMgr.getNextJobRequest(holder);
+ if (nextForJob != -1) {
+ reqArenaMgr.setPrevJobRequest(nextForJob, prevForJob);
+ }
+ if (prevForJob == -1) {
+ return nextForJob;
+ } else {
+ reqArenaMgr.setNextJobRequest(prevForJob, nextForJob);
+ return -1;
+ }
+ }
+
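+    /**
+     * Minimal intrusive queue over the request arena: 'waiter' chains requests
+     * blocked on a lock, 'upgrader' chains requests blocked on a lock conversion.
+     */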
+ interface Queue {
+ void add(long request, long resource, long job);
+
+ void remove(long request, long resource, long job);
+ }
+
+ final Queue waiter = new Queue() {
+ public void add(long request, long resource, long job) {
+ long waiter = resArenaMgr.getFirstWaiter(resource);
+ reqArenaMgr.setNextRequest(request, -1);
+ if (waiter == -1) {
+ resArenaMgr.setFirstWaiter(resource, request);
+ } else {
+ appendToRequestQueue(waiter, request);
+ }
+ synchronized (jobArenaMgr) {
+ waiter = jobArenaMgr.getLastWaiter(job);
+ insertIntoJobQueue(request, waiter);
+ jobArenaMgr.setLastWaiter(job, request);
+ }
+ }
+
+ public void remove(long request, long resource, long job) {
+ long waiter = resArenaMgr.getFirstWaiter(resource);
+ if (waiter == request) {
+ long next = reqArenaMgr.getNextRequest(waiter);
+ resArenaMgr.setFirstWaiter(resource, next);
+ } else {
+ waiter = removeRequestFromQueueForSlot(waiter, request);
+ }
+ synchronized (jobArenaMgr) {
+ // remove from the list of requests for a job
+ long newHead = removeRequestFromJob(job, waiter);
+ jobArenaMgr.setLastWaiter(job, newHead);
+ }
+ }
+ };
+
+ final Queue upgrader = new Queue() {
+ public void add(long request, long resource, long job) {
+ long upgrader = resArenaMgr.getFirstUpgrader(resource);
+ reqArenaMgr.setNextRequest(request, -1);
+ if (upgrader == -1) {
+ resArenaMgr.setFirstUpgrader(resource, request);
+ } else {
+ appendToRequestQueue(upgrader, request);
+ }
+ synchronized (jobArenaMgr) {
+ upgrader = jobArenaMgr.getLastUpgrader(job);
+ insertIntoJobQueue(request, upgrader);
+ jobArenaMgr.setLastUpgrader(job, request);
+ }
+ }
+
+ public void remove(long request, long resource, long job) {
+ long upgrader = resArenaMgr.getFirstUpgrader(resource);
+ if (upgrader == request) {
+ long next = reqArenaMgr.getNextRequest(upgrader);
+ resArenaMgr.setFirstUpgrader(resource, next);
+ } else {
+ upgrader = removeRequestFromQueueForSlot(upgrader, request);
+ }
+ synchronized (jobArenaMgr) {
+ // remove from the list of requests for a job
+ long newHead = removeRequestFromJob(job, upgrader);
+ jobArenaMgr.setLastUpgrader(job, newHead);
+ }
+ }
+ };
+
+ private void insertIntoJobQueue(long newRequest, long oldRequest) {
+ reqArenaMgr.setNextJobRequest(newRequest, oldRequest);
+ reqArenaMgr.setPrevJobRequest(newRequest, -1);
+ if (oldRequest >= 0) {
+ reqArenaMgr.setPrevJobRequest(oldRequest, newRequest);
+ }
+ }
+
+ private void appendToRequestQueue(long head, long appendee) {
+ long next = reqArenaMgr.getNextRequest(head);
+ while (next != -1) {
+ head = next;
+ next = reqArenaMgr.getNextRequest(head);
+ }
+ reqArenaMgr.setNextRequest(head, appendee);
+ }
+
+ private long removeRequestFromQueueForSlot(long head, long reqSlot) {
+ long cur = head;
+ long prev = cur;
+ while (prev != -1) {
+ cur = reqArenaMgr.getNextRequest(prev);
+ if (cur == -1) {
+ throw new IllegalStateException("request " + reqSlot + " not in queue");
+ }
+ if (cur == reqSlot) {
+ break;
+ }
+ prev = cur;
+ }
+ long next = reqArenaMgr.getNextRequest(cur);
+ reqArenaMgr.setNextRequest(prev, next);
+ return cur;
+ }
+
+ /**
+     * Remove the first request for a given job and lock mode from a request queue.
+     * If the lockMode parameter is LockMode.ANY, the first request for the job
+     * is removed regardless of its lock mode.
+ *
+ * @param head
+ * the head of the request queue
+ * @param jobSlot
+ * the job slot
+ * @param lockMode
+ * the lock mode
+ * @return the slot of the first request that matched the given job
+ */
+ private long removeRequestFromQueueForJob(long head, long jobSlot, byte lockMode) {
+ long holder = head;
+ long prev = holder;
+ while (prev != -1) {
+ holder = reqArenaMgr.getNextRequest(prev);
+ if (holder == -1) {
+ throw new IllegalStateException("no entry for job " + jobSlot + " in queue");
+ }
+ if (requestMatches(holder, jobSlot, lockMode)) {
+ break;
+ }
+ prev = holder;
+ }
+ long next = reqArenaMgr.getNextRequest(holder);
+ reqArenaMgr.setNextRequest(prev, next);
+ return holder;
+ }
+
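+ /**
+ * Recompute the strongest lock mode still held on the given resource by
+ * walking its holder list. Short-circuits if another holder still holds
+ * the old maximum mode, since the maximum cannot have changed in that case.
+ */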
+ private int determineNewMaxMode(long resource, int oldMaxMode) {
+ int newMaxMode = LockMode.NL;
+ long holder = resArenaMgr.getLastHolder(resource);
+ while (holder != -1) {
+ int curLockMode = reqArenaMgr.getLockMode(holder);
+ if (curLockMode == oldMaxMode) {
+ // we have another lock of the same mode - we're done
+ return oldMaxMode;
+ }
+ switch (ACTION_MATRIX[newMaxMode][curLockMode]) {
+ case UPD:
+ newMaxMode = curLockMode;
+ break;
+ case GET:
+ break;
+ case WAIT:
+ throw new IllegalStateException("incompatible locks in holder queue");
+ }
+ holder = reqArenaMgr.getNextRequest(holder);
+ }
+ return newMaxMode;
+ }
+
+ private boolean resourceNotUsed(long resource) {
+ return resArenaMgr.getLastHolder(resource) == -1 && resArenaMgr.getFirstUpgrader(resource) == -1
+ && resArenaMgr.getFirstWaiter(resource) == -1;
+ }
+
+ private void log(String string, int id, int entityHashValue, byte lockMode, ITransactionContext txnContext) {
+ if (!LOGGER.isLoggable(LVL)) {
+ return;
+ }
+ StringBuilder sb = new StringBuilder();
+ sb.append("{ op : ").append(string);
+ if (id != -1) {
+ sb.append(" , dataset : ").append(id);
+ }
+ if (entityHashValue != -1) {
+ sb.append(" , entity : ").append(entityHashValue);
+ }
+ if (lockMode != LockMode.NL) {
+ sb.append(" , mode : ").append(LockMode.toString(lockMode));
+ }
+ if (txnContext != null) {
+ sb.append(" , jobId : ").append(txnContext.getJobId());
+ }
+ sb.append(" }");
+ LOGGER.log(LVL, sb.toString());
+ }
+
+ private void validateJob(ITransactionContext txnContext) throws ACIDException {
+ if (txnContext.getTxnState() == ITransactionManager.ABORTED) {
+ throw new ACIDException("" + txnContext.getJobId() + " is in ABORTED state.");
+ } else if (txnContext.isTimeout()) {
+ requestAbort(txnContext);
+ }
+ }
+
+ private void requestAbort(ITransactionContext txnContext) throws ACIDException {
+ txnContext.setTimeout(true);
+ throw new ACIDException("Transaction " + txnContext.getJobId()
+ + " should abort (requested by the Lock Manager)");
+ }
+
+ public StringBuilder append(StringBuilder sb) {
+ table.getAllLatches();
+ try {
+ sb.append(">>dump_begin\t>>----- [resTable] -----\n");
+ table.append(sb);
+ sb.append(">>dump_end\t>>----- [resTable] -----\n");
+
+ sb.append(">>dump_begin\t>>----- [resArenaMgr] -----\n");
+ resArenaMgr.append(sb);
+ sb.append(">>dump_end\t>>----- [resArenaMgr] -----\n");
+
+ sb.append(">>dump_begin\t>>----- [reqArenaMgr] -----\n");
+ reqArenaMgr.append(sb);
+ sb.append(">>dump_end\t>>----- [reqArenaMgr] -----\n");
+
+ sb.append(">>dump_begin\t>>----- [jobIdSlotMap] -----\n");
+ for (Integer i : jobIdSlotMap.keySet()) {
+ sb.append(i).append(" : ");
+ TypeUtil.Global.append(sb, jobIdSlotMap.get(i));
+ sb.append("\n");
+ }
+ sb.append(">>dump_end\t>>----- [jobIdSlotMap] -----\n");
+
+ sb.append(">>dump_begin\t>>----- [jobArenaMgr] -----\n");
+ jobArenaMgr.append(sb);
+ sb.append(">>dump_end\t>>----- [jobArenaMgr] -----\n");
+ } finally {
+ table.releaseAllLatches();
+ }
+ return sb;
+ }
+
+ @Override
+ public String toString() {
+ return append(new StringBuilder()).toString();
+ }
+
+ @Override
+ public String prettyPrint() throws ACIDException {
+ StringBuilder s = new StringBuilder("\n########### LockManager Status #############\n");
+ return append(s).toString() + "\n";
+ }
+
+ @Override
+ public void start() {
+ //no op
+ }
+
+ @Override
+ public void dumpState(OutputStream os) throws IOException {
+ os.write(toString().getBytes());
+ }
+
+ @Override
+ public void stop(boolean dumpState, OutputStream os) throws IOException {
+ if (dumpState) {
+ dumpState(os);
+ }
+ }
+
+ private static class DatasetLockCache {
+ private long jobId = -1;
+ private HashMap<Integer, Byte> lockCache = new HashMap<Integer, Byte>();
+ // size 1 cache to avoid the boxing/unboxing that comes with the
+ // access to the HashMap
+ private int cDsId = -1;
+ private byte cDsLockMode = -1;
+
+ public boolean contains(final int jobId, final int dsId, byte dsLockMode) {
+ if (this.jobId == jobId) {
+ if (this.cDsId == dsId && this.cDsLockMode == dsLockMode) {
+ return true;
+ }
+ final Byte cachedLockMode = this.lockCache.get(dsId);
+ if (cachedLockMode != null && cachedLockMode == dsLockMode) {
+ this.cDsId = dsId;
+ this.cDsLockMode = dsLockMode;
+ return true;
+ }
+ } else {
+ this.jobId = -1;
+ this.cDsId = -1;
+ this.cDsLockMode = -1;
+ this.lockCache.clear();
+ }
+ return false;
+ }
+
+ public void put(final int jobId, final int dsId, byte dsLockMode) {
+ this.jobId = jobId;
+ this.cDsId = dsId;
+ this.cDsLockMode = dsLockMode;
+ this.lockCache.put(dsId, dsLockMode);
+ }
+
+ @Override
+ public String toString() {
+ return "[ " + jobId + " : " + lockCache.toString() + "]";
+ }
+ }
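+
+ // Usage sketch for DatasetLockCache (illustrative; assumes the lock manager
+ // keeps one cache instance per thread):
+ //
+ // if (!dsLockCache.contains(jobId, dsId, dsLockMode)) {
+ // ... acquire the dataset lock through the lock table ...
+ // dsLockCache.put(jobId, dsId, dsLockMode);
+ // }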
+
+ private static class ResourceGroupTable {
+ public static final int TABLE_SIZE = 1024; // TODO increase?
+
+ private ResourceGroup[] table;
+
+ public ResourceGroupTable() {
+ table = new ResourceGroup[TABLE_SIZE];
+ for (int i = 0; i < TABLE_SIZE; ++i) {
+ table[i] = new ResourceGroup();
+ }
+ }
+
+ ResourceGroup get(int dId, int entityHashValue) {
+ // TODO ensure good properties of hash function
+ int h = Math.abs(dId ^ entityHashValue);
+ if (h < 0) {
+ h = 0; // Math.abs(Integer.MIN_VALUE) is still negative
+ }
+ return table[h % TABLE_SIZE];
+ }
+
+ public void getAllLatches() {
+ for (int i = 0; i < TABLE_SIZE; ++i) {
+ table[i].getLatch();
+ }
+ }
+
+ public void releaseAllLatches() {
+ for (int i = 0; i < TABLE_SIZE; ++i) {
+ table[i].releaseLatch();
+ }
+ }
+
+ public StringBuilder append(StringBuilder sb) {
+ return append(sb, false);
+ }
+
+ public StringBuilder append(StringBuilder sb, boolean detail) {
+ for (int i = 0; i < table.length; ++i) {
+ sb.append(i).append(" : ");
+ if (detail) {
+ sb.append(table[i]);
+ } else {
+ sb.append(table[i].firstResourceIndex);
+ }
+ sb.append('\n');
+ }
+ return sb;
+ }
+ }
+
+ private static class ResourceGroup {
+ private ReentrantReadWriteLock latch;
+ private Condition condition;
+ AtomicLong firstResourceIndex;
+
+ ResourceGroup() {
+ latch = new ReentrantReadWriteLock();
+ condition = latch.writeLock().newCondition();
+ firstResourceIndex = new AtomicLong(-1);
+ }
+
+ void getLatch() {
+ log("latch");
+ latch.writeLock().lock();
+ }
+
+ void releaseLatch() {
+ log("release");
+ latch.writeLock().unlock();
+ }
+
+ boolean hasWaiters() {
+ return latch.hasQueuedThreads();
+ }
+
+ void await(ITransactionContext txnContext) throws ACIDException {
+ log("wait for");
+ try {
+ condition.await();
+ } catch (InterruptedException e) {
+ LOGGER.finer("interrupted while wating on ResourceGroup");
+ throw new ACIDException(txnContext, "interrupted", e);
+ }
+ }
+
+ void wakeUp() {
+ log("notify");
+ condition.signalAll();
+ }
+
+ void log(String s) {
+ if (LOGGER.isLoggable(LVL)) {
+ LOGGER.log(LVL, s + " " + toString());
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "{ id : " + hashCode() + ", first : " + firstResourceIndex.toString() + ", waiters : "
+ + (hasWaiters() ? "true" : "false") + " }";
+ }
+ }
+}
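The ResourceGroup / ResourceGroupTable pair above implements lock striping: a
(dataset id, entity hash) pair maps to one of 1024 groups, and each group's
write latch and condition variable protect every resource that hashes into it,
so threads contend per stripe rather than on a single global lock. The
following self-contained Java sketch distills just that pattern; all names in
it are illustrative, not part of the patch.

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantReadWriteLock;

// Stand-alone sketch of the striping pattern used by ResourceGroupTable:
// a fixed array of groups, each owning the write latch and condition that
// protect every resource hashing into it.
final class StripedGroups {
    static final int TABLE_SIZE = 1024;

    static final class Group {
        final ReentrantReadWriteLock latch = new ReentrantReadWriteLock();
        // waiters block on this condition while holding the group's write latch
        final Condition condition = latch.writeLock().newCondition();
    }

    private final Group[] table = new Group[TABLE_SIZE];

    StripedGroups() {
        for (int i = 0; i < TABLE_SIZE; ++i) {
            table[i] = new Group();
        }
    }

    Group get(int dId, int entityHashValue) {
        int h = Math.abs(dId ^ entityHashValue);
        if (h < 0) {
            h = 0; // Math.abs(Integer.MIN_VALUE) is still negative
        }
        return table[h % TABLE_SIZE];
    }
}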
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DatasetLockInfo.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DatasetLockInfo.java
index 0573957..f3ff0e0 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DatasetLockInfo.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DatasetLockInfo.java
@@ -288,7 +288,7 @@
//This entityInfo(i.e., holder) is the last resource held by this job.
jobInfo.setlastHoldingResource(holder);
}
-
+
//jobInfo.decreaseDatasetLockCount(holder);
}
@@ -317,9 +317,9 @@
lastObj = lockWaiterManager.getLockWaiter(waiterObjId);
lastObj.setNextWaiterObjId(-1);
-// if (LockManager.IS_DEBUG_MODE) {
-// System.out.println(printWaiters());
-// }
+ // if (LockManager.IS_DEBUG_MODE) {
+ // System.out.println(printWaiters());
+ // }
}
public void removeWaiter(int waiterObjId) {
@@ -362,9 +362,9 @@
firstWaiter = nextObjId;
}
-// if (LockManager.IS_DEBUG_MODE) {
-// System.out.println(printWaiters());
-// }
+ // if (LockManager.IS_DEBUG_MODE) {
+ // System.out.println(printWaiters());
+ // }
}
public void addUpgrader(int waiterObjId) {
@@ -451,7 +451,7 @@
return s.toString();
}
-
+
public String coreDump() {
StringBuilder sb = new StringBuilder();
sb.append("\n\t firstUpgrader: " + firstUpgrader);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DummyLockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DummyLockManager.java
new file mode 100644
index 0000000..5f0b53f
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DummyLockManager.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.transaction.management.service.locking;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.transactions.DatasetId;
+import edu.uci.ics.asterix.common.transactions.ILockManager;
+import edu.uci.ics.asterix.common.transactions.ITransactionContext;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
+
+
+/**
+ * A dummy implementation of the ILockManager interface. It assumes that all
+ * requests succeed immediately. It can be used for jobs that are known to be
+ * conflict-free, but it will yield wrong results if there are conflicts.
+ *
+ * @author tillw
+ *
+ */
+public class DummyLockManager implements ILockManager, ILifeCycleComponent {
+
+ public DummyLockManager(TransactionSubsystem transactionSubsystem) {
+ }
+
+ @Override
+ public void start() {
+ }
+
+ @Override
+ public void stop(boolean dumpState, OutputStream outputStream) throws IOException {
+ }
+
+ @Override
+ public void lock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ }
+
+ @Override
+ public void releaseLocks(ITransactionContext txnContext) throws ACIDException {
+ }
+
+ @Override
+ public void unlock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext)
+ throws ACIDException {
+ }
+
+ @Override
+ public void instantLock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext context)
+ throws ACIDException {
+ }
+
+ @Override
+ public boolean tryLock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext context)
+ throws ACIDException {
+ return true;
+ }
+
+ @Override
+ public boolean instantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
+ ITransactionContext txnContext) throws ACIDException {
+ return true;
+ }
+
+ @Override
+ public String prettyPrint() throws ACIDException {
+ return "DummyLockManager";
+ }
+
+ @Override
+ public void dumpState(OutputStream os) throws IOException {
+ }
+
+}
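Since every method above is a no-op, a conflict-free job runs through
DummyLockManager without ever blocking. Below is a minimal smoke-test sketch,
assuming it is acceptable to pass null for the unused TransactionSubsystem and
transaction contexts and (byte) 0 for the lock mode; the demo class itself is
hypothetical.

import edu.uci.ics.asterix.common.transactions.DatasetId;
import edu.uci.ics.asterix.common.transactions.ILockManager;
import edu.uci.ics.asterix.transaction.management.service.locking.DummyLockManager;

// Hypothetical smoke test; DummyLockManager ignores all of its arguments,
// so the nulls and the dummy lock mode below are safe.
final class DummyLockManagerDemo {
    public static void main(String[] args) throws Exception {
        ILockManager lockMgr = new DummyLockManager(null);
        DatasetId dsId = new DatasetId(1);
        lockMgr.lock(dsId, 42, (byte) 0, null); // returns immediately
        System.out.println(lockMgr.tryLock(dsId, 42, (byte) 0, null)); // always true
        lockMgr.unlock(dsId, 42, (byte) 0, null);
        lockMgr.releaseLocks(null); // nothing to release
    }
}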
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
index ef4cf2e..1c6cb7d 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
@@ -257,7 +257,7 @@
break;
}
}
-
+
//reset allocChild to the first buffer
allocChild = 0;
@@ -279,19 +279,19 @@
}
return s.toString();
}
-
+
public void coreDump(OutputStream os) {
StringBuilder sb = new StringBuilder("\n\t########### EntityLockInfoManager Status #############\n");
int size = pArray.size();
ChildEntityInfoArrayManager child;
- sb.append("Number of Child: " + size + "\n");
+ sb.append("Number of Child: " + size + "\n");
for (int i = 0; i < size; i++) {
try {
child = pArray.get(i);
sb.append("child[" + i + "]");
sb.append(child.prettyPrint());
-
+
os.write(sb.toString().getBytes());
} catch (IOException e) {
//ignore IOException
@@ -299,7 +299,7 @@
sb = new StringBuilder();
}
}
-
+
public int getShrinkTimerThreshold() {
return shrinkTimerThreshold;
}
@@ -575,7 +575,7 @@
public int getFreeSlotNum() {
return freeSlotNum;
}
-
+
public String prettyPrint() {
StringBuilder sb = new StringBuilder();
sb.append("\n\toccupiedSlots:" + getNumOfOccupiedSlots());
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
index 2c64b26..9e9a219 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
@@ -256,7 +256,7 @@
break;
}
}
-
+
//reset allocChild to the first buffer
allocChild = 0;
@@ -278,19 +278,19 @@
}
return s.toString();
}
-
+
public void coreDump(OutputStream os) {
StringBuilder sb = new StringBuilder("\n\t########### EntityLockInfoManager Status #############\n");
int size = pArray.size();
ChildEntityLockInfoArrayManager child;
- sb.append("Number of Child: " + size + "\n");
+ sb.append("Number of Child: " + size + "\n");
for (int i = 0; i < size; i++) {
try {
child = pArray.get(i);
sb.append("child[" + i + "]");
sb.append(child.prettyPrint());
-
+
os.write(sb.toString().getBytes());
} catch (IOException e) {
//ignore IOException
@@ -298,7 +298,7 @@
sb = new StringBuilder();
}
}
-
+
public int getShrinkTimerThreshold() {
return SHRINK_TIMER_THRESHOLD;
}
@@ -751,7 +751,7 @@
public int getFreeSlotNum() {
return freeSlotNum;
}
-
+
public String prettyPrint() {
StringBuilder sb = new StringBuilder();
sb.append("\n\toccupiedSlots:" + getNumOfOccupiedSlots());
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Job.json b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Job.json
new file mode 100644
index 0000000..a649b7c
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Job.json
@@ -0,0 +1,24 @@
+{
+ "name" : "Job",
+ "fields" : [
+ {
+ "name" : "last holder",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "last waiter",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "last upgrader",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "job id",
+ "type" : "INT"
+ }
+ ]
+}
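Job.json describes the per-job record behind the jobArenaMgr calls seen above:
three GLOBAL list heads initialized to -1 (last holder, last waiter, last
upgrader) plus the job id. Presumably the file feeds a record generator that
emits the JobArenaMgr accessors (getLastWaiter, setLastUpgrader, and so on).
The sketch below shows roughly what such generated accessors amount to; the
flat long[] layout and every name in it are assumptions for illustration, not
the generated code.

// Rough sketch of a generated arena manager for the Job record: each record
// occupies FIELDS consecutive slots of a backing long array. Layout and
// names are assumptions.
final class JobArenaSketch {
    private static final int FIELDS = 4; // last holder, last waiter, last upgrader, job id
    private static final int LAST_HOLDER = 0;
    private static final int LAST_WAITER = 1;
    private static final int LAST_UPGRADER = 2;
    private static final int JOB_ID = 3;

    private final long[] arena;

    JobArenaSketch(int capacity) {
        arena = new long[capacity * FIELDS];
        for (int slot = 0; slot < capacity; ++slot) {
            arena[slot * FIELDS + LAST_HOLDER] = -1; // "initial" : "-1"
            arena[slot * FIELDS + LAST_WAITER] = -1;
            arena[slot * FIELDS + LAST_UPGRADER] = -1;
        }
    }

    long getLastWaiter(long slot) {
        return arena[(int) slot * FIELDS + LAST_WAITER];
    }

    void setLastWaiter(long slot, long value) {
        arena[(int) slot * FIELDS + LAST_WAITER] = value;
    }
}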
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/JobInfo.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/JobInfo.java
index 28d60f3..46bde4e 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/JobInfo.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/JobInfo.java
@@ -289,7 +289,7 @@
}
return s.toString();
}
-
+
public String coreDump() {
StringBuilder sb = new StringBuilder();
sb.append("\n\t datasetISLockHT");
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
index c7df2f2..1b22a34 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
@@ -15,7 +15,6 @@
package edu.uci.ics.asterix.transaction.management.service.locking;
-import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Iterator;
@@ -98,7 +97,8 @@
this.entityLockInfoManager = new EntityLockInfoManager(entityInfoManager, lockWaiterManager);
this.deadlockDetector = new DeadlockDetector(jobHT, datasetResourceHT, entityLockInfoManager,
entityInfoManager, lockWaiterManager);
- this.toutDetector = new TimeOutDetector(this);
+ this.toutDetector = new TimeOutDetector(this, txnSubsystem.getAsterixAppRuntimeContextProvider()
+ .getThreadExecutor());
this.tempDatasetIdObj = new DatasetId(0);
this.tempJobIdObj = new JobId(0);
this.consecutiveWakeupContext = new ConsecutiveWakeupContext();
@@ -278,7 +278,7 @@
did = entityInfoManager.getDatasetId(entityInfo);
entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
if (did == datasetId.getId() && entityHashValue != -1) {
- this.unlock(datasetId, entityHashValue, txnContext);
+ this.unlock(datasetId, entityHashValue, LockMode.ANY, txnContext);
}
entityInfo = prevEntityInfo;
@@ -638,7 +638,7 @@
}
@Override
- public void unlock(DatasetId datasetId, int entityHashValue, ITransactionContext txnContext) throws ACIDException {
+ public void unlock(DatasetId datasetId, int entityHashValue, byte lockMode, ITransactionContext txnContext) throws ACIDException {
internalUnlock(datasetId, entityHashValue, txnContext, false);
}
@@ -2032,33 +2032,31 @@
@Override
public void stop(boolean dumpState, OutputStream os) {
if (dumpState) {
-
- //#. dump Configurable Variables
- dumpConfVars(os);
-
- //#. dump jobHT
- dumpJobInfo(os);
-
- //#. dump datasetResourceHT
- dumpDatasetLockInfo(os);
-
- //#. dump entityLockInfoManager
- dumpEntityLockInfo(os);
-
- //#. dump entityInfoManager
- dumpEntityInfo(os);
-
- //#. dump lockWaiterManager
-
- dumpLockWaiterInfo(os);
- try {
- os.flush();
- } catch (IOException e) {
- //ignore
- }
+ dumpState(os);
}
}
+ @Override
+ public void dumpState(OutputStream os) {
+ //#. dump Configurable Variables
+ dumpConfVars(os);
+
+ //#. dump jobHT
+ dumpJobInfo(os);
+
+ //#. dump datasetResourceHT
+ dumpDatasetLockInfo(os);
+
+ //#. dump entityLockInfoManager
+ dumpEntityLockInfo(os);
+
+ //#. dump entityInfoManager
+ dumpEntityInfo(os);
+
+ //#. dump lockWaiterManager
+ dumpLockWaiterInfo(os);
+ }
+
private void dumpConfVars(OutputStream os) {
try {
StringBuilder sb = new StringBuilder();
@@ -2211,7 +2209,7 @@
tempDatasetIdObj.setId(logRecord.getDatasetId());
tempJobIdObj.setId(logRecord.getJobId());
txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(tempJobIdObj, false);
- unlock(tempDatasetIdObj, logRecord.getPKHashValue(), txnCtx);
+ unlock(tempDatasetIdObj, logRecord.getPKHashValue(), LockMode.ANY, txnCtx);
txnCtx.notifyOptracker(false);
} else if (logRecord.getLogType() == LogType.JOB_COMMIT || logRecord.getLogType() == LogType.ABORT) {
tempJobIdObj.setId(logRecord.getJobId());
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
index e61cb55..8f3e6df 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
@@ -24,6 +24,7 @@
import org.apache.commons.io.FileUtils;
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
@@ -32,6 +33,7 @@
import edu.uci.ics.asterix.common.transactions.ILockManager;
import edu.uci.ics.asterix.common.transactions.ITransactionManager;
import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
@@ -42,7 +44,8 @@
//prepare configuration file
File cwd = new File(System.getProperty("user.dir"));
File asterixdbDir = cwd.getParentFile();
- File srcFile = new File(asterixdbDir.getAbsoluteFile(), "asterix-app/src/main/resources/asterix-build-configuration.xml");
+ File srcFile = new File(asterixdbDir.getAbsoluteFile(),
+ "asterix-app/src/main/resources/asterix-build-configuration.xml");
File destFile = new File(cwd, "target/classes/asterix-configuration.xml");
FileUtils.copyFile(srcFile, destFile);
@@ -62,7 +65,7 @@
ArrayList<LockRequest> requestList;
ArrayList<ArrayList<Integer>> expectedResultList;
int resultListIndex;
- LockManager lockMgr;
+ ILockManager lockMgr;
String requestFileName;
long defaultWaitTime;
@@ -72,7 +75,7 @@
this.workerReadyQueue = new WorkerReadyQueue();
this.requestList = new ArrayList<LockRequest>();
this.expectedResultList = new ArrayList<ArrayList<Integer>>();
- this.lockMgr = (LockManager) txnProvider.getLockManager();
+ this.lockMgr = txnProvider.getLockManager();
this.requestFileName = new String(requestFileName);
this.resultListIndex = 0;
this.defaultWaitTime = 10;
@@ -151,6 +154,7 @@
if (isSuccess) {
log("\n*** Test Passed ***");
}
+ ((LogManager) txnProvider.getLogManager()).stop(false, null);
}
public boolean handleRequest(LockRequest request) throws ACIDException {
@@ -482,7 +486,7 @@
request.txnContext);
break;
case RequestType.UNLOCK:
- lockMgr.unlock(request.datasetIdObj, request.entityHashValue, request.txnContext);
+ lockMgr.unlock(request.datasetIdObj, request.entityHashValue, request.lockMode, request.txnContext);
break;
case RequestType.RELEASE_LOCKS:
lockMgr.releaseLocks(request.txnContext);
@@ -511,6 +515,18 @@
public void log(String s) {
System.out.println(s);
}
+
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("{ t : \"").append(threadName).append("\", r : ");
+ if (lockRequest == null) {
+ sb.append("null");
+ } else {
+ sb.append("\"").append(lockRequest.toString()).append("\"");
+ }
+ sb.append(" }");
+ return sb.toString();
+ }
}
class WorkerReadyQueue {
@@ -618,7 +634,7 @@
} catch (InterruptedException e) {
e.printStackTrace();
}
- log(Thread.currentThread().getName() + "Waiting for worker to finish its task...");
+ log(Thread.currentThread().getName() + " Waiting for worker to finish its task...");
queueSize = workerReadyQueue.size();
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
index e6f2798..214aa35 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
@@ -14,9 +14,14 @@
*/
package edu.uci.ics.asterix.transaction.management.service.locking;
+import java.io.File;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Random;
+import org.apache.commons.io.FileUtils;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
@@ -26,6 +31,7 @@
import edu.uci.ics.asterix.common.transactions.ITransactionContext;
import edu.uci.ics.asterix.common.transactions.ITransactionManager;
import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
@@ -45,9 +51,16 @@
private static int jobId = 0;
private static Random rand;
- public static void main(String args[]) throws ACIDException, AsterixException {
+ public static void main(String args[]) throws ACIDException, AsterixException, IOException {
int i;
- TransactionSubsystem txnProvider = new TransactionSubsystem("LockManagerRandomUnitTest", null,
+ //prepare configuration file
+ File cwd = new File(System.getProperty("user.dir"));
+ File asterixdbDir = cwd.getParentFile();
+ File srcFile = new File(asterixdbDir.getAbsoluteFile(),
+ "asterix-app/src/main/resources/asterix-build-configuration.xml");
+ File destFile = new File(cwd, "target/classes/asterix-configuration.xml");
+ FileUtils.copyFile(srcFile, destFile);
+
+ TransactionSubsystem txnProvider = new TransactionSubsystem("nc1", null,
new AsterixTransactionProperties(new AsterixPropertiesAccessor()));
rand = new Random(System.currentTimeMillis());
for (i = 0; i < MAX_NUM_OF_ENTITY_LOCK_JOB; i++) {
@@ -64,6 +77,7 @@
System.out.println("Creating " + i + "th EntityLockUpgradeJob..");
generateEntityLockUpgradeThread(txnProvider);
}
+ ((LogManager) txnProvider.getLogManager()).stop(false, null);
}
private static void generateEntityLockThread(TransactionSubsystem txnProvider) {
@@ -496,7 +510,7 @@
request.txnContext);
break;
case RequestType.UNLOCK:
- lockMgr.unlock(request.datasetIdObj, request.entityHashValue, request.txnContext);
+ lockMgr.unlock(request.datasetIdObj, request.entityHashValue, request.lockMode, request.txnContext);
break;
case RequestType.RELEASE_LOCKS:
lockMgr.releaseLocks(request.txnContext);
@@ -555,6 +569,11 @@
this.entityHashValue = waitTime;
}
+ @Override
+ public String toString() {
+ return prettyPrint();
+ }
+
public String prettyPrint() {
StringBuilder s = new StringBuilder();
//s.append(threadName.charAt(7)).append("\t").append("\t");
@@ -595,23 +614,7 @@
}
s.append("\tJ").append(txnContext.getJobId().getId()).append("\tD").append(datasetIdObj.getId()).append("\tE")
.append(entityHashValue).append("\t");
- switch (lockMode) {
- case LockMode.S:
- s.append("S");
- break;
- case LockMode.X:
- s.append("X");
- break;
- case LockMode.IS:
- s.append("IS");
- break;
- case LockMode.IX:
- s.append("IX");
- break;
- default:
- throw new UnsupportedOperationException("Unsupported lock mode");
- }
- s.append("\n");
+ s.append(LockMode.toString(lockMode)).append("\n");
return s.toString();
}
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerStats.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerStats.java
new file mode 100644
index 0000000..14d1775
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerStats.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.transaction.management.service.locking;
+
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+final class LockManagerStats {
+ private final int loggingPeriod;
+
+ private final AtomicLong lCnt = new AtomicLong();
+ private final AtomicLong ilCnt = new AtomicLong();
+ private final AtomicLong tlCnt = new AtomicLong();
+ private final AtomicLong itlCnt = new AtomicLong();
+ private final AtomicLong ulCnt = new AtomicLong();
+ private final AtomicLong rlCnt = new AtomicLong();
+
+ LockManagerStats(int loggingPeriod) {
+ this.loggingPeriod = loggingPeriod;
+ }
+
+ final void lock() { lCnt.incrementAndGet(); }
+ final void instantLock() { ilCnt.incrementAndGet(); }
+ final void tryLock() { tlCnt.incrementAndGet(); }
+ final void instantTryLock() { itlCnt.incrementAndGet(); }
+ final void unlock() { ulCnt.incrementAndGet(); }
+ final void releaseLocks() { rlCnt.incrementAndGet(); }
+
+ final int requestSum() {
+ return lCnt.intValue() + ilCnt.intValue() + tlCnt.intValue()
+ + itlCnt.intValue() + ulCnt.intValue() + rlCnt.intValue();
+ }
+
+ final StringBuilder append(StringBuilder sb) {
+ sb.append("{")
+ .append(" lock : ").append(lCnt)
+ .append(", instantLock : ").append(ilCnt)
+ .append(", tryLock : ").append(tlCnt)
+ .append(", instantTryLock : ").append(itlCnt)
+ .append(", unlock : ").append(ulCnt)
+ .append(", releaseLocks : ").append(rlCnt)
+ .append(" }");
+ return sb;
+ }
+
+ @Override
+ public String toString() {
+ return append(new StringBuilder()).toString();
+ }
+
+ final void logCounters(final Logger logger, final Level lvl, boolean always) {
+ if (logger.isLoggable(lvl)
+ && (always || requestSum() % loggingPeriod == 0)) {
+ logger.log(lvl, toString());
+ }
+ }
+}
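A plausible wiring of this class, sketched below: each public lock-manager
entry point bumps its counter and then calls logCounters, which emits the
totals only when the log level is enabled and the accumulated request count is
a multiple of the configured period. The call site is hypothetical and must
live in the same package, since LockManagerStats is package-private.

import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical call site; the field name and the period of 10000 are
// illustrative.
final class StatsDemo {
    private static final Logger LOGGER = Logger.getLogger(StatsDemo.class.getName());

    private final LockManagerStats stats = new LockManagerStats(10000);

    void lock() {
        stats.lock(); // count this request
        // ... the actual locking work would go here ...
        stats.logCounters(LOGGER, Level.FINE, false); // logs every 10000th request
    }
}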
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockRequestTracker.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockRequestTracker.java
index 702f492..a5bae28 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockRequestTracker.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockRequestTracker.java
@@ -45,7 +45,7 @@
//handle global request queue
historyForAllJobs.append(request.prettyPrint()).append("--> ").append(msg).append("\n");
}
-
+
public void addRequest(LockRequest request) {
requestHistoryForAllJobs.append(request.prettyPrint());
}
@@ -64,7 +64,7 @@
}
return history.toString();
}
-
+
public String getRequestHistoryForAllJobs() {
return requestHistoryForAllJobs.toString();
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiter.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiter.java
index 8bf9929..8fa28fe 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiter.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiter.java
@@ -17,9 +17,9 @@
/**
* LockWaiter object is used for keeping a lock waiter or a lock upgrader information on a certain resource.
- * The resource can be a dataset or an entity.
+ * The resource can be a dataset or an entity.
+ *
* @author kisskys
- *
*/
public class LockWaiter {
/**
@@ -77,69 +77,69 @@
public boolean isVictim() {
return this.victim;
}
-
+
public void increaseWaiterCount() {
waiterCount++;
}
-
+
public void decreaseWaiterCount() {
waiterCount--;
}
-
+
public byte getWaiterCount() {
return waiterCount;
}
-
+
public void setWaiterCount(byte count) {
waiterCount = count;
}
-
+
public void setFirstGetUp(boolean isFirst) {
firstGetUp = isFirst;
}
-
+
public boolean isFirstGetUp() {
return firstGetUp;
}
-
+
public void setNextWaiterObjId(int next) {
nextWaiterObjId = next;
}
-
+
public int getNextWaiterObjId() {
return nextWaiterObjId;
}
-
+
public void setNextWaitingResourceObjId(int next) {
nextWaitingResourceObjId = next;
}
-
+
public int getNextWaitingResourceObjId() {
return nextWaitingResourceObjId;
}
-
+
public void setBeginWaitTime(long time) {
this.beginWaitTime = time;
}
-
+
public long getBeginWaitTime() {
return beginWaitTime;
}
-
+
public boolean isWaiter() {
return isWaiter;
}
-
+
public void setWaiter(boolean isWaiter) {
this.isWaiter = isWaiter;
}
-
+
public boolean isWaitingOnEntityLock() {
return isWaitingOnEntityLock;
}
-
+
public void setWaitingOnEntityLock(boolean isWaitingOnEntityLock) {
this.isWaitingOnEntityLock = isWaitingOnEntityLock;
}
-
+
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiterManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiterManager.java
index 617c9ba..110f8a2 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiterManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockWaiterManager.java
@@ -37,96 +37,96 @@
private boolean isShrinkTimerOn;
private int occupiedSlots;
-// ////////////////////////////////////////////////
-// // begin of unit test
-// ////////////////////////////////////////////////
-//
-// public static final int SHRINK_TIMER_THRESHOLD = 0; //for unit test
-//
-// /**
-// * @param args
-// */
-// public static void main(String[] args) {
-// final int DataSize = 5000;
-//
-// int i, j;
-// int slots = ChildLockWaiterArrayManager.NUM_OF_SLOTS;
-// int data[] = new int[DataSize];
-// LockWaiterManager lwMgr = new LockWaiterManager();
-//
-// //allocate: 50
-// System.out.println("allocate: 50");
-// for (i = 0; i < 5; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// data[j] = lwMgr.allocate();
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //deallocate from the last child to the first child
-// System.out.println("deallocate from the last child to the first child");
-// for (i = 4; i >= 0; i--) {
-// for (j = i * slots + slots - 1; j >= i * slots; j--) {
-// lwMgr.deallocate(data[j]);
-// }
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //allocate: 50
-// System.out.println("allocate: 50");
-// for (i = 0; i < 5; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// data[j] = lwMgr.allocate();
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //deallocate from the first child to last child
-// System.out.println("deallocate from the first child to last child");
-// for (i = 0; i < 5; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// lwMgr.deallocate(data[j]);
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //allocate: 50
-// System.out.println("allocate: 50");
-// for (i = 0; i < 5; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// data[j] = lwMgr.allocate();
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //deallocate from the first child to 4th child
-// System.out.println("deallocate from the first child to 4th child");
-// for (i = 0; i < 4; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// lwMgr.deallocate(data[j]);
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-//
-// //allocate: 40
-// System.out.println("allocate: 40");
-// for (i = 0; i < 4; i++) {
-// for (j = i * slots; j < i * slots + slots; j++) {
-// data[j] = lwMgr.allocate();
-// }
-//
-// System.out.println(lwMgr.prettyPrint());
-// }
-// }
-//
-// ////////////////////////////////////////////////
-// // end of unit test
-// ////////////////////////////////////////////////
+ // ////////////////////////////////////////////////
+ // // begin of unit test
+ // ////////////////////////////////////////////////
+ //
+ // public static final int SHRINK_TIMER_THRESHOLD = 0; //for unit test
+ //
+ // /**
+ // * @param args
+ // */
+ // public static void main(String[] args) {
+ // final int DataSize = 5000;
+ //
+ // int i, j;
+ // int slots = ChildLockWaiterArrayManager.NUM_OF_SLOTS;
+ // int data[] = new int[DataSize];
+ // LockWaiterManager lwMgr = new LockWaiterManager();
+ //
+ // //allocate: 50
+ // System.out.println("allocate: 50");
+ // for (i = 0; i < 5; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // data[j] = lwMgr.allocate();
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //deallocate from the last child to the first child
+ // System.out.println("deallocate from the last child to the first child");
+ // for (i = 4; i >= 0; i--) {
+ // for (j = i * slots + slots - 1; j >= i * slots; j--) {
+ // lwMgr.deallocate(data[j]);
+ // }
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //allocate: 50
+ // System.out.println("allocate: 50");
+ // for (i = 0; i < 5; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // data[j] = lwMgr.allocate();
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //deallocate from the first child to last child
+ // System.out.println("deallocate from the first child to last child");
+ // for (i = 0; i < 5; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // lwMgr.deallocate(data[j]);
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //allocate: 50
+ // System.out.println("allocate: 50");
+ // for (i = 0; i < 5; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // data[j] = lwMgr.allocate();
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //deallocate from the first child to 4th child
+ // System.out.println("deallocate from the first child to 4th child");
+ // for (i = 0; i < 4; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // lwMgr.deallocate(data[j]);
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ //
+ // //allocate: 40
+ // System.out.println("allocate: 40");
+ // for (i = 0; i < 4; i++) {
+ // for (j = i * slots; j < i * slots + slots; j++) {
+ // data[j] = lwMgr.allocate();
+ // }
+ //
+ // System.out.println(lwMgr.prettyPrint());
+ // }
+ // }
+ //
+ // ////////////////////////////////////////////////
+ // // end of unit test
+ // ////////////////////////////////////////////////
public LockWaiterManager() {
pArray = new ArrayList<ChildLockWaiterArrayManager>();
@@ -249,7 +249,7 @@
break;
}
}
-
+
//reset allocChild to the first buffer
allocChild = 0;
@@ -271,19 +271,19 @@
}
return s.toString();
}
-
+
public void coreDump(OutputStream os) {
StringBuilder sb = new StringBuilder("\n########### LockWaiterManager Status #############\n");
int size = pArray.size();
ChildLockWaiterArrayManager child;
- sb.append("Number of Child: " + size + "\n");
+ sb.append("Number of Child: " + size + "\n");
for (int i = 0; i < size; i++) {
try {
child = pArray.get(i);
sb.append("child[" + i + "]");
sb.append(child.prettyPrint());
-
+
os.write(sb.toString().getBytes());
} catch (IOException e) {
//ignore IOException
@@ -291,11 +291,11 @@
sb = new StringBuilder();
}
}
-
+
public int getShrinkTimerThreshold() {
return SHRINK_TIMER_THRESHOLD;
}
-
+
public LockWaiter getLockWaiter(int slotNum) {
return pArray.get(slotNum / ChildLockWaiterArrayManager.NUM_OF_SLOTS).getLockWaiter(
slotNum % ChildLockWaiterArrayManager.NUM_OF_SLOTS);
@@ -304,7 +304,7 @@
class ChildLockWaiterArrayManager {
public static final int NUM_OF_SLOTS = 100; //number of LockWaiter objects in 'childArray'.
-// public static final int NUM_OF_SLOTS = 10; //for unit test
+ // public static final int NUM_OF_SLOTS = 10; //for unit test
private int freeSlotNum;
private int occupiedSlots; //-1 represents 'deinitialized' state.
@@ -336,13 +336,13 @@
freeSlotNum = childArray[currentSlot].getNextFreeSlot();
childArray[currentSlot].setWait(true);
childArray[currentSlot].setVictim(false);
- childArray[currentSlot].setWaiterCount((byte)0);
+ childArray[currentSlot].setWaiterCount((byte) 0);
childArray[currentSlot].setNextWaiterObjId(-1);
childArray[currentSlot].setNextWaitingResourceObjId(-1);
childArray[currentSlot].setBeginWaitTime(-1l);
occupiedSlots++;
if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName()+" Alloc LockWaiterId("+currentSlot+")");
+ System.out.println(Thread.currentThread().getName() + " Alloc LockWaiterId(" + currentSlot + ")");
}
return currentSlot;
}
@@ -352,7 +352,7 @@
freeSlotNum = slotNum;
occupiedSlots--;
if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName()+" Dealloc LockWaiterId("+slotNum+")");
+ System.out.println(Thread.currentThread().getName() + " Dealloc LockWaiterId(" + slotNum + ")");
}
}
@@ -380,7 +380,7 @@
public int getFreeSlotNum() {
return freeSlotNum;
}
-
+
public String prettyPrint() {
LockWaiter waiter;
StringBuilder sb = new StringBuilder();
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/PrimitiveIntHashMap.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/PrimitiveIntHashMap.java
index 2ac0c64..01becdd 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/PrimitiveIntHashMap.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/PrimitiveIntHashMap.java
@@ -21,16 +21,15 @@
* PrimitiveIntHashMap supports primitive int type as key and value.
* The hash map grows when the available slots in a bucket are overflowed.
* Also, the hash map shrinks according to the following shrink policy.
- * : Shrink when the resource under-utilization lasts for a certain threshold time.
- *
+ * Shrink when the resource under-utilization lasts for a certain threshold time.
+ *
* @author kisskys
- *
*/
public class PrimitiveIntHashMap {
private final int CHILD_BUCKETS; //INIT_NUM_OF_BUCKETS;
private final int NUM_OF_SLOTS; //NUM_OF_SLOTS_IN_A_BUCKET;
private final int SHRINK_TIMER_THRESHOLD;
-
+
private int occupiedSlots;
private ArrayList<ChildIntArrayManager> pArray; //parent array
private int hashMod;
@@ -41,113 +40,113 @@
private int iterChildIndex;
private KeyValuePair iterPair;
-// ////////////////////////////////////////////////
-// // begin of unit test
-// ////////////////////////////////////////////////
-//
-// /**
-// * @param args
-// */
-// public static void main(String[] args) {
-// int i, j;
-// int k = 0;
-// int num = 5;
-// int key[] = new int[500];
-// int val[] = new int[500];
-// KeyValuePair pair;
-// PrimitiveIntHashMap map = new PrimitiveIntHashMap(1<<4, 1<<3, 5);
-//
-// for (j=0; j < num; j++) {
-//
-// k += 100;
-// //generate data
-// for (i=0; i < k; i++) {
-// key[i] = i;
-// val[i] = i;
-// }
-//
-// //put data to map
-// for (i=0; i < k-30; i++) {
-// map.put(key[i], val[i]);
-// }
-//
-// //put data to map
-// for (i=0; i < k-30; i++) {
-// map.put(key[i], val[i]);
-// }
-//
-// map.beginIterate();
-// pair = map.getNextKeyValue();
-// i = 0;
-// while (pair != null) {
-// i++;
-// System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
-// pair = map.getNextKeyValue();
-// }
-//
-// //System.out.println(map.prettyPrint());
-//
-// for (i=k-20; i< k; i++) { //skip X70~X79
-// map.put(key[i], val[i]);
-// }
-//
-// System.out.println(map.prettyPrint());
-//
-// //remove data to map
-// for (i=0; i < k-10; i++) {
-// map.remove(key[i]);
-// try {
-// Thread.currentThread().sleep(1);
-// } catch (InterruptedException e) {
-// e.printStackTrace();
-// }
-// }
-//
-// map.beginIterate();
-// pair = map.getNextKeyValue();
-// i = 0;
-// while (pair != null) {
-// i++;
-// System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
-// pair = map.getNextKeyValue();
-// }
-//
-// //remove data to map
-// for (i=0; i < k-10; i++) {
-// map.remove(key[i]);
-// try {
-// Thread.currentThread().sleep(1);
-// } catch (InterruptedException e) {
-// // TODO Auto-generated catch block
-// e.printStackTrace();
-// }
-// }
-//
-// System.out.println(map.prettyPrint());
-//
-// //get data from map
-// for (i=0; i < k; i++) {
-// System.out.println(""+i+"=> key:"+ key[i] + ", val:"+val[i] +", result: " + map.get(key[i]));
-// }
-// }
-//
-// map.beginIterate();
-// pair = map.getNextKeyValue();
-// i = 0;
-// while (pair != null) {
-// i++;
-// System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
-// pair = map.getNextKeyValue();
-// }
-// }
-//
-// ////////////////////////////////////////////////
-// // end of unit test
-// ////////////////////////////////////////////////
-
+ // ////////////////////////////////////////////////
+ // // begin of unit test
+ // ////////////////////////////////////////////////
+ //
+ // /**
+ // * @param args
+ // */
+ // public static void main(String[] args) {
+ // int i, j;
+ // int k = 0;
+ // int num = 5;
+ // int key[] = new int[500];
+ // int val[] = new int[500];
+ // KeyValuePair pair;
+ // PrimitiveIntHashMap map = new PrimitiveIntHashMap(1<<4, 1<<3, 5);
+ //
+ // for (j=0; j < num; j++) {
+ //
+ // k += 100;
+ // //generate data
+ // for (i=0; i < k; i++) {
+ // key[i] = i;
+ // val[i] = i;
+ // }
+ //
+ // //put data to map
+ // for (i=0; i < k-30; i++) {
+ // map.put(key[i], val[i]);
+ // }
+ //
+ // //put data to map
+ // for (i=0; i < k-30; i++) {
+ // map.put(key[i], val[i]);
+ // }
+ //
+ // map.beginIterate();
+ // pair = map.getNextKeyValue();
+ // i = 0;
+ // while (pair != null) {
+ // i++;
+ // System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
+ // pair = map.getNextKeyValue();
+ // }
+ //
+ // //System.out.println(map.prettyPrint());
+ //
+ // for (i=k-20; i< k; i++) { //skip X70~X79
+ // map.put(key[i], val[i]);
+ // }
+ //
+ // System.out.println(map.prettyPrint());
+ //
+ // //remove data to map
+ // for (i=0; i < k-10; i++) {
+ // map.remove(key[i]);
+ // try {
+ // Thread.currentThread().sleep(1);
+ // } catch (InterruptedException e) {
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ // map.beginIterate();
+ // pair = map.getNextKeyValue();
+ // i = 0;
+ // while (pair != null) {
+ // i++;
+ // System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
+ // pair = map.getNextKeyValue();
+ // }
+ //
+ // //remove data to map
+ // for (i=0; i < k-10; i++) {
+ // map.remove(key[i]);
+ // try {
+ // Thread.currentThread().sleep(1);
+ // } catch (InterruptedException e) {
+ // // TODO Auto-generated catch block
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ // System.out.println(map.prettyPrint());
+ //
+ // //get data from map
+ // for (i=0; i < k; i++) {
+ // System.out.println(""+i+"=> key:"+ key[i] + ", val:"+val[i] +", result: " + map.get(key[i]));
+ // }
+ // }
+ //
+ // map.beginIterate();
+ // pair = map.getNextKeyValue();
+ // i = 0;
+ // while (pair != null) {
+ // i++;
+ // System.out.println("["+i+"] key:"+ pair.key + ", val:"+ pair.value);
+ // pair = map.getNextKeyValue();
+ // }
+ // }
+ //
+ // ////////////////////////////////////////////////
+ // // end of unit test
+ // ////////////////////////////////////////////////
+
public PrimitiveIntHashMap() {
- CHILD_BUCKETS = 1<<9; //INIT_NUM_OF_BUCKETS;
- NUM_OF_SLOTS = 1<<3; //NUM_OF_SLOTS_IN_A_BUCKET;
+ CHILD_BUCKETS = 1 << 9; //INIT_NUM_OF_BUCKETS;
+ NUM_OF_SLOTS = 1 << 3; //NUM_OF_SLOTS_IN_A_BUCKET;
SHRINK_TIMER_THRESHOLD = 120000; //2min
pArray = new ArrayList<ChildIntArrayManager>();
pArray.add(new ChildIntArrayManager(this));
@@ -155,7 +154,7 @@
occupiedSlots = 0;
iterPair = new KeyValuePair();
}
-
+
public PrimitiveIntHashMap(int childBuckets, int numOfSlots, int shrinkTimerThreshold) {
CHILD_BUCKETS = childBuckets;
NUM_OF_SLOTS = numOfSlots;
@@ -166,126 +165,127 @@
occupiedSlots = 0;
iterPair = new KeyValuePair();
}
-
+
public void put(int key, int value) {
int growCount = 0;
int bucketNum = hash(key);
- ChildIntArrayManager child = pArray.get(bucketNum/CHILD_BUCKETS);
- while (child.isFull(bucketNum%CHILD_BUCKETS)) {
+ ChildIntArrayManager child = pArray.get(bucketNum / CHILD_BUCKETS);
+ while (child.isFull(bucketNum % CHILD_BUCKETS)) {
growHashMap();
bucketNum = hash(key);
- child = pArray.get(bucketNum/CHILD_BUCKETS);
+ child = pArray.get(bucketNum / CHILD_BUCKETS);
if (growCount > 2) {
//changeHashFunc();
}
growCount++;
}
- occupiedSlots += child.put(bucketNum%CHILD_BUCKETS, key, value, false);
+ occupiedSlots += child.put(bucketNum % CHILD_BUCKETS, key, value, false);
}
-
- public void upsert (int key, int value) {
+
+ public void upsert(int key, int value) {
int growCount = 0;
int bucketNum = hash(key);
- ChildIntArrayManager child = pArray.get(bucketNum/CHILD_BUCKETS);
- while (child.isFull(bucketNum%CHILD_BUCKETS)) {
+ ChildIntArrayManager child = pArray.get(bucketNum / CHILD_BUCKETS);
+ while (child.isFull(bucketNum % CHILD_BUCKETS)) {
growHashMap();
bucketNum = hash(key);
- child = pArray.get(bucketNum/CHILD_BUCKETS);
+ child = pArray.get(bucketNum / CHILD_BUCKETS);
if (growCount > 2) {
//changeHashFunc();
}
growCount++;
}
- occupiedSlots += child.put(bucketNum%CHILD_BUCKETS, key, value, true);
+ occupiedSlots += child.put(bucketNum % CHILD_BUCKETS, key, value, true);
}
-
+
private int hash(int key) {
- return key%hashMod;
+ return key % hashMod;
}
-
+
private void growHashMap() {
int size = pArray.size();
- int i;
-
+ int i;
+
//grow buckets by adding more child
- for (i=0; i<size; i++) {
+ for (i = 0; i < size; i++) {
pArray.add(new ChildIntArrayManager(this));
}
-
+
//increase hashMod
hashMod *= 2;
-
+
//re-hash
- rehash(0, size, hashMod/2);
+ rehash(0, size, hashMod / 2);
}
-
+
private void shrinkHashMap() {
int size = pArray.size();
int i;
-
+
//decrease hashMod
hashMod /= 2;
-
+
//re-hash
- rehash(size/2, size, hashMod*2);
-
+ rehash(size / 2, size, hashMod * 2);
+
//shrink buckets by removing child(s)
- for (i=size-1; i>=size/2;i--) {
+ for (i = size - 1; i >= size / 2; i--) {
pArray.remove(i);
}
}
-
+
private void rehash(int begin, int end, int oldHashMod) {
int i, j, k;
int key, value;
ChildIntArrayManager child;
-
+
//re-hash
- for (i=begin; i<end; i++) {
+ for (i = begin; i < end; i++) {
child = pArray.get(i);
- for (j=0; j<CHILD_BUCKETS; j++) {
+ for (j = 0; j < CHILD_BUCKETS; j++) {
if (child.cArray[j][0] == 0) {
continue;
}
- for (k=1; k<NUM_OF_SLOTS; k++) {
+ for (k = 1; k < NUM_OF_SLOTS; k++) {
//if the hashValue of the key is different, then re-hash it.
- key = child.cArray[j][k*2];
- if (hash(key) != key%oldHashMod) {
- value = child.cArray[j][k*2+1];
+ key = child.cArray[j][k * 2];
+ if (hash(key) != key % oldHashMod) {
+ value = child.cArray[j][k * 2 + 1];
//remove existing key and value
//Notice! To avoid bucket iteration, child.remove() is not used.
- child.cArray[j][k*2] = -1;
+ child.cArray[j][k * 2] = -1;
child.cArray[j][0]--;
//re-hash it
- pArray.get(hash(key)/CHILD_BUCKETS).put(hash(key)%CHILD_BUCKETS, key, value, false);
+ pArray.get(hash(key) / CHILD_BUCKETS).put(hash(key) % CHILD_BUCKETS, key, value, false);
}
}
}
}
}
-
-// private void changeHashFunc() {
-// //TODO need to implement.
-// throw new UnsupportedOperationException("changeHashFunc() not implemented");
-// }
-
+
+ // private void changeHashFunc() {
+ // //TODO need to implement.
+ // throw new UnsupportedOperationException("changeHashFunc() not implemented");
+ // }
+
public int get(int key) {
int bucketNum = hash(key);
- return pArray.get(bucketNum/CHILD_BUCKETS).get(bucketNum%CHILD_BUCKETS, key);
+ return pArray.get(bucketNum / CHILD_BUCKETS).get(bucketNum % CHILD_BUCKETS, key);
}
-
+
public void remove(int key) {
int bucketNum = hash(key);
- occupiedSlots -= pArray.get(bucketNum/CHILD_BUCKETS).remove(bucketNum%CHILD_BUCKETS, key);
-
+ occupiedSlots -= pArray.get(bucketNum / CHILD_BUCKETS).remove(bucketNum % CHILD_BUCKETS, key);
+
if (needShrink()) {
shrinkHashMap();
}
}
-
+
/**
* Shrink policy:
- * Shrink when the resource under-utilization lasts for a certain amount of time.
+ * Shrink when the resource under-utilization lasts for a certain amount of time.
+ *
* @return
*/
private boolean needShrink() {
@@ -294,7 +294,7 @@
if (usedSlots == 0) {
usedSlots = 1;
}
- if (size > 1 && size*CHILD_BUCKETS*NUM_OF_SLOTS/usedSlots >= 3 && isSafeToShrink()) {
+ if (size > 1 && size * CHILD_BUCKETS * NUM_OF_SLOTS / usedSlots >= 3 && isSafeToShrink()) {
if (isShrinkTimerOn) {
if (System.currentTimeMillis() - shrinkTimer >= SHRINK_TIMER_THRESHOLD) {
isShrinkTimerOn = false;
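For reference, the shrink test this hunk reflows implements the policy stated
in the class comment: shrink only once occupancy has stayed below a third of
capacity for a full SHRINK_TIMER_THRESHOLD window. A self-contained sketch of
that timer-gated check follows; the reset-on-recovery branch is assumed from
context, and all names are illustrative.

// Sketch of the timer-gated shrink decision; not the patch's code.
final class ShrinkPolicy {
    private final long thresholdMillis;
    private boolean timerOn = false;
    private long timerStart;

    ShrinkPolicy(long thresholdMillis) {
        this.thresholdMillis = thresholdMillis;
    }

    boolean needShrink(int capacity, int usedSlots) {
        if (usedSlots == 0) {
            usedSlots = 1;
        }
        if (capacity / usedSlots >= 3) { // under-utilized
            if (timerOn) {
                if (System.currentTimeMillis() - timerStart >= thresholdMillis) {
                    timerOn = false; // fire once, re-arm on the next episode
                    return true;
                }
            } else {
                timerOn = true;
                timerStart = System.currentTimeMillis();
            }
        } else {
            timerOn = false; // utilization recovered; reset the timer
        }
        return false;
    }
}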
@@ -311,7 +311,7 @@
}
return false;
}
-
+
private boolean isSafeToShrink() {
int i, j;
int size = pArray.size();
@@ -319,50 +319,51 @@
//[HChild(Head Child):0 and TChild(Tail Child): 4], [1(H),5(T)], [2(H),6(T)] and so on.
//When the map shrinks, the sum of occupied slots in H/TChild should not exceed the NUM_OF_SLOTS-1.
//Then it is safe to shrink. Otherwise, unsafe.
- ChildIntArrayManager HChild, TChild;
-
- for (i=0; i<size/2; i++){
+ ChildIntArrayManager HChild, TChild;
+
+ for (i = 0; i < size / 2; i++) {
HChild = pArray.get(i);
- TChild = pArray.get(size/2+i);
- for (j=0; j<CHILD_BUCKETS; j++) {
- if (HChild.cArray[j][0] + TChild.cArray[j][0] > NUM_OF_SLOTS-1) {
+ TChild = pArray.get(size / 2 + i);
+ for (j = 0; j < CHILD_BUCKETS; j++) {
+ if (HChild.cArray[j][0] + TChild.cArray[j][0] > NUM_OF_SLOTS - 1) {
return false;
}
}
}
return true;
}
-
+
public String prettyPrint() {
StringBuilder s = new StringBuilder("\n########### PrimitiveIntHashMap Status #############\n");
ChildIntArrayManager child;
int i, j, k;
int size = pArray.size();
- for (i=0; i<size;i++) {
+ for (i = 0; i < size; i++) {
child = pArray.get(i);
s.append("child[").append(i).append("]\n");
- for (j=0; j<CHILD_BUCKETS;j++) {
+ for (j = 0; j < CHILD_BUCKETS; j++) {
s.append(j).append(" ");
- for (k=0; k<NUM_OF_SLOTS;k++) {
- s.append("[").append(child.cArray[j][k*2]).append(",").append(child.cArray[j][k*2+1]).append("] ");
+ for (k = 0; k < NUM_OF_SLOTS; k++) {
+ s.append("[").append(child.cArray[j][k * 2]).append(",").append(child.cArray[j][k * 2 + 1])
+ .append("] ");
}
s.append("\n");
}
}
return s.toString();
}
-
+
public int getNumOfSlots() {
return NUM_OF_SLOTS;
}
-
+
public int getNumOfChildBuckets() {
return CHILD_BUCKETS;
}
-
+
public void clear(boolean needShrink) {
int size = pArray.size();
- for (int i=size-1; i >= 0; i--) {
+ for (int i = size - 1; i >= 0; i--) {
if (needShrink && i != 0) {
pArray.remove(i);
} else {
@@ -371,29 +372,29 @@
}
occupiedSlots = 0;
}
-
+
///////////////////////////////////////
// iterate method
///////////////////////////////////////
-
+
public void beginIterate() {
iterChildIndex = 0;
iterBucketIndex = 0;
iterSlotIndex = 1;
}
-
+
public KeyValuePair getNextKeyValue() {
for (; iterChildIndex < pArray.size(); iterChildIndex++, iterBucketIndex = 0) {
for (; iterBucketIndex < CHILD_BUCKETS; iterBucketIndex++, iterSlotIndex = 1) {
- if (iterSlotIndex ==1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
+ if (iterSlotIndex == 1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
continue;
}
for (; iterSlotIndex < NUM_OF_SLOTS; iterSlotIndex++) {
- iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex*2];
+ iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex * 2];
if (iterPair.key == -1) {
continue;
}
- iterPair.value = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex*2+1];
+ iterPair.value = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex * 2 + 1];
iterSlotIndex++;
return iterPair;
}
@@ -401,15 +402,15 @@
}
return null;
}
-
+
public int getNextKey() {
for (; iterChildIndex < pArray.size(); iterChildIndex++, iterBucketIndex = 0) {
for (; iterBucketIndex < CHILD_BUCKETS; iterBucketIndex++, iterSlotIndex = 1) {
- if (iterSlotIndex ==1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
+ if (iterSlotIndex == 1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
continue;
}
for (; iterSlotIndex < NUM_OF_SLOTS; iterSlotIndex++) {
- iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex*2];
+ iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex * 2];
if (iterPair.key == -1) {
continue;
}
@@ -420,19 +421,19 @@
}
return -1;
}
-
+
public int getNextValue() {
for (; iterChildIndex < pArray.size(); iterChildIndex++, iterBucketIndex = 0) {
for (; iterBucketIndex < CHILD_BUCKETS; iterBucketIndex++, iterSlotIndex = 1) {
- if (iterSlotIndex ==1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
+ if (iterSlotIndex == 1 && pArray.get(iterChildIndex).cArray[iterBucketIndex][0] == 0) {
continue;
}
for (; iterSlotIndex < NUM_OF_SLOTS; iterSlotIndex++) {
- iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex*2];
+ iterPair.key = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex * 2];
if (iterPair.key == -1) {
continue;
}
- iterPair.value = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex*2+1];
+ iterPair.value = pArray.get(iterChildIndex).cArray[iterBucketIndex][iterSlotIndex * 2 + 1];
iterSlotIndex++;
return iterPair.value;
}
@@ -440,23 +441,23 @@
}
return -1;
}
-
+
public static class KeyValuePair {
public int key;
- public int value;
+ public int value;
}
}
class ChildIntArrayManager {
- private final int DIM1_SIZE;
- private final int DIM2_SIZE;
+ private final int DIM1_SIZE;
+ private final int DIM2_SIZE;
private final int NUM_OF_SLOTS;
public int[][] cArray; //child array
-
+
public ChildIntArrayManager(PrimitiveIntHashMap parentHashMap) {
DIM1_SIZE = parentHashMap.getNumOfChildBuckets();
DIM2_SIZE = parentHashMap.getNumOfSlots() * 2; //2: Array of [key, value] pair
- NUM_OF_SLOTS = parentHashMap.getNumOfSlots() ;
+ NUM_OF_SLOTS = parentHashMap.getNumOfSlots();
initialize();
}
@@ -468,11 +469,11 @@
//cArray[i][1] is not used.
cArray[i][0] = 0;
for (j = 1; j < NUM_OF_SLOTS; j++) {
- cArray[i][j*2] = -1; // -1 represent that the slot is empty
+ cArray[i][j * 2] = -1; // -1 represents that the slot is empty
}
}
}
-
+
public void clear() {
int i, j;
for (i = 0; i < DIM1_SIZE; i++) {
@@ -483,23 +484,23 @@
}
cArray[i][0] = 0;
for (j = 1; j < NUM_OF_SLOTS; j++) {
- cArray[i][j*2] = -1; // -1 represent that the slot is empty
+ cArray[i][j * 2] = -1; // -1 represents that the slot is empty
}
}
}
-
+
public void deinitialize() {
cArray = null;
}
-
+
public void allocate() {
initialize();
}
public boolean isFull(int bucketNum) {
- return cArray[bucketNum][0] == NUM_OF_SLOTS-1;
+ return cArray[bucketNum][0] == NUM_OF_SLOTS - 1;
}
-
+
public boolean isEmpty(int bucketNum) {
return cArray[bucketNum][0] == 0;
}
@@ -514,11 +515,11 @@
* @param key
* @param value
* @param isUpsert
- * @return 1 for new insertion, 0 for key duplication
+ * @return 1 for new insertion, 0 for key duplication
*/
public int put(int bucketNum, int key, int value, boolean isUpsert) {
int i;
- int emptySlot=-1;
+ int emptySlot = -1;
if (cArray[bucketNum][0] == 0) {
cArray[bucketNum][2] = key;
@@ -528,65 +529,63 @@
}
for (i = 1; i < NUM_OF_SLOTS; i++) {
- if (cArray[bucketNum][i*2] == key) {
+ if (cArray[bucketNum][i * 2] == key) {
if (isUpsert) {
- cArray[bucketNum][i*2+1] = value;
+ cArray[bucketNum][i * 2 + 1] = value;
}
return 0;
- }
- else if (cArray[bucketNum][i*2] == -1) {
+ } else if (cArray[bucketNum][i * 2] == -1) {
emptySlot = i;
}
}
-
+
if (emptySlot == -1) {
throw new UnsupportedOperationException("bucket overflow: no empty slot available");
}
-
- cArray[bucketNum][emptySlot*2] = key;
- cArray[bucketNum][emptySlot*2+1] = value;
+
+ cArray[bucketNum][emptySlot * 2] = key;
+ cArray[bucketNum][emptySlot * 2 + 1] = value;
cArray[bucketNum][0]++;
return 1;
}
public int get(int bucketNum, int key) {
int i;
-
+
if (cArray[bucketNum][0] == 0) {
return -1;
}
for (i = 1; i < NUM_OF_SLOTS; i++) {
- if (cArray[bucketNum][i*2] == key) {
- return cArray[bucketNum][i*2+1];
+ if (cArray[bucketNum][i * 2] == key) {
+ return cArray[bucketNum][i * 2 + 1];
}
}
return -1;
}
-
+
/**
* Remove the key if it exists; otherwise, ignore it.
+ *
* @param bucketNum
* @param key
- * @return 1 for success, 0 if the key doesn't exist
+ * @return 1 for success, 0 if the key doesn't exist
*/
public int remove(int bucketNum, int key) {
int i;
-
+
if (cArray[bucketNum][0] == 0) {
return 0;
}
for (i = 1; i < NUM_OF_SLOTS; i++) {
- if (cArray[bucketNum][i*2] == key) {
- cArray[bucketNum][i*2] = -1;
+ if (cArray[bucketNum][i * 2] == key) {
+ cArray[bucketNum][i * 2] = -1;
cArray[bucketNum][0]--;
return 1;
}
}
-
+
return 0;
}
}
-
-
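The whitespace-only changes above leave the data layout implicit, so here is a minimal standalone sketch of the bucket encoding that the `i * 2` / `i * 2 + 1` arithmetic relies on (class and constant names are illustrative, not from the patch): each bucket stores its occupancy count in slot 0 and [key, value] pairs in the remaining slots, with key -1 marking an empty slot.

    // Minimal sketch of the bucket layout used by ChildIntArrayManager.
    // Names and sizes are illustrative, not the patch's actual constants.
    public class IntBucketSketch {
        private static final int NUM_OF_SLOTS = 4;               // slots per bucket, incl. the count slot
        private final int[] bucket = new int[NUM_OF_SLOTS * 2];  // [count, unused, k1, v1, k2, v2, ...]

        public IntBucketSketch() {
            for (int i = 1; i < NUM_OF_SLOTS; i++) {
                bucket[i * 2] = -1; // -1 marks an empty key slot
            }
        }

        public boolean put(int key, int value) {
            int emptySlot = -1;
            for (int i = 1; i < NUM_OF_SLOTS; i++) {
                if (bucket[i * 2] == key) {
                    return false;      // duplicate key (the real code may upsert instead)
                } else if (bucket[i * 2] == -1) {
                    emptySlot = i;     // remember a free slot
                }
            }
            if (emptySlot == -1) {
                return false;          // bucket full; the real map grows instead
            }
            bucket[emptySlot * 2] = key;
            bucket[emptySlot * 2 + 1] = value;
            bucket[0]++;               // slot 0 tracks occupancy
            return true;
        }

        public int get(int key) {
            for (int i = 1; i < NUM_OF_SLOTS; i++) {
                if (bucket[i * 2] == key) {
                    return bucket[i * 2 + 1];
                }
            }
            return -1;                 // same "not found" sentinel the map uses
        }
    }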
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Request.json b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Request.json
new file mode 100644
index 0000000..0c4fa71
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Request.json
@@ -0,0 +1,29 @@
+{
+ "name" : "Request",
+ "fields" : [
+ {
+ "name" : "resource id",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "job slot",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "prev job request",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "next job request",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "next request",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "lock mode",
+ "type" : "INT"
+ }
+ ]
+}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Resource.json b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Resource.json
new file mode 100644
index 0000000..d0d553a
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/Resource.json
@@ -0,0 +1,37 @@
+{
+ "name" : "Resource",
+ "fields" : [
+ {
+ "name" : "last holder",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "first waiter",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "first upgrader",
+ "type" : "GLOBAL",
+ "initial" : "-1"
+ },
+ {
+ "name" : "next",
+ "type" : "GLOBAL"
+ },
+ {
+ "name" : "max mode",
+ "type" : "INT",
+ "initial" : "edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode.NL"
+ },
+ {
+ "name" : "dataset id",
+ "type" : "INT"
+ },
+ {
+ "name" : "pk hash val",
+ "type" : "INT"
+ }
+ ]
+}
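Nothing in this diff shows the code generator that consumes these two record specs, so the mapping below is purely an assumption: if "GLOBAL" fields were 8-byte references and "INT" fields 4 bytes, a consumer could derive per-field offsets along these lines (hypothetical helper, not part of the patch).

    // Hypothetical offset computation for a record spec like Request.json.
    // Field widths and alignment rules are assumptions; the actual generator
    // is not shown in this diff.
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RecordLayoutSketch {
        public static Map<String, Integer> offsets(String[][] fields) {
            Map<String, Integer> result = new LinkedHashMap<>();
            int offset = 0;
            for (String[] f : fields) { // f[0] = name, f[1] = type
                result.put(f[0], offset);
                offset += "GLOBAL".equals(f[1]) ? 8 : 4;
            }
            return result;
        }

        public static void main(String[] args) {
            // The six fields of Request.json, in declaration order.
            String[][] request = { { "resource id", "GLOBAL" }, { "job slot", "GLOBAL" },
                    { "prev job request", "GLOBAL" }, { "next job request", "GLOBAL" },
                    { "next request", "GLOBAL" }, { "lock mode", "INT" } };
            System.out.println(offsets(request)); // {resource id=0, job slot=8, ...}
        }
    }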
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
index d9c6097..cab0f7c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
@@ -15,8 +15,8 @@
package edu.uci.ics.asterix.transaction.management.service.locking;
import java.util.LinkedList;
+import java.util.concurrent.Executor;
-import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
/**
@@ -36,14 +36,14 @@
int timeoutThreshold;
int sweepThreshold;
- public TimeOutDetector(LockManager lockMgr) {
+ public TimeOutDetector(LockManager lockMgr, Executor threadExecutor) {
this.victimList = new LinkedList<LockWaiter>();
this.lockMgr = lockMgr;
this.trigger = new Thread(new TimeoutTrigger(this));
this.timeoutThreshold = lockMgr.getTransactionProperties().getTimeoutWaitThreshold();
this.sweepThreshold = lockMgr.getTransactionProperties().getTimeoutSweepThreshold();
trigger.setDaemon(true);
- AsterixThreadExecutor.INSTANCE.execute(trigger);
+ threadExecutor.execute(trigger);
}
public void sweep() throws ACIDException {
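The TimeOutDetector change replaces the AsterixThreadExecutor singleton with a constructor-injected java.util.concurrent.Executor, so callers (and tests) control where the trigger thread runs. A minimal sketch of that injection pattern, with hypothetical names:

    // Sketch of the injected-executor pattern adopted above. A test can pass
    // a same-thread executor instead of a shared pool.
    import java.util.concurrent.Executor;

    public class SweeperSketch {
        private final Runnable trigger;

        public SweeperSketch(Executor threadExecutor) {
            this.trigger = () -> System.out.println("sweeping expired waiters");
            threadExecutor.execute(trigger); // caller decides where this runs
        }

        public static void main(String[] args) {
            new SweeperSketch(Runnable::run); // direct executor, handy in tests
        }
    }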
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
index 933afcd..7292f82 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
@@ -32,7 +32,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
import edu.uci.ics.asterix.common.transactions.ILogManager;
import edu.uci.ics.asterix.common.transactions.ILogReader;
@@ -41,7 +40,6 @@
import edu.uci.ics.asterix.common.transactions.ITransactionManager;
import edu.uci.ics.asterix.common.transactions.LogManagerProperties;
import edu.uci.ics.asterix.common.transactions.MutableLong;
-import edu.uci.ics.asterix.transaction.management.service.locking.LockManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
@@ -50,6 +48,7 @@
public static final boolean IS_DEBUG_MODE = false;// true
private static final Logger LOGGER = Logger.getLogger(LogManager.class.getName());
private final TransactionSubsystem txnSubsystem;
+
private final LogManagerProperties logManagerProperties;
private final long logFileSize;
private final int logPageSize;
@@ -82,7 +81,7 @@
emptyQ = new LinkedBlockingQueue<LogPage>(numLogPages);
flushQ = new LinkedBlockingQueue<LogPage>(numLogPages);
for (int i = 0; i < numLogPages; i++) {
- emptyQ.offer(new LogPage((LockManager) txnSubsystem.getLockManager(), logPageSize, flushLSN));
+ emptyQ.offer(new LogPage(txnSubsystem, logPageSize, flushLSN));
}
appendLSN = initializeLogAnchor(nextLogFileId);
flushLSN.set(appendLSN);
@@ -92,7 +91,7 @@
appendChannel = getFileChannel(appendLSN, false);
getAndInitNewPage();
logFlusher = new LogFlusher(this, emptyQ, flushQ);
- futureLogFlusher = AsterixThreadExecutor.INSTANCE.submit(logFlusher);
+ futureLogFlusher = txnSubsystem.getAsterixAppRuntimeContextProvider().getThreadExecutor().submit(logFlusher);
}
@Override
@@ -184,20 +183,19 @@
public void stop(boolean dumpState, OutputStream os) {
terminateLogFlusher();
if (dumpState) {
- // #. dump Configurable Variables
- dumpConfVars(os);
-
- // #. dump LSNInfo
- dumpLSNInfo(os);
-
- try {
- os.flush();
- } catch (IOException e) {
- // ignore
- }
+ dumpState(os);
}
}
+ @Override
+ public void dumpState(OutputStream os) {
+ // #. dump Configurable Variables
+ dumpConfVars(os);
+
+ // #. dump LSNInfo
+ dumpLSNInfo(os);
+ }
+
private void dumpConfVars(OutputStream os) {
try {
StringBuilder sb = new StringBuilder();
@@ -271,7 +269,7 @@
return logFileSize * fileId + offset;
}
- public void renewLogFiles() {
+ public void renewLogFiles() throws IOException {
terminateLogFlusher();
long lastMaxLogFileId = deleteAllLogFiles();
initializeLogManager(lastMaxLogFileId + 1);
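stop(dumpState, os) in LogManager now delegates to an extracted dumpState(os), so the same dump is available without shutting the component down; note the explicit os.flush() from the old inline version is gone. The shape of the refactor, reduced to essentials (interface assumed from context):

    // Reduced shape of the stop()/dumpState() refactor above; only the
    // extraction is the point, the names are assumed from context.
    import java.io.OutputStream;

    abstract class LifeCycleSketch {
        public void stop(boolean dumpState, OutputStream os) {
            terminate();
            if (dumpState) {
                dumpState(os); // previously inlined here, now callable on its own
            }
        }

        // Extracted so state can be dumped without stopping the component.
        // The explicit os.flush() from the old inline version was dropped.
        public void dumpState(OutputStream os) {
            dumpConfVars(os);
            dumpLSNInfo(os);
        }

        protected abstract void terminate();

        protected abstract void dumpConfVars(OutputStream os);

        protected abstract void dumpLSNInfo(OutputStream os);
    }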
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogPage.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogPage.java
index a3f42a7..cec6cd3 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogPage.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogPage.java
@@ -22,16 +22,21 @@
import java.util.logging.Logger;
import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.transactions.DatasetId;
import edu.uci.ics.asterix.common.transactions.ILogPage;
import edu.uci.ics.asterix.common.transactions.ILogRecord;
+import edu.uci.ics.asterix.common.transactions.ITransactionContext;
+import edu.uci.ics.asterix.common.transactions.JobId;
import edu.uci.ics.asterix.common.transactions.MutableLong;
import edu.uci.ics.asterix.transaction.management.service.locking.LockManager;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
public class LogPage implements ILogPage {
public static final boolean IS_DEBUG_MODE = false;//true
private static final Logger LOGGER = Logger.getLogger(LogPage.class.getName());
- private final LockManager lockMgr;
+ private final TransactionSubsystem txnSubsystem;
private final LogPageReader logPageReader;
private final int logPageSize;
private final MutableLong flushLSN;
@@ -45,9 +50,11 @@
private final LinkedBlockingQueue<ILogRecord> syncCommitQ;
private FileChannel fileChannel;
private boolean stop;
+ private DatasetId reusableDsId;
+ private JobId reusableJobId;
- public LogPage(LockManager lockMgr, int logPageSize, MutableLong flushLSN) {
- this.lockMgr = lockMgr;
+ public LogPage(TransactionSubsystem txnSubsystem, int logPageSize, MutableLong flushLSN) {
+ this.txnSubsystem = txnSubsystem;
this.logPageSize = logPageSize;
this.flushLSN = flushLSN;
appendBuffer = ByteBuffer.allocate(logPageSize);
@@ -59,6 +66,8 @@
flushOffset = 0;
isLastPage = false;
syncCommitQ = new LinkedBlockingQueue<ILogRecord>(logPageSize / ILogRecord.JOB_TERMINATE_LOG_SIZE);
+ reusableDsId = new DatasetId(-1);
+ reusableJobId = new JobId(-1);
}
////////////////////////////////////
@@ -187,7 +196,25 @@
private void batchUnlock(int beginOffset, int endOffset) throws ACIDException {
if (endOffset > beginOffset) {
logPageReader.initializeScan(beginOffset, endOffset);
- lockMgr.batchUnlock(this, logPageReader);
+
+ ITransactionContext txnCtx = null;
+
+ LogRecord logRecord = logPageReader.next();
+ while (logRecord != null) {
+ if (logRecord.getLogType() == LogType.ENTITY_COMMIT) {
+ reusableJobId.setId(logRecord.getJobId());
+ txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(reusableJobId, false);
+ reusableDsId.setId(logRecord.getDatasetId());
+ txnSubsystem.getLockManager().unlock(reusableDsId, logRecord.getPKHashValue(), LockMode.ANY, txnCtx);
+ txnCtx.notifyOptracker(false);
+ } else if (logRecord.getLogType() == LogType.JOB_COMMIT || logRecord.getLogType() == LogType.ABORT) {
+ reusableJobId.setId(logRecord.getJobId());
+ txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(reusableJobId, false);
+ txnCtx.notifyOptracker(true);
+ notifyJobTerminator();
+ }
+ logRecord = logPageReader.next();
+ }
}
}
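The inlined batchUnlock now resets two pre-allocated holders (reusableDsId, reusableJobId) per log record instead of constructing new DatasetId/JobId objects on the flush path. A standalone sketch of that allocation-avoidance idiom, with simplified types:

    // Sketch of the reusable-holder pattern used in batchUnlock above: one
    // mutable key object is reset per lookup instead of allocated per record.
    import java.util.HashMap;
    import java.util.Map;

    public class ReusableKeySketch {
        static final class MutableId {
            int id;
            void setId(int id) { this.id = id; }
            @Override public int hashCode() { return id; }
            @Override public boolean equals(Object o) {
                return o instanceof MutableId && ((MutableId) o).id == id;
            }
        }

        private final Map<MutableId, String> table = new HashMap<>();
        private final MutableId reusableId = new MutableId(); // one holder, reused

        public String lookup(int rawId) {
            reusableId.setId(rawId);      // reset instead of allocate
            return table.get(reusableId); // safe only because the holder is never
                                          // stored in the map as a key
        }
    }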
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogReader.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogReader.java
index 9dc966c..778d24b 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogReader.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogReader.java
@@ -39,7 +39,7 @@
private long bufferBeginLSN;
private long fileBeginLSN;
private FileChannel fileChannel;
-
+
private enum ReturnState {
FLUSH,
EOF
@@ -64,7 +64,7 @@
getFileChannel();
readPage();
}
-
+
//for scanning
@Override
public ILogRecord next() throws ACIDException {
@@ -81,7 +81,7 @@
readLSN += logRecord.getLogSize();
return logRecord;
}
-
+
private ReturnState waitForFlushOrReturnIfEOF() {
synchronized (flushLSN) {
while (readLSN >= flushLSN.get()) {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogType.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogType.java
index f9e9304..1716cfb 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogType.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogType.java
@@ -26,7 +26,6 @@
private static final String STRING_ABORT = "ABORT";
private static final String STRING_INVALID_LOG_TYPE = "INVALID_LOG_TYPE";
-
public static String toString(byte logType) {
switch (logType) {
case LogType.UPDATE:
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/PrimaryKeyTupleReference.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/PrimaryKeyTupleReference.java
index d45b209..7acc000 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/PrimaryKeyTupleReference.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/PrimaryKeyTupleReference.java
@@ -12,7 +12,7 @@
this.start = start;
this.length = length;
}
-
+
@Override
public int getFieldCount() {
return 1;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/CheckpointObject.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/CheckpointObject.java
index f2cb22a..49cb500 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/CheckpointObject.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/CheckpointObject.java
@@ -19,7 +19,7 @@
public class CheckpointObject implements Serializable, Comparable<CheckpointObject> {
private static final long serialVersionUID = 1L;
-
+
private final long checkpointLsn;
private final long minMCTFirstLsn;
private final int maxJobId;
@@ -31,7 +31,7 @@
this.maxJobId = maxJobId;
this.timeStamp = timeStamp;
}
-
+
public long getCheckpointLsn() {
return checkpointLsn;
}
@@ -50,18 +50,18 @@
@Override
public int compareTo(CheckpointObject checkpointObject) {
- long compareTimeStamp = checkpointObject.getTimeStamp();
-
+ long compareTimeStamp = checkpointObject.getTimeStamp();
+
//descending order
long diff = compareTimeStamp - this.timeStamp;
if (diff > 0) {
return 1;
- } else if (diff == 0){
+ } else if (diff == 0) {
return 0;
} else {
return -1;
}
-
+
//ascending order
//return this.timeStamp - compareTimeStamp;
}
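compareTo above still orders checkpoints by subtracting long timestamps, which can overflow for extreme values; Long.compare sidesteps that. A hedged alternative, offered as a suggestion rather than what the patch does:

    // Overflow-safe variant of the descending-timestamp ordering above; the
    // patch itself keeps the subtraction-based version.
    public class CheckpointOrderSketch implements Comparable<CheckpointOrderSketch> {
        private final long timeStamp;

        public CheckpointOrderSketch(long timeStamp) {
            this.timeStamp = timeStamp;
        }

        @Override
        public int compareTo(CheckpointOrderSketch other) {
            // Long.compare cannot overflow; operands are swapped for descending order.
            return Long.compare(other.timeStamp, this.timeStamp);
        }
    }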
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
index 95ee767..1e34df6 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
@@ -340,8 +340,9 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("[RecoveryMgr] recovery is completed.");
- LOGGER.info("[RecoveryMgr's recovery log count] update/entityCommit/jobCommit/abort/redo = " + updateLogCount + "/"
- + entityCommitLogCount + "/" + jobCommitLogCount + "/" + abortLogCount + "/" + redoCount);
+ LOGGER.info("[RecoveryMgr's recovery log count] update/entityCommit/jobCommit/abort/redo = "
+ + updateLogCount + "/" + entityCommitLogCount + "/" + jobCommitLogCount + "/" + abortLogCount + "/"
+ + redoCount);
}
}
@@ -446,7 +447,11 @@
}
if (isSharpCheckpoint) {
- logMgr.renewLogFiles();
+ try {
+ logMgr.renewLogFiles();
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
}
if (isSharpCheckpoint && LOGGER.isLoggable(Level.INFO)) {
@@ -666,8 +671,8 @@
logReader.close();
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info(" undone loser transaction's effect");
- LOGGER.info("[RecoveryManager's rollback log count] update/entityCommit/undo:" + updateLogCount + "/" + entityCommitLogCount + "/"
- + undoCount);
+ LOGGER.info("[RecoveryManager's rollback log count] update/entityCommit/undo:" + updateLogCount + "/"
+ + entityCommitLogCount + "/" + undoCount);
}
}
@@ -681,6 +686,11 @@
//no op
}
+ @Override
+ public void dumpState(OutputStream os) throws IOException {
+ // do nothing
+ }
+
private void undo(ILogRecord logRecord) {
try {
ILSMIndex index = (ILSMIndex) txnSubsystem.getAsterixAppRuntimeContextProvider().getIndexLifecycleManager()
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
index c6f245b..8a3d299 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
@@ -18,21 +18,21 @@
public class DatasetIdFactory {
private static AtomicInteger id = new AtomicInteger();
- private static boolean isInitialized = false;
-
+ private static boolean isInitialized = false;
+
public static boolean isInitialized() {
return isInitialized;
}
-
+
public static void initialize(int initialId) {
- id.set(initialId);
- isInitialized = true;
+ id.set(initialId);
+ isInitialized = true;
}
public static int generateDatasetId() {
return id.incrementAndGet();
}
-
+
public static int getMostRecentDatasetId() {
return id.get();
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/FieldsHashValueGenerator.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/FieldsHashValueGenerator.java
index 6ad4dc7..0b915c1 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/FieldsHashValueGenerator.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/FieldsHashValueGenerator.java
@@ -28,7 +28,7 @@
tuple.getFieldStart(primaryKeyFieldIdx), tuple.getFieldLength(primaryKeyFieldIdx));
h = h * 31 + fh;
if (h < 0) {
- h = h*(-1);
+ h = h * (-1);
}
}
return h;
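The reformatted h = h * (-1) still leaves Integer.MIN_VALUE negative, since negating it overflows back to itself. Masking the sign bit is a common alternative; this is a suggestion only, not a change the patch makes:

    // Sign-bit masking as an alternative to h * (-1). Note that
    // -Integer.MIN_VALUE == Integer.MIN_VALUE, so the multiplication in the
    // patch does not make that one value non-negative; masking always does.
    public class NonNegativeHashSketch {
        public static int nonNegative(int h) {
            return h & 0x7fffffff; // clears the sign bit for every input
        }

        public static void main(String[] args) {
            System.out.println(nonNegative(Integer.MIN_VALUE)); // 0
            System.out.println(nonNegative(-31));               // 2147483617
        }
    }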
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/JobIdFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/JobIdFactory.java
index 019bdfd..ea912c0 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/JobIdFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/JobIdFactory.java
@@ -27,7 +27,7 @@
public static JobId generateJobId() {
return new JobId(Id.incrementAndGet());
}
-
+
public static void initJobId(int id) {
Id.set(id);
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/MutableResourceId.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/MutableResourceId.java
index 9eb7bbd..545d5c0 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/MutableResourceId.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/MutableResourceId.java
@@ -14,7 +14,7 @@
*/
package edu.uci.ics.asterix.transaction.management.service.transaction;
-public class MutableResourceId{
+public class MutableResourceId {
long id;
public MutableResourceId(long id) {
@@ -31,7 +31,7 @@
@Override
public int hashCode() {
- return (int)id;
+ return (int) id;
}
@Override
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionContext.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionContext.java
index 6fb91c8..664a4f1 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionContext.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionContext.java
@@ -46,48 +46,54 @@
private static final long serialVersionUID = -6105616785783310111L;
private TransactionSubsystem transactionSubsystem;
- //jobId is set once and read concurrently.
+ // jobId is set once and read concurrently.
private final JobId jobId;
- //There are no concurrent writers on both firstLSN and lastLSN
- //since both values are updated by serialized log appenders.
- //But readers and writers can be different threads,
- //so both LSNs are atomic variables in order to be read and written atomically.
+ // There are no concurrent writers on both firstLSN and lastLSN
+ // since both values are updated by serialized log appenders.
+ // But readers and writers can be different threads,
+ // so both LSNs are atomic variables in order to be read and written
+ // atomically.
private AtomicLong firstLSN;
private AtomicLong lastLSN;
- //txnState is read and written concurrently.
+ // txnState is read and written concurrently.
private AtomicInteger txnState;
- //isTimeout is read and written under the lockMgr's tableLatch
- //Thus, no other synchronization is required separately.
+ // isTimeout is read and written under the lockMgr's tableLatch
+ // Thus, no other synchronization is required separately.
private boolean isTimeout;
- //isWriteTxn can be set concurrently by multiple threads.
+ // isWriteTxn can be set concurrently by multiple threads.
private AtomicBoolean isWriteTxn;
- //isMetadataTxn is accessed by a single thread since the metadata is not partitioned
+ // isMetadataTxn is accessed by a single thread since the metadata is not
+ // partitioned
private boolean isMetadataTxn;
- //indexMap is concurrently accessed by multiple threads,
- //so those threads are synchronized on indexMap object itself
+ // indexMap is concurrently accessed by multiple threads,
+ // so those threads are synchronized on indexMap object itself
private Map<MutableLong, AbstractLSMIOOperationCallback> indexMap;
- //TODO: fix ComponentLSNs' issues.
- //primaryIndex, primaryIndexCallback, and primaryIndexOptracker will be modified accordingly
- //when the issues of componentLSNs are fixed.
+ // TODO: fix ComponentLSNs' issues.
+ // primaryIndex, primaryIndexCallback, and primaryIndexOptracker will be
+ // modified accordingly
+ // when the issues of componentLSNs are fixed.
private ILSMIndex primaryIndex;
private PrimaryIndexModificationOperationCallback primaryIndexCallback;
private PrimaryIndexOperationTracker primaryIndexOpTracker;
- //The following three variables are used as temporary variables in order to avoid object creations.
- //Those are used in synchronized methods.
+ // The following three variables are used as temporary variables in order to
+ // avoid object creations.
+ // Those are used in synchronized methods.
private MutableLong tempResourceIdForRegister;
private MutableLong tempResourceIdForSetLSN;
private LogRecord logRecord;
- //TODO: implement transactionContext pool in order to avoid object creations.
- // also, the pool can throttle the number of concurrent active jobs at every moment.
+ // TODO: implement transactionContext pool in order to avoid object
+ // creations.
+ // also, the pool can throttle the number of concurrent active jobs at every
+ // moment.
public TransactionContext(JobId jobId, TransactionSubsystem transactionSubsystem) throws ACIDException {
this.jobId = jobId;
this.transactionSubsystem = transactionSubsystem;
@@ -120,16 +126,17 @@
}
}
- //[Notice]
- //This method is called sequentially by the LogAppender threads.
- //However, the indexMap is concurrently read and modified through this method and registerIndexAndCallback()
+ // [Notice]
+ // This method is called sequentially by the LogAppender threads.
+ // However, the indexMap is concurrently read and modified through this
+ // method and registerIndexAndCallback()
@Override
public void setLastLSN(long resourceId, long LSN) {
synchronized (indexMap) {
firstLSN.compareAndSet(-1, LSN);
lastLSN.set(Math.max(lastLSN.get(), LSN));
if (resourceId != -1) {
- //Non-update log's resourceId is -1.
+ // Non-update log's resourceId is -1.
tempResourceIdForSetLSN.set(resourceId);
AbstractLSMIOOperationCallback ioOpCallback = indexMap.get(tempResourceIdForSetLSN);
ioOpCallback.updateLastLSN(LSN);
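setLastLSN combines two idioms: firstLSN.compareAndSet(-1, LSN) records only the very first LSN, and lastLSN.set(Math.max(...)) keeps a running maximum that is safe because, per the comments above, log appenders are serialized. The same pair in isolation:

    // Sketch of the firstLSN/lastLSN idiom in setLastLSN. As in the patch,
    // writers are assumed to be serialized; only readers are concurrent.
    import java.util.concurrent.atomic.AtomicLong;

    public class LsnTrackerSketch {
        private final AtomicLong firstLSN = new AtomicLong(-1);
        private final AtomicLong lastLSN = new AtomicLong(-1);

        public void onAppend(long lsn) {
            firstLSN.compareAndSet(-1, lsn);           // wins only once
            lastLSN.set(Math.max(lastLSN.get(), lsn)); // safe with a single writer
        }

        public long getFirstLSN() { return firstLSN.get(); }
        public long getLastLSN()  { return lastLSN.get(); }
    }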
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
index 78bca42..91f2535 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManagementConstants.java
@@ -30,17 +30,34 @@
}
public static class LockManagerConstants {
- public static final String LOCK_CONF_DIR = "lock_conf";
- public static final String LOCK_CONF_FILE = "lock.conf";
- public static final int[] LOCK_CONFLICT_MATRIX = new int[] { 2, 3 };
- public static final int[] LOCK_CONVERT_MATRIX = new int[] { 2, 0 };
-
public static class LockMode {
- public static final byte S = 0;
- public static final byte X = 1;
- public static final byte IS = 2;
- public static final byte IX = 3;
+ public static final byte ANY = -1;
+ public static final byte NL = 0;
+ public static final byte IS = 1;
+ public static final byte IX = 2;
+ public static final byte S = 3;
+ public static final byte X = 4;
+
+ public static byte intentionMode(byte mode) {
+ switch (mode) {
+ case S: return IS;
+ case X: return IX;
+ default: throw new IllegalArgumentException(
+ "no intention lock mode for " + toString(mode));
+ }
+ }
+
+ public static String toString(byte mode) {
+ switch (mode) {
+ case ANY: return "ANY";
+ case NL: return "NL";
+ case IS: return "IS";
+ case IX: return "IX";
+ case S: return "S";
+ case X: return "X";
+ default: throw new IllegalArgumentException("no such lock mode");
+ }
+ }
}
}
-
}
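The new LockMode constants order the modes NL < IS < IX < S < X with ANY as a wildcard, and intentionMode maps a leaf mode to the intention mode taken on ancestors. The usage sketch below checks requests against the textbook multi-granularity compatibility matrix, which this diff does not itself define, so treat the matrix as an assumption:

    // Hedged usage sketch for the new LockMode constants. The compatibility
    // matrix is the standard multi-granularity one; the lock manager's actual
    // matrix is not shown in this diff.
    public class LockModeSketch {
        static final byte NL = 0, IS = 1, IX = 2, S = 3, X = 4;

        // compatible[held][requested], indexed by the mode constants above
        static final boolean[][] COMPATIBLE = {
                // NL     IS     IX     S      X
                { true,  true,  true,  true,  true  }, // NL held
                { true,  true,  true,  true,  false }, // IS held
                { true,  true,  true,  false, false }, // IX held
                { true,  true,  false, true,  false }, // S held
                { true,  false, false, false, false }, // X held
        };

        public static void main(String[] args) {
            // Locking an entity in S mode implies IS on its dataset, which is
            // compatible with another job's IX (e.g., a concurrent writer).
            System.out.println(COMPATIBLE[IX][IS]); // true
            System.out.println(COMPATIBLE[X][IS]);  // false
        }
    }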
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManager.java
index 07fc152..94be125 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionManager.java
@@ -14,7 +14,6 @@
*/
package edu.uci.ics.asterix.transaction.management.service.transaction;
-import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.Set;
@@ -153,17 +152,16 @@
@Override
public void stop(boolean dumpState, OutputStream os) {
if (dumpState) {
- //#. dump TxnContext
- dumpTxnContext(os);
-
- try {
- os.flush();
- } catch (IOException e) {
- //ignore
- }
+ dumpState(os);
}
}
+ @Override
+ public void dumpState(OutputStream os) {
+ //#. dump TxnContext
+ dumpTxnContext(os);
+ }
+
private void dumpTxnContext(OutputStream os) {
JobId jobId;
ITransactionContext txnCtx;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionSubsystem.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionSubsystem.java
index aceeb82..e6bcc5d 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionSubsystem.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/TransactionSubsystem.java
@@ -22,7 +22,7 @@
import edu.uci.ics.asterix.common.transactions.IRecoveryManager;
import edu.uci.ics.asterix.common.transactions.ITransactionManager;
import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
-import edu.uci.ics.asterix.transaction.management.service.locking.LockManager;
+import edu.uci.ics.asterix.transaction.management.service.locking.ConcurrentLockManager;
import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
import edu.uci.ics.asterix.transaction.management.service.recovery.CheckpointThread;
import edu.uci.ics.asterix.transaction.management.service.recovery.RecoveryManager;
@@ -43,13 +43,13 @@
public TransactionSubsystem(String id, IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider,
AsterixTransactionProperties txnProperties) throws ACIDException {
+ this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
this.id = id;
this.txnProperties = txnProperties;
this.transactionManager = new TransactionManager(this);
- this.lockManager = new LockManager(this);
+ this.lockManager = new ConcurrentLockManager(this);
this.logManager = new LogManager(this);
this.recoveryManager = new RecoveryManager(this);
- this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
if (asterixAppRuntimeContextProvider != null) {
this.checkpointThread = new CheckpointThread(recoveryManager,
asterixAppRuntimeContextProvider.getIndexLifecycleManager(),
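Hoisting the asterixAppRuntimeContextProvider assignment above the manager constructors is the substantive part of this hunk: LogManager's constructor now reaches back through txnSubsystem.getAsterixAppRuntimeContextProvider() (see the LogFlusher submission earlier), so the field must be set before those constructors run against this. A stripped-down illustration of the hazard, with illustrative names:

    // Why assignment order matters in TransactionSubsystem's constructor: a
    // collaborator built inside the constructor may read fields of `this`.
    public class OrderingSketch {
        private final String provider;
        private final Worker worker;

        static final class Worker {
            Worker(OrderingSketch owner) {
                // If owner.provider were assigned after this constructor ran,
                // this read would observe null.
                System.out.println("worker sees provider = " + owner.provider);
            }
        }

        public OrderingSketch(String provider) {
            this.provider = provider;      // must precede new Worker(this)
            this.worker = new Worker(this);
        }

        public static void main(String[] args) {
            new OrderingSketch("runtime-context");
        }
    }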
diff --git a/pom.xml b/pom.xml
index 308900a..8a78880 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,12 +17,12 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix</artifactId>
- <version>0.8.1-SNAPSHOT</version>
+ <version>0.8.4-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
- <algebricks.version>0.2.10-SNAPSHOT</algebricks.version>
- <hyracks.version>0.2.10-SNAPSHOT</hyracks.version>
+ <algebricks.version>0.2.11-SNAPSHOT</algebricks.version>
+ <hyracks.version>0.2.11-SNAPSHOT</hyracks.version>
</properties>
<build>