merge -r1055:1282 (merging from asterix_stabilization to asterix_lsm_stabilization)
git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_lsm_stabilization@1322 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index f950800..a03bae7 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-algebra</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -16,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
<plugin>
@@ -60,8 +60,7 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>org.json</groupId>
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
index 09a4c6b..f0880ec 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
@@ -45,12 +45,14 @@
private final List<LogicalVariable> lowKeyVarList;
private final List<LogicalVariable> highKeyVarList;
- private boolean isPrimaryIndex;
+ private final boolean isPrimaryIndex;
+ private final boolean isEqCondition;
public BTreeSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast,
- boolean isPrimaryIndex, List<LogicalVariable> lowKeyVarList, List<LogicalVariable> highKeyVarList) {
+ boolean isPrimaryIndex, boolean isEqCondition, List<LogicalVariable> lowKeyVarList, List<LogicalVariable> highKeyVarList) {
super(idx, requiresBroadcast);
this.isPrimaryIndex = isPrimaryIndex;
+ this.isEqCondition = isEqCondition;
this.lowKeyVarList = lowKeyVarList;
this.highKeyVarList = highKeyVarList;
}
@@ -100,13 +102,13 @@
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent) {
if (requiresBroadcast) {
- if (isPrimaryIndex) {
- // For primary indexes, we require re-partitioning on the primary key, and not a broadcast.
- // Also, add a local sorting property to enforce a sort before the primary-index operator.
+ // For primary indexes optimizing an equality condition we can reduce the broadcast requirement to hash partitioning.
+ if (isPrimaryIndex && isEqCondition) {
StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
ListSet<LogicalVariable> searchKeyVars = new ListSet<LogicalVariable>();
searchKeyVars.addAll(lowKeyVarList);
searchKeyVars.addAll(highKeyVarList);
+ // Also, add a local sorting property to enforce a sort before the primary-index operator.
List<ILocalStructuralProperty> propsLocal = new ArrayList<ILocalStructuralProperty>();
for (LogicalVariable orderVar : searchKeyVars) {
propsLocal.add(new LocalOrderProperty(new OrderColumn(orderVar, OrderKind.ASC)));
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
index dcf601c..ece3db6 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
@@ -24,7 +24,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractPhysicalOperator;
@@ -33,7 +32,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
public class CommitPOperator extends AbstractPhysicalOperator {
@@ -82,12 +80,7 @@
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
context);
int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]);
- IVariableTypeEnvironment varTypeEnv = context.getTypeEnvironment(op.getInputs().get(0).getValue());
- IBinaryHashFunctionFactory[] binaryHashFunctionFactories = JobGenHelper.variablesToBinaryHashFunctionFactories(
- primaryKeyLogicalVars, varTypeEnv, context);
-
- CommitRuntimeFactory runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields,
- binaryHashFunctionFactories, isWriteTransaction);
+ CommitRuntimeFactory runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields, isWriteTransaction);
builder.contributeMicroOperator(op, runtime, recDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
index 98dbb5b..5d57603 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
@@ -16,37 +16,33 @@
package edu.uci.ics.asterix.algebra.operators.physical;
import java.nio.ByteBuffer;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetId;
-import edu.uci.ics.asterix.transaction.management.service.transaction.FieldsHashValueGenerator;
import edu.uci.ics.asterix.transaction.management.service.transaction.ITransactionManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobId;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext.TransactionType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
public class CommitRuntime implements IPushRuntime {
+
+ private final static long SEED = 0L;
private final IHyracksTaskContext hyracksTaskCtx;
private final ITransactionManager transactionManager;
private final JobId jobId;
private final DatasetId datasetId;
private final int[] primaryKeyFields;
- private final IBinaryHashFunction[] primaryKeyHashFunctions;
private final boolean isWriteTransaction;
private TransactionContext transactionContext;
@@ -55,7 +51,7 @@
private FrameTupleReference frameTupleReference;
public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] binaryHashFunctionFactories, boolean isWriteTransaction) {
+ boolean isWriteTransaction) {
this.hyracksTaskCtx = ctx;
AsterixAppRuntimeContext runtimeCtx = (AsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
.getApplicationObject();
@@ -63,10 +59,6 @@
this.jobId = jobId;
this.datasetId = new DatasetId(datasetId);
this.primaryKeyFields = primaryKeyFields;
- primaryKeyHashFunctions = new IBinaryHashFunction[binaryHashFunctionFactories.length];
- for (int i = 0; i < binaryHashFunctionFactories.length; ++i) {
- this.primaryKeyHashFunctions[i] = binaryHashFunctionFactories[i].createBinaryHashFunction();
- }
this.frameTupleReference = new FrameTupleReference();
this.isWriteTransaction = isWriteTransaction;
}
@@ -89,8 +81,7 @@
int nTuple = frameTupleAccessor.getTupleCount();
for (int t = 0; t < nTuple; t++) {
frameTupleReference.reset(frameTupleAccessor, t);
- pkHash = FieldsHashValueGenerator.computeFieldsHashValue(frameTupleReference, primaryKeyFields,
- primaryKeyHashFunctions);
+ pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
try {
transactionManager.commitTransaction(transactionContext, datasetId, pkHash);
} catch (ACIDException e) {
@@ -98,6 +89,12 @@
}
}
}
+
+ private int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
+ long[] longHashes= new long[2];
+ MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
+ return Math.abs((int) longHashes[0]);
+ }
@Override
public void fail() throws HyracksDataException {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
index 4012bbb..f3b6526 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
@@ -20,7 +20,6 @@
import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
public class CommitRuntimeFactory implements IPushRuntimeFactory {
@@ -29,15 +28,12 @@
private final JobId jobId;
private final int datasetId;
private final int[] primaryKeyFields;
- IBinaryHashFunctionFactory[] binaryHashFunctionFactories;
private final boolean isWriteTransaction;
- public CommitRuntimeFactory(JobId jobId, int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] binaryHashFunctionFactories, boolean isWriteTransaction) {
+ public CommitRuntimeFactory(JobId jobId, int datasetId, int[] primaryKeyFields, boolean isWriteTransaction) {
this.jobId = jobId;
this.datasetId = datasetId;
this.primaryKeyFields = primaryKeyFields;
- this.binaryHashFunctionFactories = binaryHashFunctionFactories;
this.isWriteTransaction = isWriteTransaction;
}
@@ -48,6 +44,6 @@
@Override
public IPushRuntime createPushRuntime(IHyracksTaskContext ctx) throws AlgebricksException {
- return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, binaryHashFunctionFactories, isWriteTransaction);
+ return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isWriteTransaction);
}
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
index 704ccb5..5c54a4a 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -15,6 +15,7 @@
package edu.uci.ics.asterix.optimizer.rules;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -93,40 +94,44 @@
}
IAType t = (IAType) env.getType(fce.getArguments().get(0).getValue());
- switch (t.getTypeTag()) {
- case ANY: {
- return false;
- }
- case RECORD: {
- ARecordType recType = (ARecordType) t;
- ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
- if (fai == null) {
+ try {
+ switch (t.getTypeTag()) {
+ case ANY: {
return false;
}
- expressions.get(i).setValue(fai);
- changed = true;
- break;
- }
- case UNION: {
- AUnionType unionT = (AUnionType) t;
- if (unionT.isNullableType()) {
- IAType t2 = unionT.getUnionList().get(1);
- if (t2.getTypeTag() == ATypeTag.RECORD) {
- ARecordType recType = (ARecordType) t2;
- ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
- if (fai == null) {
- return false;
- }
- expressions.get(i).setValue(fai);
- changed = true;
- break;
+ case RECORD: {
+ ARecordType recType = (ARecordType) t;
+ ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+ if (fai == null) {
+ return false;
}
+ expressions.get(i).setValue(fai);
+ changed = true;
+ break;
}
- throw new NotImplementedException("Union " + unionT);
+ case UNION: {
+ AUnionType unionT = (AUnionType) t;
+ if (unionT.isNullableType()) {
+ IAType t2 = unionT.getUnionList().get(1);
+ if (t2.getTypeTag() == ATypeTag.RECORD) {
+ ARecordType recType = (ARecordType) t2;
+ ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+ if (fai == null) {
+ return false;
+ }
+ expressions.get(i).setValue(fai);
+ changed = true;
+ break;
+ }
+ }
+ throw new NotImplementedException("Union " + unionT);
+ }
+ default: {
+ throw new AlgebricksException("Cannot call field-access on data of type " + t);
+ }
}
- default: {
- throw new AlgebricksException("Cannot call field-access on data of type " + t);
- }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
}
}
assign.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
@@ -134,7 +139,8 @@
}
@SuppressWarnings("unchecked")
- private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce) {
+ private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce)
+ throws IOException {
String s = getStringSecondArgument(fce);
if (s == null) {
return null;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
index 47e957b..b0ce342 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,6 +16,7 @@
package edu.uci.ics.asterix.optimizer.rules;
import java.io.DataInputStream;
+import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
@@ -28,6 +29,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -106,8 +108,8 @@
private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
AqlSerializerDeserializerProvider.INSTANCE, AqlBinaryHashFunctionFactoryProvider.INSTANCE,
- AqlBinaryComparatorFactoryProvider.INSTANCE, AqlTypeTraitProvider.INSTANCE,
- AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
+ AqlBinaryHashFunctionFamilyProvider.INSTANCE, AqlBinaryComparatorFactoryProvider.INSTANCE,
+ AqlTypeTraitProvider.INSTANCE, AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
AqlPrinterFactoryProvider.INSTANCE, AqlNullWriterFactory.INSTANCE, null,
new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(AqlLogicalExpressionJobGen.INSTANCE),
AqlExpressionTypeComputer.INSTANCE, AqlNullableTypeComputer.INSTANCE, null, null, null,
@@ -187,7 +189,12 @@
ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
.getValue()).getValue()).getObject()).getStringValue();
- int k = rt.findFieldPosition(str);
+ int k;
+ try {
+ k = rt.findFieldPosition(str);
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
if (k >= 0) {
// wait for the ByNameToByIndex rule to apply
return new Pair<Boolean, ILogicalExpression>(changed, expr);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
index 066de80..9c7594c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
@@ -165,8 +165,8 @@
BTreeJobGenParams btreeJobGenParams = new BTreeJobGenParams();
btreeJobGenParams.readFromFuncArgs(f.getArguments());
op.setPhysicalOperator(new BTreeSearchPOperator(dsi, requiresBroadcast,
- btreeJobGenParams.isPrimaryIndex(), btreeJobGenParams.getLowKeyVarList(),
- btreeJobGenParams.getHighKeyVarList()));
+ btreeJobGenParams.isPrimaryIndex(), btreeJobGenParams.isEqCondition(),
+ btreeJobGenParams.getLowKeyVarList(), btreeJobGenParams.getHighKeyVarList()));
break;
}
case RTREE: {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index ad70d6f..3dad464 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -186,6 +186,7 @@
case DATE:
case TIME:
case DURATION:
+ case INTERVAL:
case POINT:
case POINT3D:
case POLYGON:
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
index c90b3f1..e88e5b0 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
@@ -33,24 +33,22 @@
/**
* Looks for a select operator, containing a condition:
- *
* similarity-function GE/GT/LE/LE constant/variable
- *
* Rewrites the select condition (and possibly the assign expr) with the equivalent similarity-check function.
- *
*/
public class SimilarityCheckRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
- AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+ AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
// Look for select.
if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
return false;
}
SelectOperator select = (SelectOperator) op;
Mutable<ILogicalExpression> condExpr = select.getCondition();
-
+
// Gather assigns below this select.
List<AssignOperator> assigns = new ArrayList<AssignOperator>();
AbstractLogicalOperator childOp = (AbstractLogicalOperator) select.getInputs().get(0).getValue();
@@ -60,12 +58,13 @@
}
while (childOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
assigns.add((AssignOperator) childOp);
- childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
+ childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
}
return replaceSelectConditionExprs(condExpr, assigns, context);
}
- private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns, IOptimizationContext context) throws AlgebricksException {
+ private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns,
+ IOptimizationContext context) throws AlgebricksException {
ILogicalExpression expr = expRef.getValue();
if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return false;
@@ -83,10 +82,10 @@
}
return found;
}
-
+
// Look for GE/GT/LE/LT.
- if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT &&
- funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
+ if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT
+ && funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
return false;
}
@@ -98,8 +97,8 @@
// Normalized GE/GT/LE/LT as if constant was on the right hand side.
FunctionIdentifier normFuncIdent = null;
// One of the args must be a constant.
- if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
- ConstantExpression constExpr = (ConstantExpression) arg1;
+ if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+ ConstantExpression constExpr = (ConstantExpression) arg1;
constVal = (AsterixConstantValue) constExpr.getValue();
nonConstExpr = arg2;
// Get func ident as if swapping lhs and rhs.
@@ -113,91 +112,101 @@
} else {
return false;
}
-
+
// The other arg is a function call. We can directly replace the select condition with an equivalent similarity check expression.
if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal, (AbstractFunctionCallExpression) nonConstExpr);
+ return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal,
+ (AbstractFunctionCallExpression) nonConstExpr);
}
// The other arg ist a variable. We may have to introduce an assign operator that assigns the result of a similarity-check function to a variable.
if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr, assigns, context);
+ return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr,
+ assigns, context);
}
return false;
}
-
+
private boolean replaceWithVariableArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
- AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns, IOptimizationContext context) throws AlgebricksException {
-
- // Find variable in assigns to determine its originating function.
- LogicalVariable var = varRefExpr.getVariableReference();
- Mutable<ILogicalExpression> simFuncExprRef = null;
- ScalarFunctionCallExpression simCheckFuncExpr = null;
- AssignOperator matchingAssign = null;
- for (int i = 0; i < assigns.size(); i++) {
- AssignOperator assign = assigns.get(i);
- for (int j = 0; j < assign.getVariables().size(); j++) {
- // Check if variables match.
- if (var != assign.getVariables().get(j)) {
- continue;
- }
- // Check if corresponding expr is a function call.
- if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
- continue;
- }
- simFuncExprRef = assign.getExpressions().get(j);
- // Analyze function expression and get equivalent similarity check function.
- simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, (AbstractFunctionCallExpression) simFuncExprRef.getValue());
- matchingAssign = assign;
- break;
- }
- if (simCheckFuncExpr != null) {
- break;
- }
- }
-
- // Only non-null if we found that varRefExpr refers to an optimizable similarity function call.
- if (simCheckFuncExpr != null) {
- // Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
- LogicalVariable newVar = context.newVar();
- AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(simCheckFuncExpr));
- // Hook up inputs.
- newAssign.getInputs().add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
- matchingAssign.getInputs().get(0).setValue(newAssign);
-
- // Replace select condition with a get-item on newVar.
+ AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns,
+ IOptimizationContext context) throws AlgebricksException {
+
+ // Find variable in assigns to determine its originating function.
+ LogicalVariable var = varRefExpr.getVariableReference();
+ Mutable<ILogicalExpression> simFuncExprRef = null;
+ ScalarFunctionCallExpression simCheckFuncExpr = null;
+ AssignOperator matchingAssign = null;
+ for (int i = 0; i < assigns.size(); i++) {
+ AssignOperator assign = assigns.get(i);
+ for (int j = 0; j < assign.getVariables().size(); j++) {
+ // Check if variables match.
+ if (var != assign.getVariables().get(j)) {
+ continue;
+ }
+ // Check if corresponding expr is a function call.
+ if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+ continue;
+ }
+ simFuncExprRef = assign.getExpressions().get(j);
+ // Analyze function expression and get equivalent similarity check function.
+ simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal,
+ (AbstractFunctionCallExpression) simFuncExprRef.getValue());
+ matchingAssign = assign;
+ break;
+ }
+ if (simCheckFuncExpr != null) {
+ break;
+ }
+ }
+
+ // Only non-null if we found that varRefExpr refers to an optimizable similarity function call.
+ if (simCheckFuncExpr != null) {
+ // Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
+ LogicalVariable newVar = context.newVar();
+ AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(
+ simCheckFuncExpr));
+ // Hook up inputs.
+ newAssign.getInputs()
+ .add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
+ matchingAssign.getInputs().get(0).setValue(newAssign);
+
+ // Replace select condition with a get-item on newVar.
List<Mutable<ILogicalExpression>> selectGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
// First arg is a variable reference expr on newVar.
selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
// Second arg is the item index to be accessed, here 0.
- selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(0)))));
- ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
+ selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+ new AsterixConstantValue(new AInt32(0)))));
+ ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
// Replace the old similarity function call with the new getItemExpr.
expRef.setValue(selectGetItemExpr);
-
+
// Replace expr corresponding to original variable in the original assign with a get-item on newVar.
List<Mutable<ILogicalExpression>> assignGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
// First arg is a variable reference expr on newVar.
assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
// Second arg is the item index to be accessed, here 1.
- assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(1)))));
- ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
+ assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+ new AsterixConstantValue(new AInt32(1)))));
+ ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
// Replace the original assign expr with the get-item expr.
simFuncExprRef.setValue(assignGetItemExpr);
-
+
context.computeAndSetTypeEnvironmentForOperator(newAssign);
context.computeAndSetTypeEnvironmentForOperator(matchingAssign);
-
- return true;
- }
-
- return false;
+
+ return true;
+ }
+
+ return false;
}
-
+
private boolean replaceWithFunctionCallArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
- AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
- // Analyze func expr to see if it is an optimizable similarity function.
- ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
-
+ AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
+ // Analyze func expr to see if it is an optimizable similarity function.
+ ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
+
// Replace the expr in the select condition.
if (simCheckFuncExpr != null) {
// Get item 0 from var.
@@ -205,8 +214,10 @@
// First arg is the similarity-check function call.
getItemArgs.add(new MutableObject<ILogicalExpression>(simCheckFuncExpr));
// Second arg is the item index to be accessed.
- getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(0)))));
- ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
+ getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+ new AInt32(0)))));
+ ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(
+ FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
// Replace the old similarity function call with the new getItemExpr.
expRef.setValue(getItemExpr);
return true;
@@ -214,10 +225,10 @@
return false;
}
-
+
private ScalarFunctionCallExpression getSimilarityCheckExpr(FunctionIdentifier normFuncIdent,
- AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
- // Remember args from original similarity function to add them to the similarity-check function later.
+ AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) {
+ // Remember args from original similarity function to add them to the similarity-check function later.
ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
ScalarFunctionCallExpression simCheckFuncExpr = null;
// Look for jaccard function call, and GE or GT.
@@ -270,11 +281,11 @@
}
// Preserve all annotations.
if (simCheckFuncExpr != null) {
- simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
+ simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
}
return simCheckFuncExpr;
}
-
+
private FunctionIdentifier getLhsAndRhsSwappedFuncIdent(FunctionIdentifier oldFuncIdent) {
if (oldFuncIdent == AlgebricksBuiltinFunctions.GE) {
return AlgebricksBuiltinFunctions.LE;
@@ -290,7 +301,7 @@
}
throw new IllegalStateException();
}
-
+
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
index d555eda..7084cd7 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.optimizer.rules.am;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -44,7 +60,8 @@
* Static helper functions for rewriting plans using indexes.
*/
public class AccessMethodUtils {
- public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target) {
+ public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target)
+ throws IOException {
ARecordType recordType = (ARecordType) itemType;
List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
for (String partitioningKey : partitioningKeys) {
@@ -109,7 +126,7 @@
analysisCtx.matchedFuncExprs.add(new OptimizableFuncExpr(funcExpr, fieldVar, constFilterVal));
return true;
}
-
+
public static boolean analyzeFuncExprArgsForTwoVars(AbstractFunctionCallExpression funcExpr,
AccessMethodAnalysisContext analysisCtx) {
LogicalVariable fieldVar1 = null;
@@ -182,7 +199,11 @@
// Primary keys.
List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
for (String partitioningKey : partitioningKeys) {
- dest.add(recordType.getFieldType(partitioningKey));
+ try {
+ dest.add(recordType.getFieldType(partitioningKey));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
}
}
@@ -231,14 +252,14 @@
}
/**
- * Returns the first expr optimizable by this index.
+ * Returns the first expr optimizable by this index.
*/
public static IOptimizableFuncExpr chooseFirstOptFuncExpr(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
List<Integer> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
int firstExprIndex = indexExprs.get(0);
return analysisCtx.matchedFuncExprs.get(firstExprIndex);
}
-
+
public static UnnestMapOperator createSecondaryIndexUnnestMap(Dataset dataset, ARecordType recordType, Index index,
ILogicalOperator inputOp, AccessMethodJobGenParams jobGenParams, IOptimizationContext context,
boolean outputPrimaryKeysOnly, boolean retainInput) throws AlgebricksException {
@@ -301,7 +322,11 @@
List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
// Append output variables/types generated by the primary-index search (not forwarded from input).
primaryIndexUnnestVars.addAll(dataSourceScan.getVariables());
- appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+ try {
+ appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
// An index search is expressed as an unnest over an index-search function.
IFunctionInfo primaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
AbstractFunctionCallExpression primaryIndexSearchFunc = new ScalarFunctionCallExpression(primaryIndexSearch,
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
index 00a0f34..ddcf768 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.optimizer.rules.am;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
@@ -192,6 +208,7 @@
// If we can't figure out how to integrate a certain funcExpr into the current predicate, we just bail by setting this flag.
boolean couldntFigureOut = false;
boolean doneWithExprs = false;
+ boolean isEqCondition = false;
// TODO: For now don't consider prefix searches.
BitSet setLowKeys = new BitSet(numSecondaryKeys);
BitSet setHighKeys = new BitSet(numSecondaryKeys);
@@ -213,7 +230,7 @@
}
ILogicalExpression searchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree,
probeSubTree);
- LimitType limit = getLimitType(optFuncExpr);
+ LimitType limit = getLimitType(optFuncExpr, probeSubTree);
switch (limit) {
case EQUAL: {
if (lowKeyLimits[keyPos] == null && highKeyLimits[keyPos] == null) {
@@ -222,7 +239,16 @@
lowKeyExprs[keyPos] = highKeyExprs[keyPos] = searchKeyExpr;
setLowKeys.set(keyPos);
setHighKeys.set(keyPos);
+ isEqCondition = true;
} else {
+ // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
+ // (once from analyzing each side of the join)
+ if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
+ && lowKeyExprs[keyPos].equals(searchKeyExpr) && highKeyLimits[keyPos] == limit
+ && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+ isEqCondition = true;
+ break;
+ }
couldntFigureOut = true;
}
// TODO: For now don't consider prefix searches.
@@ -238,6 +264,12 @@
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = false;
} else {
+ // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
+ // (once from analyzing each side of the join)
+ if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == false
+ && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+ break;
+ }
couldntFigureOut = true;
doneWithExprs = true;
}
@@ -249,6 +281,12 @@
highKeyExprs[keyPos] = searchKeyExpr;
highKeyInclusive[keyPos] = true;
} else {
+ // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
+ // (once from analyzing each side of the join)
+ if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true
+ && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+ break;
+ }
couldntFigureOut = true;
doneWithExprs = true;
}
@@ -260,6 +298,12 @@
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = false;
} else {
+ // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
+ // (once from analyzing each side of the join)
+ if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == false
+ && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
+ break;
+ }
couldntFigureOut = true;
doneWithExprs = true;
}
@@ -271,6 +315,12 @@
lowKeyExprs[keyPos] = searchKeyExpr;
lowKeyInclusive[keyPos] = true;
} else {
+ // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
+ // (once from analyzing each side of the join)
+ if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
+ && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
+ break;
+ }
couldntFigureOut = true;
doneWithExprs = true;
}
@@ -343,6 +393,7 @@
dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
jobGenParams.setLowKeyInclusive(lowKeyInclusive[0]);
jobGenParams.setHighKeyInclusive(highKeyInclusive[0]);
+ jobGenParams.setIsEqCondition(isEqCondition);
jobGenParams.setLowKeyVarList(keyVarList, 0, numLowKeys);
jobGenParams.setHighKeyVarList(keyVarList, numLowKeys, numHighKeys);
@@ -374,7 +425,11 @@
indexSubTree.dataSourceScanRef.setValue(primaryIndexUnnestOp); //kisskys
} else {
List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
- AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+ try {
+ AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
primaryIndexUnnestOp = new UnnestMapOperator(dataSourceScan.getVariables(),
secondaryIndexUnnestOp.getExpressionRef(), primaryIndexOutputTypes, retainInput);
primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
@@ -462,7 +517,7 @@
return -1;
}
- private LimitType getLimitType(IOptimizableFuncExpr optFuncExpr) {
+ private LimitType getLimitType(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(optFuncExpr.getFuncExpr()
.getFunctionIdentifier());
LimitType limit = null;
@@ -472,19 +527,19 @@
break;
}
case GE: {
- limit = constantIsOnLhs(optFuncExpr) ? LimitType.HIGH_INCLUSIVE : LimitType.LOW_INCLUSIVE;
+ limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_INCLUSIVE : LimitType.LOW_INCLUSIVE;
break;
}
case GT: {
- limit = constantIsOnLhs(optFuncExpr) ? LimitType.HIGH_EXCLUSIVE : LimitType.LOW_EXCLUSIVE;
+ limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_EXCLUSIVE : LimitType.LOW_EXCLUSIVE;
break;
}
case LE: {
- limit = constantIsOnLhs(optFuncExpr) ? LimitType.LOW_INCLUSIVE : LimitType.HIGH_INCLUSIVE;
+ limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_INCLUSIVE : LimitType.HIGH_INCLUSIVE;
break;
}
case LT: {
- limit = constantIsOnLhs(optFuncExpr) ? LimitType.LOW_EXCLUSIVE : LimitType.HIGH_EXCLUSIVE;
+ limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_EXCLUSIVE : LimitType.HIGH_EXCLUSIVE;
break;
}
case NEQ: {
@@ -498,9 +553,14 @@
return limit;
}
- // Returns true if there is a constant value on the left-hand side if the given optimizable function (assuming a binary function).
- public boolean constantIsOnLhs(IOptimizableFuncExpr optFuncExpr) {
- return optFuncExpr.getFuncExpr().getArguments().get(0) == optFuncExpr.getConstantVal(0);
+ private boolean probeIsOnLhs(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
+ if (probeSubTree == null) {
+ // We are optimizing a selection query. Search key is a constant. Return true if constant is on lhs.
+ return optFuncExpr.getFuncExpr().getArguments().get(0) == optFuncExpr.getConstantVal(0);
+ } else {
+ // We are optimizing a join query. Determine whether the feeding variable is on the lhs.
+ return (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree);
+ }
}
private ILogicalExpression createSelectCondition(List<Mutable<ILogicalExpression>> predList) {
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
index 9a735c9..8b7636b 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
@@ -22,13 +22,14 @@
protected boolean lowKeyInclusive;
protected boolean highKeyInclusive;
+ protected boolean isEqCondition;
public BTreeJobGenParams() {
super();
}
- public BTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName, boolean retainInput,
- boolean requiresBroadcast) {
+ public BTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+ boolean retainInput, boolean requiresBroadcast) {
super(indexName, indexType, dataverseName, datasetName, retainInput, requiresBroadcast);
}
@@ -56,12 +57,17 @@
this.highKeyInclusive = highKeyInclusive;
}
+ public void setIsEqCondition(boolean isEqConsition) {
+ this.isEqCondition = isEqConsition;
+ }
+
public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
super.writeToFuncArgs(funcArgs);
writeVarList(lowKeyVarList, funcArgs);
writeVarList(highKeyVarList, funcArgs);
- writeKeyInclusive(lowKeyInclusive, funcArgs);
- writeKeyInclusive(highKeyInclusive, funcArgs);
+ writeBoolean(lowKeyInclusive, funcArgs);
+ writeBoolean(highKeyInclusive, funcArgs);
+ writeBoolean(isEqCondition, funcArgs);
}
public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
@@ -71,16 +77,24 @@
highKeyVarList = new ArrayList<LogicalVariable>();
int nextIndex = readVarList(funcArgs, index, lowKeyVarList);
nextIndex = readVarList(funcArgs, nextIndex, highKeyVarList);
- readKeyInclusives(funcArgs, nextIndex);
+ nextIndex = readKeyInclusives(funcArgs, nextIndex);
+ readIsEqCondition(funcArgs, nextIndex);
}
- private void readKeyInclusives(List<Mutable<ILogicalExpression>> funcArgs, int index) {
+ private int readKeyInclusives(List<Mutable<ILogicalExpression>> funcArgs, int index) {
lowKeyInclusive = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
+ // Read the next function argument at index + 1.
highKeyInclusive = ((ConstantExpression) funcArgs.get(index + 1).getValue()).getValue().isTrue();
+ // We have read two of the function arguments, so the next index is at index + 2.
+ return index + 2;
}
- private void writeKeyInclusive(boolean keyInclusive, List<Mutable<ILogicalExpression>> funcArgs) {
- ILogicalExpression keyExpr = keyInclusive ? ConstantExpression.TRUE : ConstantExpression.FALSE;
+ private void readIsEqCondition(List<Mutable<ILogicalExpression>> funcArgs, int index) {
+ isEqCondition = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
+ }
+
+ private void writeBoolean(boolean val, List<Mutable<ILogicalExpression>> funcArgs) {
+ ILogicalExpression keyExpr = val ? ConstantExpression.TRUE : ConstantExpression.FALSE;
funcArgs.add(new MutableObject<ILogicalExpression>(keyExpr));
}
@@ -92,6 +106,10 @@
return highKeyVarList;
}
+ public boolean isEqCondition() {
+ return isEqCondition;
+ }
+
public boolean isLowKeyInclusive() {
return lowKeyInclusive;
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
index 0c6f2ea..3ba3e96 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -26,6 +26,7 @@
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
import edu.uci.ics.asterix.om.types.ARecordType;
@@ -261,6 +262,7 @@
String reqFieldName = reqFieldNames[j];
IAType reqFieldType = reqFieldTypes[j];
if (fieldName.equals(reqFieldName)) {
+ //type matched
if (fieldType.equals(reqFieldType)) {
fieldPermutation[j] = i;
openFields[i] = false;
@@ -293,6 +295,26 @@
}
}
+ // match the optional type input for a non-optional field
+ // delay that to runtime by calling the not-null function
+ if (fieldType.getTypeTag() == ATypeTag.UNION
+ && NonTaggedFormatUtil.isOptionalField((AUnionType) fieldType)) {
+ IAType itemType = ((AUnionType) fieldType).getUnionList().get(
+ NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+ if (reqFieldType.equals(itemType)) {
+ fieldPermutation[j] = i;
+ openFields[i] = false;
+ matched = true;
+
+ ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
+ new AsterixFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
+ notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
+ //wrap the original argument with the not-null function
+ func.getArguments().get(2 * i + 1).setValue(notNullFunc);
+ break;
+ }
+ }
+
// match the record field: need cast
if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
index a908363..16cfefe 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
@@ -1,78 +1,126 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.translator;
import java.util.Map;
+import java.util.Map.Entry;
import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap.ARTIFACT_KIND;
import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+/**
+ * Base class for AQL translators. Contains the common validation logic for AQL
+ * statements.
+ */
public abstract class AbstractAqlTranslator {
protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
- public void validateOperation(Dataverse dataverse, Statement stmt) throws AsterixException {
- String dataverseName = dataverse != null ? dataverse.getDataverseName() : null;
- if (dataverseName != null && dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
+ public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+ boolean invalidOperation = false;
+ String message = null;
+ String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
+ switch (stmt.getKind()) {
+ case INSERT:
+ InsertStatement insertStmt = (InsertStatement) stmt;
+ if (insertStmt.getDataverseName() != null) {
+ dataverse = insertStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Insert operation is not permitted in dataverse "
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
- boolean invalidOperation = false;
- String message = null;
- switch (stmt.getKind()) {
- case INSERT:
- case UPDATE:
- case DELETE:
- invalidOperation = true;
- message = " Operation " + stmt.getKind() + " not permitted in system dataverse " + "'"
- + MetadataConstants.METADATA_DATAVERSE_NAME + "'";
- break;
- case FUNCTION_DROP:
- FunctionSignature signature = ((FunctionDropStatement) stmt).getFunctionSignature();
- FunctionIdentifier fId = new FunctionIdentifier(signature.getNamespace(), signature.getName(),
- signature.getArity());
- if (dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
- && AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.FUNCTION, fId)) {
- invalidOperation = true;
- message = "Cannot drop function " + signature + " (protected by system)";
+ case DELETE:
+ DeleteStatement deleteStmt = (DeleteStatement) stmt;
+ if (deleteStmt.getDataverseName() != null) {
+ dataverse = deleteStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Delete operation is not permitted in dataverse "
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
+
+ case NODEGROUP_DROP:
+ String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
+ invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
+ if (invalidOperation) {
+ message = "Cannot drop nodegroup:" + nodegroupName;
+ }
+ break;
+
+ case DATAVERSE_DROP:
+ DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
+ .getValue());
+ if (invalidOperation) {
+ message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+ }
+ break;
+
+ case DATASET_DROP:
+ DropStatement dropStmt = (DropStatement) stmt;
+ if (dropStmt.getDataverseName() != null) {
+ dataverse = dropStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Cannot drop a dataset belonging to the dataverse:"
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
+ case DATASET_DECL:
+ DatasetDecl datasetStmt = (DatasetDecl) stmt;
+ Map<String, String> hints = datasetStmt.getHints();
+ if (hints != null && !hints.isEmpty()) {
+ Pair<Boolean, String> validationResult = null;
+ StringBuffer errorMsgBuffer = new StringBuffer();
+ for (Entry<String, String> hint : hints.entrySet()) {
+ validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+ if (!validationResult.first) {
+ errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
+ + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
+ errorMsgBuffer.append(" \n");
+ }
}
- break;
- case NODEGROUP_DROP:
- NodeGroupDropStatement nodeGroupDropStmt = (NodeGroupDropStatement) stmt;
- String nodegroupName = nodeGroupDropStmt.getNodeGroupName().getValue();
- if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.NODEGROUP, nodegroupName)) {
- message = "Cannot drop nodegroup " + nodegroupName + " (protected by system)";
- invalidOperation = true;
+ invalidOperation = errorMsgBuffer.length() > 0;
+ if (invalidOperation) {
+ message = errorMsgBuffer.toString();
}
- break;
- case DATAVERSE_DROP:
- DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
- String dvName = dvDropStmt.getDataverseName().getValue();
- if (dvName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
- message = "Cannot drop dataverse " + dvName + " (protected by system)";
- invalidOperation = true;
- }
- break;
- case DATASET_DROP:
- DropStatement dropStmt = (DropStatement) stmt;
- String datasetName = dropStmt.getDatasetName().getValue();
- if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.DATASET, datasetName)) {
- invalidOperation = true;
- message = "Cannot drop dataset " + datasetName + " (protected by system)";
- }
- break;
- }
- if (invalidOperation) {
- throw new AsterixException(message);
- }
+ }
+ break;
+
+ }
+
+ if (invalidOperation) {
+ throw new AsterixException("Invalid operation - " + message);
}
}
-}
\ No newline at end of file
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index 6d52b8f..625fed1 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -43,501 +43,533 @@
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+/**
+ * An AQL statement instance is translated into an instance of a CompiledX
+ * type that has additional fields for use by the AqlTranslator.
+ */
public class CompiledStatements {
- public static interface ICompiledStatement {
+ public static interface ICompiledStatement {
- public Kind getKind();
- }
+ public Kind getKind();
+ }
- public static class CompiledWriteFromQueryResultStatement implements ICompiledDmlStatement {
+ public static class CompiledWriteFromQueryResultStatement implements
+ ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- public CompiledWriteFromQueryResultStatement(String dataverseName, String datasetName, Query query,
- int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledWriteFromQueryResultStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.WRITE_FROM_QUERY_RESULT;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.WRITE_FROM_QUERY_RESULT;
+ }
- }
+ }
- public static class CompiledDatasetDropStatement implements ICompiledStatement {
- private final String dataverseName;
- private final String datasetName;
+ public static class CompiledDatasetDropStatement implements
+ ICompiledStatement {
+ private final String dataverseName;
+ private final String datasetName;
- public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- }
+ public CompiledDatasetDropStatement(String dataverseName,
+ String datasetName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATASET_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATASET_DROP;
+ }
+ }
- // added by yasser
- public static class CompiledCreateDataverseStatement implements ICompiledStatement {
- private String dataverseName;
- private String format;
+ // added by yasser
+ public static class CompiledCreateDataverseStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private String format;
- public CompiledCreateDataverseStatement(String dataverseName, String format) {
- this.dataverseName = dataverseName;
- this.format = format;
- }
+ public CompiledCreateDataverseStatement(String dataverseName,
+ String format) {
+ this.dataverseName = dataverseName;
+ this.format = format;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getFormat() {
- return format;
- }
+ public String getFormat() {
+ return format;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_DATAVERSE;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_DATAVERSE;
+ }
+ }
- public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
- private String nodeGroupName;
+ public static class CompiledNodeGroupDropStatement implements
+ ICompiledStatement {
+ private String nodeGroupName;
- public CompiledNodeGroupDropStatement(String nodeGroupName) {
- this.nodeGroupName = nodeGroupName;
- }
+ public CompiledNodeGroupDropStatement(String nodeGroupName) {
+ this.nodeGroupName = nodeGroupName;
+ }
- public String getNodeGroupName() {
- return nodeGroupName;
- }
+ public String getNodeGroupName() {
+ return nodeGroupName;
+ }
- @Override
- public Kind getKind() {
- return Kind.NODEGROUP_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.NODEGROUP_DROP;
+ }
+ }
- public static class CompiledIndexDropStatement implements ICompiledStatement {
- private String dataverseName;
- private String datasetName;
- private String indexName;
+ public static class CompiledIndexDropStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private String datasetName;
+ private String indexName;
- public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.indexName = indexName;
- }
+ public CompiledIndexDropStatement(String dataverseName,
+ String datasetName, String indexName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.indexName = indexName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- @Override
- public Kind getKind() {
- return Kind.INDEX_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.INDEX_DROP;
+ }
+ }
- public static class CompiledDataverseDropStatement implements ICompiledStatement {
- private String dataverseName;
- private boolean ifExists;
+ public static class CompiledDataverseDropStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private boolean ifExists;
- public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
- this.dataverseName = dataverseName;
- this.ifExists = ifExists;
- }
+ public CompiledDataverseDropStatement(String dataverseName,
+ boolean ifExists) {
+ this.dataverseName = dataverseName;
+ this.ifExists = ifExists;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public boolean getIfExists() {
- return ifExists;
- }
+ public boolean getIfExists() {
+ return ifExists;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATAVERSE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATAVERSE_DROP;
+ }
+ }
- public static class CompiledTypeDropStatement implements ICompiledStatement {
- private String typeName;
+ public static class CompiledTypeDropStatement implements ICompiledStatement {
+ private String typeName;
- public CompiledTypeDropStatement(String nodeGroupName) {
- this.typeName = nodeGroupName;
- }
+ public CompiledTypeDropStatement(String nodeGroupName) {
+ this.typeName = nodeGroupName;
+ }
- public String getTypeName() {
- return typeName;
- }
+ public String getTypeName() {
+ return typeName;
+ }
- @Override
- public Kind getKind() {
- return Kind.TYPE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.TYPE_DROP;
+ }
+ }
- public static interface ICompiledDmlStatement extends ICompiledStatement {
+ public static interface ICompiledDmlStatement extends ICompiledStatement {
- public String getDataverseName();
+ public String getDataverseName();
- public String getDatasetName();
- }
+ public String getDatasetName();
+ }
- public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
- private final String indexName;
- private final String dataverseName;
- private final String datasetName;
- private final List<String> keyFields;
- private final IndexType indexType;
+ public static class CompiledCreateIndexStatement implements
+ ICompiledDmlStatement {
+ private final String indexName;
+ private final String dataverseName;
+ private final String datasetName;
+ private final List<String> keyFields;
+ private final IndexType indexType;
- // Specific to NGram index.
- private final int gramLength;
+ // Specific to NGram index.
+ private final int gramLength;
- public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
- List<String> keyFields, int gramLength, IndexType indexType) {
- this.indexName = indexName;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.keyFields = keyFields;
- this.gramLength = gramLength;
- this.indexType = indexType;
- }
+ public CompiledCreateIndexStatement(String indexName,
+ String dataverseName, String datasetName,
+ List<String> keyFields, int gramLength, IndexType indexType) {
+ this.indexName = indexName;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.keyFields = keyFields;
+ this.gramLength = gramLength;
+ this.indexType = indexType;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- public List<String> getKeyFields() {
- return keyFields;
- }
+ public List<String> getKeyFields() {
+ return keyFields;
+ }
- public IndexType getIndexType() {
- return indexType;
- }
+ public IndexType getIndexType() {
+ return indexType;
+ }
- public int getGramLength() {
- return gramLength;
- }
+ public int getGramLength() {
+ return gramLength;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_INDEX;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_INDEX;
+ }
+ }
- public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private boolean alreadySorted;
- private String adapter;
- private Map<String, String> properties;
+ public static class CompiledLoadFromFileStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private boolean alreadySorted;
+ private String adapter;
+ private Map<String, String> properties;
- public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
- Map<String, String> properties, boolean alreadySorted) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.alreadySorted = alreadySorted;
- this.adapter = adapter;
- this.properties = properties;
- }
+ public CompiledLoadFromFileStatement(String dataverseName,
+ String datasetName, String adapter,
+ Map<String, String> properties, boolean alreadySorted) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.alreadySorted = alreadySorted;
+ this.adapter = adapter;
+ this.properties = properties;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public boolean alreadySorted() {
- return alreadySorted;
- }
+ public boolean alreadySorted() {
+ return alreadySorted;
+ }
- public String getAdapter() {
- return adapter;
- }
+ public String getAdapter() {
+ return adapter;
+ }
- public Map<String, String> getProperties() {
- return properties;
- }
+ public Map<String, String> getProperties() {
+ return properties;
+ }
- @Override
- public Kind getKind() {
- return Kind.LOAD_FROM_FILE;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.LOAD_FROM_FILE;
+ }
+ }
- public static class CompiledInsertStatement implements ICompiledDmlStatement {
- private final String dataverseName;
- private final String datasetName;
- private final Query query;
- private final int varCounter;
+ public static class CompiledInsertStatement implements
+ ICompiledDmlStatement {
+ private final String dataverseName;
+ private final String datasetName;
+ private final Query query;
+ private final int varCounter;
- public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledInsertStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.INSERT;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.INSERT;
+ }
+ }
- public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ public static class CompiledBeginFeedStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledBeginFeedStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- public void setQuery(Query query) {
- this.query = query;
- }
+ public void setQuery(Query query) {
+ this.query = query;
+ }
- @Override
- public Kind getKind() {
- return Kind.BEGIN_FEED;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.BEGIN_FEED;
+ }
+ }
- public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private OperationType operationType;
- private Query query;
- private int varCounter;
- private Map<String, String> alteredParams;
+ public static class CompiledControlFeedStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private OperationType operationType;
+ private Query query;
+ private int varCounter;
+ private Map<String, String> alteredParams;
- public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
- Map<String, String> alteredParams) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.operationType = operationType;
- this.alteredParams = alteredParams;
- }
+ public CompiledControlFeedStatement(OperationType operationType,
+ String dataverseName, String datasetName,
+ Map<String, String> alteredParams) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.operationType = operationType;
+ this.alteredParams = alteredParams;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public OperationType getOperationType() {
- return operationType;
- }
+ public OperationType getOperationType() {
+ return operationType;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.CONTROL_FEED;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CONTROL_FEED;
+ }
- public Map<String, String> getProperties() {
- return alteredParams;
- }
+ public Map<String, String> getProperties() {
+ return alteredParams;
+ }
- public void setProperties(Map<String, String> properties) {
- this.alteredParams = properties;
- }
- }
+ public void setProperties(Map<String, String> properties) {
+ this.alteredParams = properties;
+ }
+ }
- public static class CompiledDeleteStatement implements ICompiledDmlStatement {
- private VariableExpr var;
- private String dataverseName;
- private String datasetName;
- private Expression condition;
- private Clause dieClause;
- private int varCounter;
- private AqlMetadataProvider metadataProvider;
+ public static class CompiledDeleteStatement implements
+ ICompiledDmlStatement {
+ private VariableExpr var;
+ private String dataverseName;
+ private String datasetName;
+ private Expression condition;
+ private Clause dieClause;
+ private int varCounter;
+ private AqlMetadataProvider metadataProvider;
- public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
- Expression condition, Clause dieClause, int varCounter, AqlMetadataProvider metadataProvider) {
- this.var = var;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.condition = condition;
- this.dieClause = dieClause;
- this.varCounter = varCounter;
- this.metadataProvider = metadataProvider;
- }
+ public CompiledDeleteStatement(VariableExpr var, String dataverseName,
+ String datasetName, Expression condition, Clause dieClause,
+ int varCounter, AqlMetadataProvider metadataProvider) {
+ this.var = var;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.condition = condition;
+ this.dieClause = dieClause;
+ this.varCounter = varCounter;
+ this.metadataProvider = metadataProvider;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Expression getCondition() {
- return condition;
- }
+ public Expression getCondition() {
+ return condition;
+ }
- public Clause getDieClause() {
- return dieClause;
- }
+ public Clause getDieClause() {
+ return dieClause;
+ }
- public Query getQuery() throws AlgebricksException {
+ public Query getQuery() throws AlgebricksException {
- List<Expression> arguments = new ArrayList<Expression>();
- String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
- LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
- arguments.add(argumentLiteral);
+ List<Expression> arguments = new ArrayList<Expression>();
+ String arg = dataverseName == null ? datasetName : dataverseName
+ + "." + datasetName;
+ LiteralExpr argumentLiteral = new LiteralExpr(
+ new StringLiteral(arg));
+ arguments.add(argumentLiteral);
- CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
- arguments);
- List<Clause> clauseList = new ArrayList<Clause>();
- Clause forClause = new ForClause(var, callExpression);
- clauseList.add(forClause);
- Clause whereClause = null;
- if (condition != null) {
- whereClause = new WhereClause(condition);
- clauseList.add(whereClause);
- }
- if (dieClause != null) {
- clauseList.add(dieClause);
- }
+ CallExpr callExpression = new CallExpr(new FunctionSignature(
+ FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
+ List<Clause> clauseList = new ArrayList<Clause>();
+ Clause forClause = new ForClause(var, callExpression);
+ clauseList.add(forClause);
+ Clause whereClause = null;
+ if (condition != null) {
+ whereClause = new WhereClause(condition);
+ clauseList.add(whereClause);
+ }
+ if (dieClause != null) {
+ clauseList.add(dieClause);
+ }
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
- if (dataset == null) {
- throw new AlgebricksException("Unknown dataset " + datasetName);
- }
- String itemTypeName = dataset.getItemTypeName();
- IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
- ARecordType recType = (ARecordType) itemType;
- String[] fieldNames = recType.getFieldNames();
- List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
- for (int i = 0; i < fieldNames.length; i++) {
- FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
- FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
- fieldBindings.add(fb);
- }
- RecordConstructor rc = new RecordConstructor(fieldBindings);
+ Dataset dataset = metadataProvider.findDataset(dataverseName,
+ datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName);
+ }
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType = metadataProvider.findType(
+ dataset.getDataverseName(), itemTypeName);
+ ARecordType recType = (ARecordType) itemType;
+ String[] fieldNames = recType.getFieldNames();
+ List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+ for (int i = 0; i < fieldNames.length; i++) {
+ FieldAccessor fa = new FieldAccessor(var, new Identifier(
+ fieldNames[i]));
+ FieldBinding fb = new FieldBinding(new LiteralExpr(
+ new StringLiteral(fieldNames[i])), fa);
+ fieldBindings.add(fb);
+ }
+ RecordConstructor rc = new RecordConstructor(fieldBindings);
- FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
- Query query = new Query();
- query.setBody(flowgr);
- return query;
- }
+ FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+ Query query = new Query();
+ query.setBody(flowgr);
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.DELETE;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DELETE;
+ }
- }
+ }
}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java
new file mode 100644
index 0000000..0b66c30
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/DatasetHints.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Collection of hints supported by create dataset statement.
+ * Includes method to validate a hint and its associated value
+ * as provided in a create dataset statement.
+ */
+public class DatasetHints {
+
+ /**
+ * validate the use of a hint
+ *
+ * @param hintName
+ * name of the hint
+ * @param value
+ * value associated with the hint
+ * @return a Pair with
+ * first element as a boolean that represents the validation result.
+ * second element as the error message if the validation result is false
+ */
+ public static Pair<Boolean, String> validate(String hintName, String value) {
+ for (IHint h : hints) {
+ if (h.getName().equalsIgnoreCase(hintName.trim())) {
+ return h.validateValue(value);
+ }
+ }
+ return new Pair<Boolean, String>(false, "Unknwon hint :" + hintName);
+ }
+
+ private static Set<IHint> hints = initHints();
+
+ private static Set<IHint> initHints() {
+ Set<IHint> hints = new HashSet<IHint>();
+ hints.add(new DatasetCardinalityHint());
+ return hints;
+ }
+
+ /**
+ * Hint representing the expected number of tuples in the dataset.
+ */
+ public static class DatasetCardinalityHint implements IHint {
+ public static final String NAME = "CARDINALITY";
+
+ public static final long DEFAULT = 1000000L;
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ @Override
+ public Pair<Boolean, String> validateValue(String value) {
+ boolean valid = true;
+ long longValue;
+ try {
+ longValue = Long.parseLong(value);
+ if (longValue < 0) {
+ return new Pair<Boolean, String>(false, "Value must be >= 0");
+ }
+ } catch (NumberFormatException nfe) {
+ valid = false;
+ return new Pair<Boolean, String>(valid, "Inappropriate value");
+ }
+ return new Pair<Boolean, String>(true, null);
+ }
+
+ }
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java
new file mode 100644
index 0000000..1c9fd7b
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/IHint.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Represents a hint provided as part of an AQL statement.
+ */
+public interface IHint {
+
+ /**
+ * retrieve the name of the hint.
+ *
+ * @return
+ */
+ public String getName();
+
+ /**
+ * validate the value associated with the hint.
+ *
+ * @param value
+ * the value associated with the hint.
+ * @return a Pair with
+ * first element as a boolean that represents the validation result.
+ * second element as the error message if the validation result is false
+ */
+ public Pair<Boolean, String> validateValue(String value);
+
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
index 4047d9a..5abbd91 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,6 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.translator;
import java.util.ArrayList;
@@ -29,6 +30,7 @@
import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
import edu.uci.ics.asterix.common.annotations.IRecordFieldDataGen;
import edu.uci.ics.asterix.common.annotations.RecordDataGenAnnotation;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
@@ -89,41 +91,45 @@
throw new AlgebricksException("Cannot redefine builtin type " + tdname + " .");
}
TypeSignature typeSignature = new TypeSignature(typeDataverse, tdname);
- switch (texpr.getTypeKind()) {
- case TYPEREFERENCE: {
- TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
- IAType t = solveTypeReference(typeSignature, typeMap);
- if (t != null) {
- typeMap.put(typeSignature, t);
- } else {
- addIncompleteTopLevelTypeReference(tdname, tre, incompleteTopLevelTypeReferences, typeDataverse);
+ try {
+ switch (texpr.getTypeKind()) {
+ case TYPEREFERENCE: {
+ TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
+ IAType t = solveTypeReference(typeSignature, typeMap);
+ if (t != null) {
+ typeMap.put(typeSignature, t);
+ } else {
+ addIncompleteTopLevelTypeReference(tdname, tre, incompleteTopLevelTypeReferences, typeDataverse);
+ }
+ break;
}
- break;
+ case RECORD: {
+ RecordTypeDefinition rtd = (RecordTypeDefinition) texpr;
+ ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
+ incompleteItemTypes, typeDataverse);
+ typeMap.put(typeSignature, recType);
+ break;
+ }
+ case ORDEREDLIST: {
+ OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
+ AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
+ incompleteFieldTypes, typeDataverse);
+ typeMap.put(typeSignature, olType);
+ break;
+ }
+ case UNORDEREDLIST: {
+ UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
+ AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap,
+ incompleteItemTypes, incompleteFieldTypes, typeDataverse);
+ typeMap.put(typeSignature, ulType);
+ break;
+ }
+ default: {
+ throw new IllegalStateException();
+ }
}
- case RECORD: {
- RecordTypeDefinition rtd = (RecordTypeDefinition) texpr;
- ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
- incompleteItemTypes, typeDataverse);
- typeMap.put(typeSignature, recType);
- break;
- }
- case ORDEREDLIST: {
- OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
- AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
- incompleteFieldTypes, typeDataverse);
- typeMap.put(typeSignature, olType);
- break;
- }
- case UNORDEREDLIST: {
- UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
- AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap, incompleteItemTypes,
- incompleteFieldTypes, typeDataverse);
- typeMap.put(typeSignature, ulType);
- break;
- }
- default: {
- throw new IllegalStateException();
- }
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
}
}
@@ -181,7 +187,7 @@
}
t = dt.getDatatype();
} else {
- t = typeMap.get(typeSignature);
+ t = typeMap.get(typeSignature);
}
for (AbstractCollectionType act : incompleteItemTypes.get(typeSignature)) {
act.setItemType(t);
@@ -191,7 +197,8 @@
private static AOrderedListType computeOrderedListType(TypeSignature typeSignature, OrderedListTypeDefinition oltd,
Map<TypeSignature, IAType> typeMap, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse) {
+ Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
+ throws AsterixException {
TypeExpression tExpr = oltd.getItemTypeExpression();
String typeName = typeSignature != null ? typeSignature.getName() : null;
AOrderedListType aolt = new AOrderedListType(null, typeName);
@@ -202,7 +209,8 @@
private static AUnorderedListType computeUnorderedListType(TypeSignature typeSignature,
UnorderedListTypeDefinition ultd, Map<TypeSignature, IAType> typeMap,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaulDataverse) {
+ Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaulDataverse)
+ throws AsterixException {
TypeExpression tExpr = ultd.getItemTypeExpression();
String typeName = typeSignature != null ? typeSignature.getName() : null;
AUnorderedListType ault = new AUnorderedListType(null, typeName);
@@ -213,7 +221,7 @@
private static void setCollectionItemType(TypeExpression tExpr, Map<TypeSignature, IAType> typeMap,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, AbstractCollectionType act,
- String defaultDataverse) {
+ String defaultDataverse) throws AsterixException {
switch (tExpr.getTypeKind()) {
case ORDEREDLIST: {
OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) tExpr;
@@ -306,7 +314,8 @@
private static ARecordType computeRecordType(TypeSignature typeSignature, RecordTypeDefinition rtd,
Map<TypeSignature, IAType> typeMap, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
- Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse) {
+ Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse)
+ throws AsterixException {
List<String> names = rtd.getFieldNames();
int n = names.size();
String[] fldNames = new String[n];
@@ -318,14 +327,14 @@
boolean isOpen = rtd.getRecordKind() == RecordKind.OPEN;
ARecordType recType = new ARecordType(typeSignature == null ? null : typeSignature.getName(), fldNames,
fldTypes, isOpen);
-
+
List<IRecordFieldDataGen> fieldDataGen = rtd.getFieldDataGen();
if (fieldDataGen.size() == n) {
IRecordFieldDataGen[] rfdg = new IRecordFieldDataGen[n];
rfdg = fieldDataGen.toArray(rfdg);
recType.getAnnotations().add(new RecordDataGenAnnotation(rfdg, rtd.getUndeclaredFieldsDataGen()));
}
-
+
for (int j = 0; j < n; j++) {
TypeExpression texpr = rtd.getFieldTypes().get(j);
switch (texpr.getTypeKind()) {
diff --git a/asterix-app/data/hdfs/asterix_info.txt b/asterix-app/data/hdfs/asterix_info.txt
new file mode 100644
index 0000000..a9a5596
--- /dev/null
+++ b/asterix-app/data/hdfs/asterix_info.txt
@@ -0,0 +1,4 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/large_text b/asterix-app/data/hdfs/large_text
new file mode 100644
index 0000000..31a394d
--- /dev/null
+++ b/asterix-app/data/hdfs/large_text
@@ -0,0 +1,136 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/obamatweets.adm b/asterix-app/data/hdfs/obamatweets.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/data/hdfs/obamatweets.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/data/hdfs/textFileS b/asterix-app/data/hdfs/textFileS
new file mode 100644
index 0000000..4bd0604
--- /dev/null
+++ b/asterix-app/data/hdfs/textFileS
Binary files differ
diff --git a/asterix-app/data/temporal/temporalData.json b/asterix-app/data/temporal/temporalData.json
new file mode 100644
index 0000000..520fd4b
--- /dev/null
+++ b/asterix-app/data/temporal/temporalData.json
@@ -0,0 +1,3 @@
+{"id": "001", "dateField": date("-2012-12-12"), "dateFieldPlus": date("0990-01-01"), "timeField": time("23:49:12.39Z"), "timeFieldPlus": time("03:23:12.2"), "datetimeField": datetime("2012-12-12T00:00:00.001"), "datetimeFieldPlus": datetime("-00130810T221015398"), "durationField": duration("P20Y19DT3H74M23.34S"), "durationFieldPlus": duration("-P2MT4M300.68S"), "intervalField": dtinterval("2012-12-12T00:00:00.001,20130810T221015398") }
+{"id": "002", "datetimeField": datetime("19201220T232918478") }
+{"id": "003", "intervalPlus": tinterval("19:23:32.328Z,23:20:20") }
\ No newline at end of file
diff --git a/asterix-app/data/temporal/temporalData.txt b/asterix-app/data/temporal/temporalData.txt
new file mode 100644
index 0000000..9ce94f5
--- /dev/null
+++ b/asterix-app/data/temporal/temporalData.txt
@@ -0,0 +1,4 @@
+001|-2012-12-12|23:49:12.39Z|3827-12-12T11:43:29.329|P20Y19DT3H74M23.34S
+002|1993-12-12|03:32:00|-2012-12-12T05:00:23.071|P20Y19D
+003|1839-03-12|12:30:49.382|1012-06-12T00:37:00|PT3H74M23.34S
+999|0003-11-02|23:19:32.382Z|2012-12-12T00:00:00.001|P20YT300H9.34S
diff --git a/asterix-app/data/twitter/obamatweets.adm b/asterix-app/data/twitter/obamatweets.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/data/twitter/obamatweets.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index b47aadc..33a718f 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -6,9 +6,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-app</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -17,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
<plugin>
@@ -129,17 +128,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java
deleted file mode 100644
index cad8760..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ADMCursor.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.om.base.ABinary;
-import edu.uci.ics.asterix.om.base.ABitArray;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ACircle;
-import edu.uci.ics.asterix.om.base.ADate;
-import edu.uci.ics.asterix.om.base.ADateTime;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.ADuration;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.ALine;
-import edu.uci.ics.asterix.om.base.APoint;
-import edu.uci.ics.asterix.om.base.APoint3D;
-import edu.uci.ics.asterix.om.base.APolygon;
-import edu.uci.ics.asterix.om.base.ARecord;
-import edu.uci.ics.asterix.om.base.ARectangle;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.IACollection;
-import edu.uci.ics.asterix.om.base.IACursor;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.IAType;
-
-/**
- * This class is the implementation of IADMCursor. This class supports iterating
- * over all objects in ASTERIX. All ASTERIX objects can be iterated over and
- * returned via the associated get<Type>() call.
- *
- * @author zheilbron
- */
-public class ADMCursor implements IADMCursor {
- protected IAObject currentObject;
- protected IACursor collectionCursor;
- private boolean readOnce;
-
- public ADMCursor(IAObject currentObject) {
- setCurrentObject(currentObject);
- }
-
- public boolean next() throws AQLJException {
- if (collectionCursor != null) {
- boolean next = collectionCursor.next();
- if (next) {
- currentObject = collectionCursor.get();
- }
- return next;
- } else if (currentObject == null) {
- return false;
- } else {
- if (!readOnce) {
- readOnce = true;
- return true;
- }
- }
- return false;
- }
-
- @Override
- public void position(IADMCursor c) throws AQLJException {
- ((ADMCursor) c).setCurrentObject(currentObject);
- }
-
- @Override
- public void position(IADMCursor c, String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- ((ADMCursor) c).setCurrentObject(o);
- }
-
- private IAObject getObjectByField(String field) throws AQLJException {
- ATypeTag tag = currentObject.getType().getTypeTag();
- if (tag != ATypeTag.RECORD) {
- throw new AQLJException("object of type " + tag + " has no fields");
- }
- ARecord curRecord = (ARecord) currentObject;
- ARecordType t = curRecord.getType();
- int idx = t.findFieldPosition(field);
- if (idx == -1) {
- return null;
- }
- IAObject o = curRecord.getValueByPos(idx);
- return o;
- }
-
- public void setCurrentObject(IAObject o) {
- readOnce = false;
- currentObject = o;
- if (currentObject != null) {
- if (currentObject.getType() instanceof AbstractCollectionType) {
- collectionCursor = ((IACollection) currentObject).getCursor();
- }
- }
- }
-
- private void checkTypeTag(IAObject o, ATypeTag expectedTag) throws AQLJException {
- ATypeTag actualTag;
- actualTag = o.getType().getTypeTag();
-
- if (actualTag != expectedTag) {
- throw new AQLJException("cannot get " + expectedTag + " when type is " + actualTag);
- }
- }
-
- @Override
- public ABinary getBinary() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.BINARY);
- return ((ABinary) currentObject);
- }
-
- @Override
- public ABinary getBinary(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.BINARY);
- return (ABinary) o;
- }
-
- @Override
- public ABitArray getBitArray() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.BITARRAY);
- return ((ABitArray) currentObject);
- }
-
- @Override
- public ABitArray getBitArray(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.BITARRAY);
- return (ABitArray) o;
- }
-
- @Override
- public ABoolean getBoolean() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.BOOLEAN);
- return ((ABoolean) currentObject);
- }
-
- @Override
- public ABoolean getBoolean(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.BOOLEAN);
- return (ABoolean) o;
- }
-
- @Override
- public ACircle getCircle() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.CIRCLE);
- return ((ACircle) currentObject);
- }
-
- @Override
- public ACircle getCircle(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.CIRCLE);
- return (ACircle) o;
- }
-
- @Override
- public ADate getDate() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.DATE);
- return ((ADate) currentObject);
- }
-
- @Override
- public ADate getDate(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.DATE);
- return (ADate) o;
- }
-
- @Override
- public ADateTime getDateTime() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.DATETIME);
- return ((ADateTime) currentObject);
- }
-
- @Override
- public ADateTime getDateTime(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.DATETIME);
- return (ADateTime) o;
- }
-
- @Override
- public ADouble getDouble() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.DOUBLE);
- return ((ADouble) currentObject);
- }
-
- @Override
- public ADouble getDouble(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.DOUBLE);
- return (ADouble) o;
- }
-
- @Override
- public ADuration getDuration() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.DURATION);
- return ((ADuration) currentObject);
- }
-
- @Override
- public ADuration getDuration(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.DURATION);
- return (ADuration) o;
- }
-
- @Override
- public AFloat getFloat() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.FLOAT);
- return ((AFloat) currentObject);
- }
-
- @Override
- public AFloat getFloat(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.FLOAT);
- return (AFloat) o;
- }
-
- @Override
- public AInt8 getInt8() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.INT8);
- return ((AInt8) currentObject);
- }
-
- @Override
- public AInt8 getInt8(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.INT8);
- return (AInt8) o;
- }
-
- @Override
- public AInt16 getInt16() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.INT16);
- return ((AInt16) currentObject);
- }
-
- @Override
- public AInt16 getInt16(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.INT16);
- return (AInt16) o;
- }
-
- @Override
- public AInt32 getInt32() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.INT32);
- return ((AInt32) currentObject);
- }
-
- @Override
- public AInt32 getInt32(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.INT32);
- return (AInt32) o;
- }
-
- @Override
- public AInt64 getInt64() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.INT64);
- return ((AInt64) currentObject);
- }
-
- @Override
- public AInt64 getInt64(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.INT64);
- return (AInt64) o;
- }
-
- @Override
- public ALine getLine() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.LINE);
- return ((ALine) currentObject);
- }
-
- @Override
- public ALine getLine(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.LINE);
- return (ALine) o;
- }
-
- @Override
- public APoint getPoint() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.POINT);
- return ((APoint) currentObject);
- }
-
- @Override
- public APoint getPoint(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.POINT);
- return (APoint) o;
- }
-
- @Override
- public APoint3D getPoint3D() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.POINT3D);
- return ((APoint3D) currentObject);
- }
-
- @Override
- public APoint3D getPoint3D(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.POINT3D);
- return (APoint3D) o;
- }
-
- @Override
- public APolygon getPolygon() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.POLYGON);
- return ((APolygon) currentObject);
- }
-
- @Override
- public APolygon getPolygon(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.POLYGON);
- return (APolygon) o;
- }
-
- @Override
- public ARectangle getRectangle() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.RECTANGLE);
- return ((ARectangle) currentObject);
- }
-
- @Override
- public ARectangle getRectangle(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.RECTANGLE);
- return (ARectangle) o;
- }
-
- @Override
- public AString getString() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.STRING);
- return ((AString) currentObject);
- }
-
- @Override
- public AString getString(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.STRING);
- return (AString) o;
- }
-
- @Override
- public ATime getTime() throws AQLJException {
- checkTypeTag(currentObject, ATypeTag.TIME);
- return ((ATime) currentObject);
- }
-
- @Override
- public ATime getTime(String field) throws AQLJException {
- IAObject o = getObjectByField(field);
- checkTypeTag(o, ATypeTag.TIME);
- return (ATime) o;
- }
-
- public IAType getType() {
- if (currentObject != null) {
- return currentObject.getType();
- }
- return null;
- }
-
- @Override
- public IAObject get() {
- return currentObject;
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java
deleted file mode 100644
index a1a077b..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJClientDriver.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * This class encapsulates the mechanism for creating a connection to an ASTERIX
- * server.
- *
- * @author zheilbron
- */
-public class AQLJClientDriver {
- /**
- * Get a connection to the ASTERIX server.
- *
- * @param host
- * the ip or hostname of the ASTERIX server
- * @param port
- * the port of the ASTERIX server (default: 14600)
- * @param dataverse
- * the name of the dataverse to use for any AQL statements
- * @return an IAQLJConnection object representing the connection to ASTERIX
- * @throws AQLJException
- */
- public static IAQLJConnection getConnection(String host, int port, String dataverse) throws AQLJException {
- return new AQLJConnection(host, port, dataverse);
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java
deleted file mode 100644
index c597450..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJConnection.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-
-/**
- * This class is the implementation of IAQLJConnection and is the means for
- * communication between a client and an ASTERIX server. The messages passed
- * through this connection conform to the AQLJ protocol.
- *
- * @author zheilbron
- */
-public class AQLJConnection implements IAQLJConnection {
- private final String dataverse;
- private final AQLJStream aqljStream;
-
- public AQLJConnection(String host, int port, String dataverse) throws AQLJException {
- this.dataverse = dataverse;
-
- try {
- aqljStream = new AQLJStream(host, port);
- } catch (IOException e) {
- throw new AQLJException("Could not connect to " + host + ":" + port);
- }
-
- startup();
- }
-
- private void startup() throws AQLJException {
- sendStartupMessage(dataverse);
- getStartupResponse();
- }
-
- private void sendStartupMessage(String dataverse) throws AQLJException {
- try {
- byte[] dvBytes = dataverse.getBytes("UTF-8");
- // 4 for the message length, 1 for the message type, 2 for the
- // string length
- aqljStream.sendUnsignedInt32(4 + 1 + 2 + dvBytes.length);
- aqljStream.sendChar(AQLJProtocol.STARTUP_MESSAGE);
- aqljStream.sendInt16(dvBytes.length);
- aqljStream.send(dvBytes);
- aqljStream.flush();
- } catch (IOException e) {
- throw new AQLJException(e);
- }
- }
-
- private void getStartupResponse() throws AQLJException {
- try {
- aqljStream.receiveUnsignedInt32();
- int messageType = aqljStream.receiveChar();
- switch (messageType) {
- case AQLJProtocol.READY_MESSAGE:
- break;
- case AQLJProtocol.ERROR_MESSAGE:
- String err = aqljStream.receiveString();
- throw new AQLJException(err);
- default:
- throw new AQLJException("Error: unable to parse message from server");
- }
- } catch (IOException e) {
- throw new AQLJException(e);
- }
- }
-
- @Override
- public IAQLJResult execute(String stmt) throws AQLJException {
- sendExecute(stmt);
- return fetchResults();
- }
-
- private AQLJResult fetchResults() throws AQLJException {
- long len;
- int messageType;
-
- ResultBuffer rb = null;
- while (true) {
- try {
- len = aqljStream.receiveUnsignedInt32();
- messageType = aqljStream.receiveChar();
- switch (messageType) {
- case AQLJProtocol.DATA_MESSAGE:
- // DataRecord
- if (rb == null) {
- rb = new ResultBuffer();
- }
- rb.appendMessage(aqljStream, (int) (len - 5));
- break;
- case AQLJProtocol.EXECUTE_COMPLETE_MESSAGE:
- // ExecuteComplete
- return new AQLJResult(rb);
- case AQLJProtocol.ERROR_MESSAGE:
- // Error
- throw new AQLJException(aqljStream.receiveString());
- default:
- throw new AQLJException("Error: received unknown message type from server");
- }
- } catch (IOException e) {
- throw new AQLJException(e);
- }
- }
-
- }
-
- private void sendExecute(String stmt) throws AQLJException {
- try {
- byte[] stmtBytes = stmt.getBytes("UTF-8");
- // 4 for the message length, 1 for the message type, 2 for the
- // string length
- aqljStream.sendUnsignedInt32(4 + 1 + 2 + stmtBytes.length);
- aqljStream.sendChar(AQLJProtocol.EXECUTE_MESSAGE);
- aqljStream.sendInt16(stmtBytes.length);
- aqljStream.send(stmtBytes);
- aqljStream.flush();
- } catch (IOException e) {
- throw new AQLJException(e);
- }
- }
-
- @Override
- public void close() throws IOException {
- aqljStream.close();
- }
-
- @Override
- public IADMCursor createADMCursor() {
- return new ADMCursor(null);
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java
deleted file mode 100644
index 63114ce..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/AQLJResult.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * This class is special type of ADMCursor in that it has a result buffer
- * associated with it. It can be thought of as the "base" cursor for some ADM
- * results.
- *
- * @author zheilbron
- */
-public class AQLJResult extends ADMCursor implements IAQLJResult {
- private final ResultBuffer resultBuffer;
-
- public AQLJResult(ResultBuffer buffer) {
- super(null);
- this.resultBuffer = buffer;
- }
-
- @Override
- public boolean next() throws AQLJException {
- currentObject = resultBuffer.get();
- if (currentObject == null) {
- return false;
- }
- return true;
- }
-
- public void close() throws IOException {
- resultBuffer.close();
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java
deleted file mode 100644
index a7500c9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IADMCursor.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.om.base.ABinary;
-import edu.uci.ics.asterix.om.base.ABitArray;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ACircle;
-import edu.uci.ics.asterix.om.base.ADate;
-import edu.uci.ics.asterix.om.base.ADateTime;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.ADuration;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.ALine;
-import edu.uci.ics.asterix.om.base.APoint;
-import edu.uci.ics.asterix.om.base.APoint3D;
-import edu.uci.ics.asterix.om.base.APolygon;
-import edu.uci.ics.asterix.om.base.ARectangle;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.IAType;
-
-/**
- * The mechanism by which results are iterated over. Results from ASTERIX may
- * come in the form of a set of objects which may either be primitives (e.g.
- * int, string, ...), collections (e.g. ordered lists, unordered lists, ...),
- * records, or some combination thereof.
- *
- * @author zheilbron
- */
-public interface IADMCursor {
- public ABinary getBinary() throws AQLJException;
-
- public ABinary getBinary(String field) throws AQLJException;
-
- public ABitArray getBitArray() throws AQLJException;
-
- public ABitArray getBitArray(String field) throws AQLJException;
-
- public ABoolean getBoolean() throws AQLJException;
-
- public ABoolean getBoolean(String field) throws AQLJException;
-
- public ACircle getCircle() throws AQLJException;
-
- public ACircle getCircle(String field) throws AQLJException;
-
- public ADate getDate() throws AQLJException;
-
- public ADate getDate(String field) throws AQLJException;
-
- public ADateTime getDateTime() throws AQLJException;
-
- public ADateTime getDateTime(String field) throws AQLJException;
-
- public ADouble getDouble() throws AQLJException;
-
- public ADouble getDouble(String field) throws AQLJException;
-
- public ADuration getDuration() throws AQLJException;
-
- public ADuration getDuration(String field) throws AQLJException;
-
- public AFloat getFloat() throws AQLJException;
-
- public AFloat getFloat(String field) throws AQLJException;
-
- public AInt8 getInt8() throws AQLJException;
-
- public AInt8 getInt8(String field) throws AQLJException;
-
- public AInt16 getInt16() throws AQLJException;
-
- public AInt16 getInt16(String field) throws AQLJException;
-
- public AInt32 getInt32() throws AQLJException;
-
- public AInt32 getInt32(String field) throws AQLJException;
-
- public AInt64 getInt64() throws AQLJException;
-
- public AInt64 getInt64(String field) throws AQLJException;
-
- public ALine getLine() throws AQLJException;
-
- public ALine getLine(String field) throws AQLJException;
-
- public APoint getPoint() throws AQLJException;
-
- public APoint getPoint(String field) throws AQLJException;
-
- public APoint3D getPoint3D() throws AQLJException;
-
- public APoint3D getPoint3D(String field) throws AQLJException;
-
- public APolygon getPolygon() throws AQLJException;
-
- public APolygon getPolygon(String field) throws AQLJException;
-
- public ARectangle getRectangle() throws AQLJException;
-
- public ARectangle getRectangle(String field) throws AQLJException;
-
- public AString getString(String field) throws AQLJException;
-
- public AString getString() throws AQLJException;
-
- public ATime getTime() throws AQLJException;
-
- public ATime getTime(String field) throws AQLJException;
-
- /**
- * Advances the cursor to the next object
- *
- * @return true if the cursor points to a an object
- * @throws AQLJException
- */
- public boolean next() throws AQLJException;
-
- /**
- * Positions the cursor c on the object pointed to by this
- *
- * @param c
- * the cursor to position
- * @throws AQLJException
- */
- public void position(IADMCursor c) throws AQLJException;
-
- /**
- * Positions the cursor c on the object associated with the given field
- *
- * @param c
- * the cursor to position
- * @param field
- * the field name
- * @throws AQLJException
- */
- public void position(IADMCursor c, String field) throws AQLJException;
-
- /**
- * Returns the type of the current object being pointed at, which may be
- * null.
- *
- * @return the type of the current object
- */
- public IAType getType();
-
- /**
- * Returns the current object being pointed at, which may be null.
- *
- * @return the current object
- */
- public IAObject get();
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java
deleted file mode 100644
index 8fdf59d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJConnection.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-
-/**
- * The connection (session) that serves as the context for communicating with
- * ASTERIX.
- *
- * @author zheilbron
- */
-public interface IAQLJConnection {
- /**
- * Execute an AQL statement that returns an IAQLJResult. The IAQLJResult
- * will contain all associated results of the AQL statement.
- *
- * @param stmt
- * the AQL statement
- * @return the results of the AQL statement as an IAQLJResult
- * @throws AQLJException
- */
- public IAQLJResult execute(String stmt) throws AQLJException;
-
- /**
- * Create a cursor to iterate over results
- *
- * @return an unpositioned cursor
- */
- public IADMCursor createADMCursor();
-
- /**
- * Close the connection with the server.
- */
- public void close() throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java
deleted file mode 100644
index b28b3c6..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/IAQLJResult.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.IOException;
-
-/**
- * The results associated with an AQL statement.
- *
- * @author zheilbron
- */
-public interface IAQLJResult extends IADMCursor {
- /**
- * Close the cursor and discard any associated results.
- * It's important to ensure that this method is called in order to free up
- * the associated result buffer.
- */
- public void close() throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java
deleted file mode 100644
index 9fddad5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/client/ResultBuffer.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.client;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-/**
- * This class supports the buffering of results that are received from the
- * server. The results are buffered completely to a file on disk. The results
- * that are sent back should contain a serde in order to read the results back
- * in. To see the expected format refer to {@link edu.uci.ics.algebricks.runtime.hyracks.writers.SerializedDataWriterFactory} .
- *
- * @author zheilbron
- */
-public class ResultBuffer {
- private static final Logger LOGGER = Logger.getLogger(ResultBuffer.class.getName());
-
- private static final int BUF_SIZE = 8192;
-
- private final byte[] buffer;
- private final File tmpFile;
- private final FileOutputStream fos;
- private final FileInputStream fis;
- private final DataInputStream dis;
-
- private ObjectInputStream ois;
- private ISerializerDeserializer serde;
-
- public ResultBuffer() throws IOException {
- buffer = new byte[BUF_SIZE];
- tmpFile = File.createTempFile("aqlj", null, new File(System.getProperty("java.io.tmpdir")));
- fos = new FileOutputStream(tmpFile);
- fis = new FileInputStream(tmpFile);
- dis = new DataInputStream(fis);
- serde = null;
- }
-
- private RecordDescriptor getRecordDescriptor() throws AQLJException {
- RecordDescriptor rd;
- try {
- ois = new ObjectInputStream(fis);
- } catch (IOException e) {
- throw new AQLJException(e);
- }
- try {
- rd = (RecordDescriptor) ois.readObject();
- } catch (IOException e) {
- throw new AQLJException(e);
- } catch (ClassNotFoundException e) {
- throw new AQLJException(e);
- }
- return rd;
- }
-
- public IAObject get() throws AQLJException {
- Object o;
-
- if (serde == null) {
- serde = getRecordDescriptor().getFields()[0];
- }
-
- try {
- o = serde.deserialize(dis);
- } catch (HyracksDataException e) {
- // this is expected behavior... we know when we've reached the end
- // of the
- // results when a EOFException (masked by the HyracksDataException)
- // is thrown
- o = null;
- }
-
- return (IAObject) o;
- }
-
- public void appendMessage(AQLJStream aqljStream, long len) throws IOException {
- long pos = 0;
- long read = 0;
- long remaining = 0;
-
- while (pos < len) {
- remaining = len - pos;
- read = remaining > BUF_SIZE ? BUF_SIZE : remaining;
- aqljStream.receive(buffer, 0, (int) read);
- pos += read;
- fos.write(buffer, 0, (int) read);
- }
- }
-
- public void close() throws IOException {
- // remove the file!
- if (tmpFile.exists()) {
- tmpFile.delete();
- }
- fos.close();
- fis.close();
- dis.close();
- if (ois != null) {
- ois.close();
- }
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java
deleted file mode 100644
index 83ef0e5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJProtocol.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.common;
-
-/**
- * This class provides constants for message types in the AQLJ protocol.
- *
- * @author zheilbron
- */
-public abstract class AQLJProtocol {
- public static final char STARTUP_MESSAGE = 'S';
- public static final char EXECUTE_MESSAGE = 'X';
- public static final char READY_MESSAGE = 'R';
- public static final char ERROR_MESSAGE = 'E';
- public static final char EXECUTE_COMPLETE_MESSAGE = 'C';
- public static final char DATA_MESSAGE = 'D';
- public static final char GET_RESULTS_MESSAGE = 'G';
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java
deleted file mode 100644
index c595284..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJStream.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.common;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.EOFException;
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * This class provides a clean mechanism for sending and receiving the data
- * types involved in the AQLJ protocol.
- *
- * @author zheilbron
- */
-public class AQLJStream {
- private static final int BUF_SIZE = 8192;
-
- private final String host;
- private final int port;
- private final Socket connection;
- private final BufferedInputStream aqljInput;
- private final BufferedOutputStream aqljOutput;
-
- private final byte[] int16Buf;
- private final byte[] int32Buf;
-
- public AQLJStream(String host, int port) throws IOException {
- this.host = host;
- this.port = port;
-
- connection = new Socket(host, port);
-
- aqljInput = new BufferedInputStream(connection.getInputStream(), BUF_SIZE);
- aqljOutput = new BufferedOutputStream(connection.getOutputStream(), BUF_SIZE);
-
- int16Buf = new byte[2];
- int32Buf = new byte[4];
- }
-
- public AQLJStream(Socket sock) throws IOException {
- this.host = null;
- this.port = 0;
-
- this.connection = sock;
- aqljInput = new BufferedInputStream(connection.getInputStream(), BUF_SIZE);
- aqljOutput = new BufferedOutputStream(connection.getOutputStream(), BUF_SIZE);
-
- int16Buf = new byte[2];
- int32Buf = new byte[4];
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- public Socket getSocket() {
- return connection;
- }
-
- public void receive(byte[] buf, int off, int len) throws IOException {
- int read;
- int count = 0;
- while (count < len) {
- read = aqljInput.read(buf, off + count, len - count);
- if (read < 0) {
- throw new EOFException();
- }
- count += read;
- }
- }
-
- public byte[] receive(int len) throws IOException {
- byte[] result = new byte[len];
- receive(result, 0, len);
- return result;
- }
-
- public int receiveInt16() throws IOException {
- if (aqljInput.read(int16Buf) != 2) {
- throw new EOFException();
- }
- return (int16Buf[0] & 0xff) << 8 | (int16Buf[1] & 0xff);
- }
-
- public long receiveUnsignedInt32() throws IOException {
- if (aqljInput.read(int32Buf) != 4) {
- throw new EOFException();
- }
- return ((int32Buf[0] & 0xff) << 24 | (int32Buf[1] & 0xff) << 16 | (int32Buf[2] & 0xff) << 8 | (int32Buf[3] & 0xff)) & 0x00000000ffffffffl;
- }
-
- public int receiveChar() throws IOException {
- int c = aqljInput.read();
- if (c < 0) {
- throw new EOFException();
- }
- return c;
- }
-
- public String receiveString() throws IOException {
- int strlen = receiveInt16();
- return new String(receive(strlen), "UTF8");
- }
-
- public void send(byte[] buf) throws IOException {
- aqljOutput.write(buf);
- }
-
- public void send(byte[] buf, int off, int len) throws IOException {
- aqljOutput.write(buf, off, len);
- }
-
- public void sendInt16(int val) throws IOException {
- int16Buf[0] = (byte) (val >>> 8);
- int16Buf[1] = (byte) (val);
- aqljOutput.write(int16Buf);
- }
-
- public void sendUnsignedInt32(long val) throws IOException {
- int32Buf[0] = (byte) (val >>> 24);
- int32Buf[1] = (byte) (val >>> 16);
- int32Buf[2] = (byte) (val >>> 8);
- int32Buf[3] = (byte) (val);
- aqljOutput.write(int32Buf);
- }
-
- public void sendChar(int c) throws IOException {
- aqljOutput.write(c);
- }
-
- public void sendString(byte[] strBytes) throws IOException {
- sendInt16(strBytes.length);
- send(strBytes);
- }
-
- public void sendString(String str) throws IOException {
- byte[] strBytes = str.getBytes("UTF8");
- sendInt16(strBytes.length);
- send(strBytes);
- }
-
- public void flush() throws IOException {
- aqljOutput.flush();
- }
-
- public void close() throws IOException {
- aqljInput.close();
- aqljOutput.close();
- connection.close();
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java
deleted file mode 100644
index 98fe0a1..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThread.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.net.Socket;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.aql.translator.QueryResult;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixProperties;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-/**
- * This class is the client handler for the APIServer. The AQLJ protocol is used
- * for communicating with the client. The client, for example, may send a
- * message to execute a set containing one or more AQL statements. It is up to this class to process each
- * AQL statement (in the original order) and pass back the results, if any, to the client.
- *
- * @author zheilbron
- */
-public class APIClientThread extends Thread {
- private static final Logger LOGGER = Logger.getLogger(APIClientThread.class.getName());
-
- private static final int RESULT_BUF_SIZE = 8192;
-
- private final IHyracksClientConnection hcc;
- private final ICCApplicationContext appContext;
- private final AQLJStream clientStream;
- private final String outputFilePath;
- private final String outputNodeName;
- private final String binaryOutputClause;
-
- private String outputNodeIP;
- private AQLJStream nodeDataServerStream;
- private int nodeDataServerPort;
- private String dataverse;
-
- public APIClientThread(IHyracksClientConnection hcc, Socket clientSocket, ICCApplicationContext appCtx)
- throws IOException {
- this.hcc = hcc;
- clientStream = new AQLJStream(clientSocket);
- this.appContext = appCtx;
- outputNodeName = AsterixProperties.INSTANCE.getMetadataNodeName();
- outputFilePath = AsterixProperties.INSTANCE.getOutputDir() + System.currentTimeMillis() + ".adm";
-
- Map<String, Set<String>> nodeNameMap = new HashMap<String, Set<String>>();
- try {
- this.appContext.getCCContext().getIPAddressNodeMap(nodeNameMap);
- } catch (Exception e) {
- throw new IOException(" unable to obtain IP address node map", e);
- }
- for (String ip : nodeNameMap.keySet()) {
- Set<String> nodeNames = nodeNameMap.get(ip);
- if (nodeNames.contains(outputNodeName)) {
- outputNodeIP = ip;
- break;
- }
- }
-
- nodeDataServerPort = CCBootstrapImpl.DEFAULT_API_NODEDATA_SERVER_PORT;
- nodeDataServerStream = null;
-
- // the "write output..." clause is inserted into incoming AQL statements
- binaryOutputClause = "write output to " + outputNodeName + ":\"" + outputFilePath
- + "\" using \"edu.uci.ics.hyracks.algebricks.runtime.writers.SerializedDataWriterFactory\";";
-
- }
-
- private void startup() throws IOException {
- int messageType;
-
- clientStream.receiveUnsignedInt32();
- messageType = clientStream.receiveChar();
- dataverse = clientStream.receiveString();
- if (messageType == AQLJProtocol.STARTUP_MESSAGE) {
- // send Ready
- sendReady();
- } else {
- // send Error
- LOGGER.warning("Error: received message other than Startup. Exiting.");
- String err = "startup failed: no Startup message received";
- sendError(err);
- }
- }
-
- public void run() {
- String outputPath;
- int messageType;
-
- try {
- // startup phase
- startup();
-
- // normal execution phase
- while (true) {
- // check if we should close
- if (Thread.interrupted()) {
- close();
- return;
- }
-
- clientStream.receiveUnsignedInt32();
- messageType = clientStream.receiveChar();
- switch (messageType) {
- case AQLJProtocol.EXECUTE_MESSAGE:
- // Execute
- String query = clientStream.receiveString();
- String fullQuery = "use dataverse " + dataverse + ";\n" + binaryOutputClause + '\n' + query;
-
- try {
- outputPath = executeStatement(fullQuery);
- } catch (AQLJException e) {
- LOGGER.severe("Error occurred while executing query: " + fullQuery);
- LOGGER.severe(e.getMessage());
- sendError(e.getMessage());
- break;
- }
-
- if (outputPath == null) {
- // The query ran, but produced no results. This
- // means cardinality of the
- // result is 0 or "actions" were performed, where
- // actions are things like create
- // type, create dataset, etc.
- sendExecuteComplete();
- } else {
- // otherwise, there are some results, so send them
- // back to the client
- if (sendResults(outputPath)) {
- sendExecuteComplete();
- } else {
- String err = "Error: unable to retrieve results from " + outputNodeName;
- LOGGER.severe(err);
- sendError(err);
- }
- }
- break;
- default:
- String err = "Error: received unknown message of type " + (char) messageType;
- sendError(err);
- LOGGER.severe(err);
- close();
- return;
- }
- }
- } catch (IOException e) {
- // the normal path that is taken when exiting
- close();
- return;
- }
- }
-
- private void close() {
- try {
- if (nodeDataServerStream != null) {
- nodeDataServerStream.close();
- }
- } catch (IOException e) {
- LOGGER.severe("Error closing NodeData AQLJStream");
- LOGGER.severe(e.getMessage());
- }
- try {
- clientStream.close();
- } catch (IOException e) {
- LOGGER.severe("Error closing client AQLJStream");
- LOGGER.severe(e.getMessage());
- }
- }
-
- private String executeStatement(String stmt) throws IOException, AQLJException {
- List<QueryResult> executionResults = null;
- PrintWriter out = new PrintWriter(System.out);
- try {
- AQLParser parser = new AQLParser(new StringReader(stmt));
- List<Statement> statements = parser.Statement();
- SessionConfig pc = new SessionConfig(AsterixHyracksIntegrationUtil.DEFAULT_HYRACKS_CC_CLIENT_PORT, true,
- false, false, false, false, false, true, false);
-
- MetadataManager.INSTANCE.init();
- if (statements != null && statements.size() > 0) {
- AqlTranslator translator = new AqlTranslator(statements, out, pc, DisplayFormat.TEXT);
- executionResults = translator.compileAndExecute(hcc);
- }
- } catch (ParseException e) {
- e.printStackTrace();
- throw new AQLJException(e);
- } catch (AsterixException e) {
- e.printStackTrace();
- throw new AQLJException(e);
- } catch (AlgebricksException e) {
- e.printStackTrace();
- throw new AQLJException(e);
- } catch (Exception e) {
- e.printStackTrace();
- sendError(e.getMessage());
- }
- return executionResults.get(0).getResultPath();
-
- }
-
- private boolean sendResults(String path) throws IOException {
- int messageType;
- long len;
- int sent;
- int toSend;
- byte[] buf = new byte[RESULT_BUF_SIZE];
-
- if (nodeDataServerStream == null) {
- nodeDataServerStream = new AQLJStream(outputNodeIP, nodeDataServerPort);
- }
- sendGetResults(nodeDataServerStream);
-
- // forward data packets from the nodedataservers through this server to
- // the client
- while (true) {
- len = nodeDataServerStream.receiveUnsignedInt32();
- messageType = nodeDataServerStream.receiveChar();
- switch ((char) messageType) {
- case AQLJProtocol.DATA_MESSAGE:
- clientStream.sendUnsignedInt32(len);
- clientStream.sendChar(AQLJProtocol.DATA_MESSAGE);
- len -= 5;
- sent = 0;
- while (sent < len) {
- len -= sent;
- toSend = (len > buf.length) ? buf.length : (int) len;
- nodeDataServerStream.receive(buf, 0, toSend);
- clientStream.send(buf, 0, toSend);
- sent += toSend;
- }
- clientStream.flush();
- break;
- case AQLJProtocol.EXECUTE_COMPLETE_MESSAGE:
- nodeDataServerStream.close();
- nodeDataServerStream = null;
- return true;
- default:
- nodeDataServerStream.close();
- nodeDataServerStream = null;
- return false;
- }
- }
- }
-
- private void sendGetResults(AQLJStream s) throws IOException {
- byte[] pathBytes = outputFilePath.getBytes("UTF-8");
- // 4 for the message length, 1 for the message type, 2 for the string
- // length
- s.sendUnsignedInt32(4 + 1 + 2 + pathBytes.length);
- s.sendChar(AQLJProtocol.GET_RESULTS_MESSAGE);
- s.sendString(outputFilePath);
- s.flush();
- }
-
- private void sendReady() throws IOException {
- // 4 for the message length and 1 for the message type (4 + 1 = 5)
- clientStream.sendUnsignedInt32(5);
- clientStream.sendChar(AQLJProtocol.READY_MESSAGE);
- clientStream.flush();
- }
-
- private void sendError(String msg) throws IOException {
- byte[] msgBytes = msg.getBytes("UTF-8");
- // 4 for the message length, 1 for the message type, 2 for the string
- // length
- clientStream.sendUnsignedInt32(4 + 1 + 2 + msgBytes.length);
- clientStream.sendChar(AQLJProtocol.ERROR_MESSAGE);
- clientStream.sendInt16(msgBytes.length);
- clientStream.send(msgBytes);
- clientStream.flush();
- }
-
- private void sendExecuteComplete() throws IOException {
- // 4 for the message length and 1 for the message type (4 + 1 = 5)
- clientStream.sendUnsignedInt32(5);
- clientStream.sendChar(AQLJProtocol.EXECUTE_COMPLETE_MESSAGE);
- clientStream.flush();
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java
deleted file mode 100644
index 7f35af6..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/APIClientThreadFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-/**
- * This class is a factory for client handler threads of type {@link APIClientThread} and is used in conjunction with {@link ThreadedServer}.
- *
- * @author zheilbron
- */
-public class APIClientThreadFactory implements IClientThreadFactory {
- private final ICCApplicationContext appContext;
-
- private IHyracksClientConnection hcc;
-
- public APIClientThreadFactory(ICCApplicationContext appContext) throws Exception {
- this.appContext = appContext;
- hcc = new HyracksConnection(appContext.getCCContext().getClusterControllerInfo().getClientNetAddress(), appContext.getCCContext().getClusterControllerInfo()
- .getClientNetPort());
- }
-
- @Override
- public Thread createThread(Socket socket) throws IOException {
- return new APIClientThread(hcc, socket, appContext);
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java
deleted file mode 100644
index bca7f4d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/IClientThreadFactory.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * Implementing this interface allows a class such as {@link ThreadedServer} to
- * spawn a particular type of thread to handle some client connection.
- *
- * @author zheilbron
- */
-public interface IClientThreadFactory {
- public Thread createThread(Socket socket) throws IOException;
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java
deleted file mode 100644
index 0246fd9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThread.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.Socket;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.aqlj.common.AQLJProtocol;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJStream;
-
-/**
- * This class handles data requests from the APIServer. When a query is executed
- * through the API, the output is written to the local disk of some NC. The
- * APIServer will contact that NC and ask for the results of the query to be
- * sent. This class handles such communication between the NC and APIServer.
- *
- * @author zheilbron
- */
-public class NodeDataClientThread extends Thread {
- private static final Logger LOGGER = Logger.getLogger(NodeDataClientThread.class.getName());
-
- private static final int RESULT_BUFFER_SIZE = 8192;
-
- private final AQLJStream aqljStream;
-
- public NodeDataClientThread(Socket clientSocket) throws IOException {
- aqljStream = new AQLJStream(clientSocket);
- }
-
- public void run() {
- try {
- getFile();
- } catch (IOException e) {
- LOGGER.severe("I/O error occurred over AQLJStream (socket)");
- LOGGER.severe(e.getMessage());
- } finally {
- close();
- }
- }
-
- private void getFile() throws IOException {
- aqljStream.receiveUnsignedInt32();
- int type = aqljStream.receiveChar();
- if ((char) type != AQLJProtocol.GET_RESULTS_MESSAGE) {
- return;
- }
-
- String path = aqljStream.receiveString();
- File outputFile = new File(path);
- FileInputStream fis = null;
- try {
- fis = new FileInputStream(outputFile);
- } catch (FileNotFoundException e) {
- LOGGER.warning("Error: requested file not found: " + path);
- return;
- }
-
- byte[] buf = new byte[RESULT_BUFFER_SIZE];
- long maxPayload = 0xffffffffL - 5; // 2^32 (max size of payload) - 5
- // (header size)
- long remainingTotal = outputFile.length();
- long remainingInner = 0;
- int sentTotal = 0;
- int sentInner = 0;
- int toSend = 0;
-
- // the results may be large, so cram as much into a packet as possible
- while (remainingTotal > maxPayload) {
- aqljStream.sendUnsignedInt32(4 + 1 + maxPayload);
- aqljStream.sendChar(AQLJProtocol.DATA_MESSAGE);
- sentInner = 0;
- remainingInner = 0;
- while (sentInner < maxPayload) {
- remainingInner = maxPayload - sentInner;
- toSend = fis.read(buf, 0, (remainingInner > buf.length) ? buf.length : (int) remainingInner);
- sentInner += toSend;
- aqljStream.send(buf, 0, toSend);
- }
- aqljStream.flush();
- sentTotal += maxPayload;
- remainingTotal -= sentTotal;
- }
-
- // send the remaining data
- if (remainingTotal > 0) {
- aqljStream.sendUnsignedInt32(4 + 1 + (int) remainingTotal);
- aqljStream.sendChar(AQLJProtocol.DATA_MESSAGE);
- sentInner = 0;
- remainingInner = 0;
- while (sentInner < remainingTotal) {
- remainingInner = remainingTotal - sentInner;
- toSend = fis.read(buf, 0, (remainingInner > buf.length) ? buf.length : (int) remainingInner);
- sentInner += toSend;
- aqljStream.send(buf, 0, toSend);
- }
- aqljStream.flush();
- }
- outputFile.delete();
- aqljStream.sendUnsignedInt32(5);
- aqljStream.sendChar(AQLJProtocol.EXECUTE_COMPLETE_MESSAGE);
- aqljStream.flush();
- }
-
- private void close() {
- try {
- aqljStream.close();
- } catch (IOException e) {
- LOGGER.severe("Error closing AQLJStream");
- LOGGER.severe(e.getMessage());
- }
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java
deleted file mode 100644
index 22efa89..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/NodeDataClientThreadFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.Socket;
-
-/**
- * This class is a factory for client handler threads of type {@link NodeDataClientThread} and is used in conjunction with {@link ThreadedServer}.
- *
- * @author zheilbron
- */
-public class NodeDataClientThreadFactory implements IClientThreadFactory {
- @Override
- public Thread createThread(Socket socket) throws IOException {
- return new NodeDataClientThread(socket);
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java
deleted file mode 100644
index 573d6a3..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/server/ThreadedServer.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.aqlj.server;
-
-import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketException;
-import java.util.logging.Logger;
-
-/**
- * This server is a multithreaded server that spawns one connection per client
- * up to MAX_CLIENTS total clients. The type of thread spawned to handle each
- * client request is delegated to a client thread factory that implements the
- * IClientThreadFactory interface.
- * NOTE: The "BE" in logging messages stands for "back-end". This is to
- * differentiate from the "FE" or "front-end" when reviewing log messages.
- *
- * @author zheilbron
- */
-public class ThreadedServer extends Thread {
- private static Logger LOGGER = Logger.getLogger(ThreadedServer.class.getName());
-
- private static final int MAX_CLIENTS = 10;
-
- private final int port;
- private final IClientThreadFactory factory;
-
- private ServerSocket serverSocket;
- private Socket clientSocket;
- private Socket[] clientSockets;
- private Thread[] threads;
-
- public ThreadedServer(int port, IClientThreadFactory factory) {
- this.port = port;
- this.factory = factory;
- this.clientSockets = new Socket[MAX_CLIENTS];
- this.threads = new Thread[MAX_CLIENTS];
- this.clientSocket = null;
- }
-
- public void run() {
- try {
- serverSocket = new ServerSocket(port);
- } catch (IOException e) {
- LOGGER.severe("Error listening on port: " + port);
- LOGGER.severe(e.getMessage());
- return;
- }
- LOGGER.info("Server started. Listening on port: " + port);
-
- while (true) {
- try {
- clientSocket = serverSocket.accept();
- } catch (SocketException e) {
- // This is the normal path the server will take when exiting.
- //
- // In order to close the server down properly, the
- // serverSocket.accept() call must
- // be interrupted. The only way to interrupt the
- // serverSocket.accept() call in the loop
- // above is by calling serverSocket.close() (as is done in the
- // ThreadedServer.shutdown() method
- // below). The serverSocket.accept() then throws a
- // SocketException, so we catch it here
- // and assume that ThreadedServer.shutdown() was called.
-
- return;
- } catch (IOException e) {
- LOGGER.severe("Failed to accept() connection");
- LOGGER.severe(e.getMessage());
- }
-
- for (int i = 0; i < threads.length; i++) {
- if (threads[i] == null || !threads[i].isAlive()) {
- try {
- threads[i] = factory.createThread(clientSocket);
- } catch (IOException e) {
- LOGGER.severe("Failed to create client handler thread");
- LOGGER.severe(e.getMessage());
- }
- clientSockets[i] = clientSocket;
- threads[i].start();
- clientSocket = null;
- break;
- }
- }
-
- // setting the clientSocket to null is an indicator the there was
- // room for the
- // connection (i.e. the number of clients < MAX_CLIENTS). If it is
- // not set, then
- // there was no room for the connection, so the client is dropped.
- if (clientSocket != null) {
- try {
- clientSocket.close();
- } catch (IOException e) {
- LOGGER.severe("Error closing (dropped) client socket.");
- LOGGER.severe(e.getMessage());
- }
- LOGGER.warning("Client was dropped. Maximum number of connections reached!");
- }
- }
- }
-
- public void shutdown() {
- try {
- serverSocket.close();
- } catch (IOException e) {
- LOGGER.severe("Error closing server socket.");
- LOGGER.severe(e.getMessage());
- }
-
- try {
- for (int i = 0; i < threads.length; i++) {
- if (threads[i] != null && threads[i].isAlive()) {
- clientSockets[i].close();
- threads[i].interrupt();
- }
- }
- } catch (IOException e) {
- LOGGER.severe("Error closing client socket.");
- LOGGER.severe(e.getMessage());
- }
- }
-}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index 468bca6..73353dd 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.api.common;
import java.io.PrintWriter;
@@ -59,6 +73,10 @@
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
+/**
+ * Provides helper methods for compilation of a query into a JobSpec and submission
+ * to Hyracks through the Hyracks client interface.
+ */
public class APIFramework {
private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
@@ -270,7 +288,7 @@
OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
-
+
ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
if (pc.isOptimize()) {
compiler.optimize();
@@ -319,6 +337,7 @@
builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
AqlLogicalExpressionJobGen.INSTANCE));
builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
+ builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
builder.setNullWriterFactory(format.getNullWriterFactory());
builder.setPrinterProvider(format.getPrinterFactoryProvider());
builder.setSerializerDeserializerProvider(format.getSerdeProvider());
@@ -327,7 +346,7 @@
IJobletEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(asterixJobId,
isWriteTransaction);
- JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.INSTANCE, jobEventListenerFactory);
+ JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.getInstance(), jobEventListenerFactory);
if (pc.isPrintJob()) {
switch (pdf) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 2504a3c..e7364c7 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -107,6 +107,10 @@
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+/*
+ * Provides functionality for executing a batch of AQL statements (queries included)
+ * sequentially.
+ */
public class AqlTranslator extends AbstractAqlTranslator {
private final List<Statement> aqlStatements;
@@ -135,6 +139,16 @@
return functionDecls;
}
+ /**
+ * Compiles and submits for execution a list of AQL statements.
+ *
+ * @param hcc
+ * AHyracks client connection that is used to submit a jobspec to
+ * Hyracks.
+ * @return A List<QueryResult> containing a QueryResult instance
+ * corresponding to each submitted query.
+ * @throws Exception
+ */
public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc) throws Exception {
List<QueryResult> executionResult = new ArrayList<QueryResult>();
FileSplit outputFile = null;
@@ -349,6 +363,7 @@
throw new AlgebricksException(" dataverse not specified ");
}
datasetName = dd.getName().getValue();
+
DatasetType dsType = dd.getDatasetType();
String itemTypeName = dd.getItemTypeName().getValue();
@@ -408,8 +423,8 @@
}
//#. add a new dataset with PendingAddOp
- Dataset dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dsType,
- DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
+ Dataset dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(),
+ dsType, DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
@@ -434,11 +449,12 @@
//#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
- datasetName, itemTypeName, datasetDetails, dsType, dataset.getDatasetId(),
+ datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
IMetadataEntity.PENDING_NO_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
+ e.printStackTrace();
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
@@ -452,7 +468,7 @@
JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
-
+
runJob(hcc, jobSpec);
} catch (Exception e3) {
if (bActiveTxn) {
@@ -581,7 +597,7 @@
JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
-
+
runJob(hcc, jobSpec);
} catch (Exception e3) {
if (bActiveTxn) {
@@ -601,7 +617,7 @@
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
throw new AlgebricksException(e2);
}
-
+
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
@@ -797,7 +813,7 @@
MetadataManager.INSTANCE.addDataset(
mdTxnCtx,
new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getDatasetDetails(), ds
- .getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
+ .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
@@ -1103,7 +1119,7 @@
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
-
+
throw new AlgebricksException(e);
} finally {
releaseReadLatch();
@@ -1222,7 +1238,8 @@
Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
sessionConfig, out, pdf);
- // Query Compilation (happens under the same ongoing metadata transaction)
+ // Query Compilation (happens under the same ongoing metadata
+ // transaction)
JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query,
reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, out, pdf, stmt);
@@ -1243,7 +1260,7 @@
try {
BeginFeedStatement bfs = (BeginFeedStatement) stmt;
String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
- : activeDefaultDataverse.getDataverseName() : bfs.getDatasetName().getValue();
+ : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName, bfs.getDatasetName()
.getValue(), bfs.getQuery(), bfs.getVarCounter());
@@ -1251,6 +1268,9 @@
Dataset dataset;
dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
.getDatasetName().getValue());
+ if (dataset == null) {
+ throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
+ }
IDatasetDetails datasetDetails = dataset.getDatasetDetails();
if (datasetDetails.getDatasetType() != DatasetType.FEED) {
throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue()
@@ -1279,7 +1299,6 @@
private void handleControlFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
-
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index 93d16ca..a6943b3 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,9 +12,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.file;
import java.io.File;
+import java.io.IOException;
import java.rmi.RemoteException;
import java.util.List;
import java.util.logging.Logger;
@@ -145,6 +147,17 @@
throw new AsterixException("Could not find dataset " + datasetName + " in datavetse " + dataverseName);
}
ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
+ for (String keyField : DatasetUtils.getPartitioningKeys(dataset)) {
+ try {
+ if (!itemType.isClosedField(keyField)) {
+ throw new AsterixException("Cannot partition dataset \"" + dataset.getDatasetName()
+ + "\" by key \"" + keyField + "\" since it is not a valid field of \""
+ + itemType.getTypeName() + "\"");
+ }
+ } catch (IOException e) {
+ throw new AsterixException(e);
+ }
+ }
JobSpecification spec = new JobSpecification();
IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
itemType, format.getBinaryComparatorFactoryProvider());
@@ -323,7 +336,12 @@
ISerializerDeserializer[] recordFields = new ISerializerDeserializer[1 + numKeys];
recordFields[0] = payloadSerde;
for (int i = 0; i < numKeys; i++) {
- IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+ IAType keyType;
+ try {
+ keyType = itemType.getFieldType(partitioningKeys.get(i));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
ISerializerDeserializer keySerde = dataFormat.getSerdeProvider().getSerializerDeserializer(keyType);
recordFields[i + 1] = keySerde;
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
index 546973c..f9bd2d5 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
@@ -21,10 +21,10 @@
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.FeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage.MessageType;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage.MessageType;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
@@ -38,16 +38,29 @@
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+/**
+ * Provides helper method(s) for creating JobSpec for operations on a feed.
+ */
public class FeedOperations {
private static final Logger LOGGER = Logger.getLogger(IndexOperations.class.getName());
+ /**
+ * @param controlFeedStatement
+ * The statement representing the action that describes the
+ * action that needs to be taken on the feed. E.g. of actions are
+ * stop feed or alter feed.
+ * @param metadataProvider
+ * An instance of the MetadataProvider
+ * @return An instance of JobSpec for the job that would send an appropriate
+ * control message to the running feed.
+ * @throws AsterixException
+ * @throws AlgebricksException
+ */
public static JobSpecification buildControlFeedJobSpec(CompiledControlFeedStatement controlFeedStatement,
AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
switch (controlFeedStatement.getOperationType()) {
case ALTER:
- case SUSPEND:
- case RESUME:
case END: {
return createSendMessageToFeedJobSpec(controlFeedStatement, metadataProvider);
}
@@ -86,15 +99,9 @@
List<IFeedMessage> feedMessages = new ArrayList<IFeedMessage>();
switch (controlFeedStatement.getOperationType()) {
- case SUSPEND:
- feedMessages.add(new FeedMessage(MessageType.SUSPEND));
- break;
case END:
feedMessages.add(new FeedMessage(MessageType.STOP));
break;
- case RESUME:
- feedMessages.add(new FeedMessage(MessageType.RESUME));
- break;
case ALTER:
feedMessages.add(new AlterFeedMessage(controlFeedStatement.getProperties()));
break;
@@ -102,8 +109,8 @@
try {
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider.buildFeedMessengerRuntime(
- metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(),
- metadataProvider.getDefaultDataverseName(), datasetName, feedMessages);
+ metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(), dataverseName,
+ datasetName, feedMessages);
feedMessenger = p.first;
messengerPc = p.second;
} catch (AlgebricksException e) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
index 701f2b4..b1cc730 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
@@ -1,6 +1,22 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.file;
import java.io.DataOutput;
+import java.io.IOException;
import java.util.List;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
@@ -165,7 +181,12 @@
primaryBloomFilterKeyFields = new int [numPrimaryKeys];
ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
for (int i = 0; i < numPrimaryKeys; i++) {
- IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+ IAType keyType;
+ try {
+ keyType = itemType.getFieldType(partitioningKeys.get(i));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
keyType, true);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
index fcd50b5..9b29427 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
@@ -21,9 +21,8 @@
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
-import edu.uci.ics.asterix.api.aqlj.server.APIClientThreadFactory;
-import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
@@ -32,18 +31,18 @@
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.api.application.ICCBootstrap;
+/**
+ * The bootstrap class of the application that will manage its life cycle at the
+ * Cluster Controller.
+ */
public class CCBootstrapImpl implements ICCBootstrap {
private static final Logger LOGGER = Logger.getLogger(CCBootstrapImpl.class.getName());
private static final int DEFAULT_WEB_SERVER_PORT = 19001;
- public static final int DEFAULT_API_SERVER_PORT = 14600;
- public static final int DEFAULT_API_NODEDATA_SERVER_PORT = 14601;
-
private Server webServer;
private static IAsterixStateProxy proxy;
private ICCApplicationContext appCtx;
- private ThreadedServer apiServer;
@Override
public void start() throws Exception {
@@ -51,21 +50,16 @@
LOGGER.info("Starting Asterix cluster controller");
}
- // Set the AsterixStateProxy to be the distributed object
proxy = AsterixStateProxy.registerRemoteObject();
proxy.setAsterixProperties(AsterixProperties.INSTANCE);
appCtx.setDistributedState(proxy);
- // Create the metadata manager
MetadataManager.INSTANCE = new MetadataManager(proxy);
- // Setup and start the web interface
setupWebServer();
webServer.start();
- // Setup and start the API server
- setupAPIServer();
- apiServer.start();
+ AsterixAppContextInfoImpl.initialize(appCtx);
}
@Override
@@ -76,7 +70,6 @@
AsterixStateProxy.unregisterRemoteObject();
webServer.stop();
- apiServer.shutdown();
}
@Override
@@ -97,8 +90,4 @@
webServer.setHandler(context);
context.addServlet(new ServletHolder(new APIServlet()), "/*");
}
-
- private void setupAPIServer() throws Exception {
- apiServer = new ThreadedServer(DEFAULT_API_SERVER_PORT, new APIClientThreadFactory(appCtx));
- }
}
\ No newline at end of file
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
index dfd7065..3c81b43 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCBootstrapImpl.java
@@ -19,8 +19,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import edu.uci.ics.asterix.api.aqlj.server.NodeDataClientThreadFactory;
-import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
import edu.uci.ics.asterix.common.context.AsterixAppRuntimeContext;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.MetadataNode;
@@ -39,7 +37,6 @@
private AsterixAppRuntimeContext runtimeContext;
private String nodeId;
private boolean isMetadataNode = false;
- private ThreadedServer apiNodeDataServer;
@Override
public void start() throws Exception {
@@ -50,10 +47,8 @@
runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
runtimeContext.initialize();
-
ncApplicationContext.setApplicationObject(runtimeContext);
- // Initialize metadata if this node is the metadata node
IAsterixStateProxy proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
isMetadataNode = nodeId.equals(proxy.getAsterixProperties().getMetadataNodeName());
if (isMetadataNode) {
@@ -62,12 +57,11 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Bootstrapping metadata");
}
-
MetadataManager.INSTANCE = new MetadataManager(proxy);
MetadataManager.INSTANCE.init();
MetadataBootstrap.startUniverse(proxy.getAsterixProperties(), ncApplicationContext);
}
-
+
//#. recover if the system is corrupted by checking system state.
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
if (!proxy.getAsterixProperties().isNewUniverse()) {
@@ -76,18 +70,10 @@
}
}
recoveryMgr.checkpoint(true);
-
+
if (isMetadataNode) {
//#. clean-up incomplete DDL operations, which is DDLRecovery
MetadataBootstrap.startDDLRecovery();
-
- // Start a sub-component for the API server. This server is only connected to by the
- // API server that lives on the CC and never by a client wishing to execute AQL.
- // TODO: The API sub-system will change dramatically in the future and this code will go away,
- // but leave it for now.
- apiNodeDataServer = new ThreadedServer(CCBootstrapImpl.DEFAULT_API_NODEDATA_SERVER_PORT,
- new NodeDataClientThreadFactory());
- apiNodeDataServer.start();
}
}
@@ -108,12 +94,9 @@
LOGGER.info("Stopping Asterix node controller: " + nodeId);
}
- // Quiesce metadata
if (isMetadataNode) {
MetadataBootstrap.stopUniverse();
}
-
- apiNodeDataServer.shutdown();
runtimeContext.deinitialize();
}
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
index 4dfe106..0931994 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -5,27 +5,12 @@
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
-import java.io.UnsupportedEncodingException;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.charset.Charset;
-import edu.uci.ics.asterix.api.aqlj.client.AQLJClientDriver;
-import edu.uci.ics.asterix.api.aqlj.client.IADMCursor;
-import edu.uci.ics.asterix.api.aqlj.client.IAQLJConnection;
-import edu.uci.ics.asterix.api.aqlj.client.IAQLJResult;
-import edu.uci.ics.asterix.api.aqlj.common.AQLJException;
import edu.uci.ics.asterix.api.java.AsterixJavaClient;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
public class TestsUtils {
@@ -98,112 +83,6 @@
}
- public static void runScriptAndCompareWithResultViaClientAPI(File scriptFile, PrintWriter print, File expectedFile,
- File actualFile, int apiPort) throws Exception {
- FileOutputStream fos = new FileOutputStream(actualFile);
- String query = queryFromFile(scriptFile);
- IAQLJConnection conn = null;
- IAQLJResult res = null;
- try {
- conn = AQLJClientDriver.getConnection("localhost", apiPort, "Metadata");
- res = conn.execute(query);
-
- while (res.next()) {
- leafPrint(conn, res, fos);
- }
- } catch (AQLJException e) {
- e.printStackTrace();
- } finally {
- // be sure that we close the connection and the result cursor
- if (res != null) {
- res.close();
- }
- if (conn != null) {
- conn.close();
- }
- }
- fos.close();
-
- BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
- "UTF-8"));
- BufferedReader readerActual = new BufferedReader(
- new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
- String lineExpected, lineActual;
- int num = 1;
- try {
- while ((lineExpected = readerExpected.readLine()) != null) {
- lineActual = readerActual.readLine();
- // Assert.assertEquals(lineExpected, lineActual);
- if (lineActual == null) {
- throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
- + "\n> ");
- }
- if (!equalStrings(lineExpected, lineActual)) {
- throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
- + "\n> " + lineActual);
- }
- ++num;
- }
- lineActual = readerActual.readLine();
- // Assert.assertEquals(null, lineActual);
- if (lineActual != null) {
- throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
- }
- actualFile.delete();
- } finally {
- readerExpected.close();
- readerActual.close();
- }
-
- }
-
- public static void leafPrint(IAQLJConnection conn, IADMCursor c, FileOutputStream fos) throws AQLJException,
- UnsupportedEncodingException, IOException {
- IAType t;
- IAObject o;
- String fieldNames[];
- IADMCursor cur;
-
- o = c.get();
- if (o == null) {
- return;
- }
-
- t = o.getType();
- if (t instanceof AbstractCollectionType) {
- fos.write("AbstractCollectionType: \n".getBytes("UTF-8"));
- cur = conn.createADMCursor();
- c.position(cur);
- while (cur.next()) {
-
- leafPrint(conn, cur, fos);
- }
- } else if (t instanceof ARecordType) {
- fos.write("ARecordType: \n".getBytes("UTF-8"));
- fieldNames = ((ARecordType) t).getFieldNames();
- for (int i = 0; i < fieldNames.length; i++) {
- cur = conn.createADMCursor();
- c.position(cur, fieldNames[i]);
- fos.write(("field: " + fieldNames[i] + "\n").getBytes("UTF-8"));
- leafPrint(conn, cur, fos);
- }
- } else {
- fos.write((o.toString() + "\n").getBytes("UTF-8"));
- }
- }
-
- private static String queryFromFile(File f) throws IOException {
- FileInputStream fis = new FileInputStream(f);
- try {
- FileChannel fc = fis.getChannel();
- MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
- return Charset.forName("UTF-8").decode(bb).toString();
- } finally {
- fis.close();
- }
-
- }
-
private static boolean equalStrings(String s1, String s2) {
String[] rowsOne = s1.split("\n");
String[] rowsTwo = s2.split("\n");
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java
deleted file mode 100644
index 0301675..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aqlj/ClientAPITest.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package edu.uci.ics.asterix.test.aqlj;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.internal.AssumptionViolatedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.test.common.TestHelper;
-
-@RunWith(Parameterized.class)
-public class ClientAPITest {
- private static final PrintWriter ERR = new PrintWriter(System.err);
- private static final String EXTENSION_QUERY = "aql";
- private static final String FILENAME_IGNORE = "ignore.txt";
- private static final String FILENAME_ONLY = "only.txt";
- private static final ArrayList<String> ignore = readFile(FILENAME_IGNORE);
- private static final ArrayList<String> only = readFile(FILENAME_ONLY);
- private static final String PATH_ACTUAL = "aqljtest/";
- private static final String PATH_BASE = "src/test/resources/aqljts/";
- private static final String PATH_EXPECTED = PATH_BASE + "results/";
- private static final String PATH_QUERIES = PATH_BASE + "queries/";
- private static final String SEPARATOR = System.getProperty("file.separator");
-
- private static final String TEST_CONFIG_FILE_NAME = "test.properties";
- private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
-
- private static final Logger LOGGER = Logger.getLogger(ClientAPITest.class.getName());
-
- private static ArrayList<String> readFile(String fileName) {
- ArrayList<String> list = new ArrayList<String>();
- BufferedReader result;
- try {
- result = new BufferedReader(new InputStreamReader(new FileInputStream(PATH_BASE + fileName), "UTF-8"));
- while (true) {
- String line = result.readLine();
- if (line == null) {
- break;
- } else {
- String s = line.trim();
- if (s.length() > 0) {
- list.add(s);
- }
- }
- }
- result.close();
- } catch (FileNotFoundException e) {
- } catch (IOException e) {
- }
- return list;
- }
-
- @BeforeClass
- public static void setUp() throws Exception {
- System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
- System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
- File outdir = new File(PATH_ACTUAL);
- outdir.mkdirs();
- AsterixHyracksIntegrationUtil.init();
- // _bootstrap.start();
- }
-
- @AfterClass
- public static void tearDown() throws Exception {
- // _bootstrap.stop();
- AsterixHyracksIntegrationUtil.deinit();
- File outdir = new File(PATH_ACTUAL);
- File[] files = outdir.listFiles();
- if (files == null || files.length == 0) {
- outdir.delete();
- }
- // clean up the files written by the ASTERIX storage manager
- for (String d : ASTERIX_DATA_DIRS) {
- TestsUtils.deleteRec(new File(d));
- }
- }
-
- private static void suiteBuild(File dir, Collection<Object[]> testArgs, String path) {
- for (File file : dir.listFiles()) {
- if (file.isDirectory() && !file.getName().startsWith(".")) {
- suiteBuild(file, testArgs, path + file.getName() + SEPARATOR);
- }
- if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)
- // && !ignore.contains(path + file.getName())
- ) {
- String resultFileName = TestsUtils.aqlExtToResExt(file.getName());
- File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
- File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
- testArgs.add(new Object[] { file, expectedFile, actualFile });
- }
- }
- }
-
- @Parameters
- public static Collection<Object[]> tests() {
- Collection<Object[]> testArgs = new ArrayList<Object[]>();
- suiteBuild(new File(PATH_QUERIES), testArgs, "");
- return testArgs;
- }
-
- private File actualFile;
- private File expectedFile;
- private File queryFile;
-
- public ClientAPITest(File queryFile, File expectedFile, File actualFile) {
- this.queryFile = queryFile;
- this.expectedFile = expectedFile;
- this.actualFile = actualFile;
- }
-
- @Test
- public void test() throws Exception {
- try {
- String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length())
- .replace(SEPARATOR.charAt(0), '/');
-
- if (!only.isEmpty()) {
- Assume.assumeTrue(TestHelper.isInPrefixList(only, queryFileShort));
- }
- Assume.assumeTrue(!TestHelper.isInPrefixList(ignore, queryFileShort));
- LOGGER.severe("RUNNING TEST: " + queryFile + " \n");
- TestsUtils.runScriptAndCompareWithResultViaClientAPI(queryFile, ERR, expectedFile, actualFile,
- CCBootstrapImpl.DEFAULT_API_SERVER_PORT);
- } catch (Exception e) {
- if (!(e instanceof AssumptionViolatedException)) {
- LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
- throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
- } else {
- throw e;
- }
- }
- }
-}
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index dfeb103..7423cb9 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.test.metadata;
import java.io.File;
@@ -21,6 +35,9 @@
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
+/**
+ * Executes the Metadata tests.
+ */
@RunWith(Parameterized.class)
public class MetadataTest {
@@ -30,8 +47,7 @@
private static final String PATH_ACTUAL = "mdtest/";
private static final String PATH_BASE = "src/test/resources/metadata/";
private static final String TEST_CONFIG_FILE_NAME = "test.properties";
- private static final String WEB_SERVER_PORT="19002";
- private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
+ private static final String WEB_SERVER_PORT = "19002";
public MetadataTest(TestCaseContext tcCtx) {
this.tcCtx = tcCtx;
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
index 43c6c0c..10cba14 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
@@ -23,6 +23,8 @@
import edu.uci.ics.asterix.api.java.AsterixJavaClient;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
import edu.uci.ics.asterix.test.base.AsterixTestHelper;
import edu.uci.ics.asterix.test.common.TestHelper;
@@ -56,6 +58,10 @@
File outdir = new File(PATH_ACTUAL);
outdir.mkdirs();
AsterixHyracksIntegrationUtil.init();
+ // Set the node resolver to be the identity resolver that expects node names
+ // to be node controller ids; a valid assumption in test environment.
+ System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
+ IdentitiyResolverFactory.class.getName());
}
@AfterClass
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index da2c91f..36236d4 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -17,10 +17,15 @@
import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
import edu.uci.ics.asterix.test.aql.TestsUtils;
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
+/**
+ * Runs the runtime test cases under 'src/test/resources/runtimets'.
+ */
@RunWith(Parameterized.class)
public class ExecutionTest {
private static final String PATH_ACTUAL = "rttest/";
@@ -48,8 +53,14 @@
AsterixHyracksIntegrationUtil.init();
- // TODO: Uncomment when hadoop version is upgraded and adapters are ported
- //HDFSCluster.getInstance().setup();
+ // NOTE(review): the setup call below is now enabled; the old "uncomment
+ // when hadoop is upgraded" TODO no longer applies — confirm and drop it.
+ HDFSCluster.getInstance().setup();
+
+ // Set the node resolver to be the identity resolver that expects node names
+ // to be node controller ids; a valid assumption in test environment.
+ System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
+ IdentitiyResolverFactory.class.getName());
}
@AfterClass
@@ -70,6 +81,7 @@
FileUtils.deleteDirectory(log);
File lsn = new File("last_checkpoint_lsn");
lsn.deleteOnExit();
+ HDFSCluster.getInstance().cleanup();
}
@Parameters
@@ -95,12 +107,13 @@
File testFile = tcCtx.getTestFile(cUnit);
/*************** to avoid run failure cases ****************
- if (!testFile.getAbsolutePath().contains("query-issue205.aql")) {
+ if (!testFile.getAbsolutePath().contains("queries/failure")) {//&& !testFile.getAbsolutePath().contains("partition-by-nonexistent-field.aql")) ) {
continue;
+ //System.out.println(testFile.getAbsolutePath());
}
System.out.println(testFile.getAbsolutePath());
************************************************************/
-
+
File expectedResultFile = tcCtx.getExpectedResultFile(cUnit);
File actualFile = new File(PATH_ACTUAL + File.separator
+ tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_" + cUnit.getName() + ".adm");
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
index c07cff2..20df118 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
@@ -1,5 +1,20 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.test.runtime;
+import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
@@ -7,20 +22,29 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
+import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
+import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
+
+/**
+ * Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
+ *
+ * @author ramangrover29
+ */
@SuppressWarnings("deprecation")
public class HDFSCluster {
private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
- private static final int nameNodePort = 10009;
- private static final String DATA_PATH = "data/tpch0.001";
- private static final String HDFS_PATH = "/tpch";
+ private static final int nameNodePort = 31888;
+ private static final String DATA_PATH = "data/hdfs";
+ private static final String HDFS_PATH = "/asterix";
private static final HDFSCluster INSTANCE = new HDFSCluster();
private MiniDFSCluster dfsCluster;
private int numDataNodes = 2;
private JobConf conf = new JobConf();
+ private FileSystem dfs;
public static HDFSCluster getInstance() {
return INSTANCE;
@@ -30,16 +54,30 @@
}
+ /**
+ * Instantiates the (Mini) DFS Cluster with the configured number of datanodes.
+ * Post instantiation, data is loaded to HDFS.
+ * Called prior to running the Runtime test suite.
+ */
public void setup() throws Exception {
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
cleanupLocal();
dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
- FileSystem dfs = FileSystem.get(conf);
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH);
- dfs.copyFromLocalFile(src, dest);
+ dfs = FileSystem.get(conf);
+ loadData();
+ }
+
+ private void loadData() throws IOException {
+ Path destDir = new Path(HDFS_PATH);
+ dfs.mkdirs(destDir);
+ File srcDir = new File(DATA_PATH);
+ File[] listOfFiles = srcDir.listFiles();
+ for (File srcFile : listOfFiles) {
+ Path path = new Path(srcFile.getAbsolutePath());
+ dfs.copyFromLocalFile(path, destDir);
+ }
}
private void cleanupLocal() throws IOException {
@@ -57,7 +95,25 @@
public static void main(String[] args) throws Exception {
HDFSCluster cluster = new HDFSCluster();
cluster.setup();
- cluster.cleanup();
+ JobConf conf = configureJobConf();
+ FileSystem fs = FileSystem.get(conf);
+ InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
+ for (InputSplit split : inputSplits) {
+ System.out.println("split :" + split);
+ }
+ // cluster.cleanup();
+ }
+
+ private static JobConf configureJobConf() throws Exception {
+ JobConf conf = new JobConf();
+ String hdfsUrl = "hdfs://127.0.0.1:31888";
+ String hdfsPath = "/asterix/extrasmalltweets.txt";
+ conf.set("fs.default.name", hdfsUrl);
+ conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+ conf.setClassLoader(HDFSAdapter.class.getClassLoader());
+ conf.set("mapred.input.dir", hdfsPath);
+ conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
+ return conf;
}
}
diff --git a/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql b/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
index 51e4344..1660e44 100644
--- a/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
+++ b/asterix-app/src/test/resources/AQLTS/queries/del-dataset.aql
@@ -24,6 +24,6 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql b/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
index f70de14..576a9da 100644
--- a/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
+++ b/asterix-app/src/test/resources/AQLTS/queries/load-del-dataset.aql
@@ -24,7 +24,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/aqljts/ignore.txt b/asterix-app/src/test/resources/aqljts/ignore.txt
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/aqljts/ignore.txt
+++ /dev/null
diff --git a/asterix-app/src/test/resources/aqljts/only.txt b/asterix-app/src/test/resources/aqljts/only.txt
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/aqljts/only.txt
+++ /dev/null
diff --git a/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql b/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql
deleted file mode 100644
index 5f2b961..0000000
--- a/asterix-app/src/test/resources/aqljts/queries/gram-tokens_01.aql
+++ /dev/null
@@ -1,4 +0,0 @@
-let $txt := "Jürgen S. Generic's Car"
-let $tokens := gram-tokens($txt, 3, false)
-for $token in $tokens
-return $token
diff --git a/asterix-app/src/test/resources/aqljts/queries/nested_01.aql b/asterix-app/src/test/resources/aqljts/queries/nested_01.aql
deleted file mode 100644
index 968f314..0000000
--- a/asterix-app/src/test/resources/aqljts/queries/nested_01.aql
+++ /dev/null
@@ -1,9 +0,0 @@
-for $x in [1]
-return {"x":
- [{"r": [["a", "b", "c"], ["d", "e", "f"]]}],
- "y":
- [{"s": [[1, 2, 3], [4, 5, 6]]}],
- "z":
- [{"t": [datetime("2011-09-16T11:00:00Z")]}]
- }
-
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm b/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm
deleted file mode 100644
index f0f59c3..0000000
--- a/asterix-app/src/test/resources/aqljts/results/gram-tokens_01.adm
+++ /dev/null
@@ -1,21 +0,0 @@
-AString: {jür}
-AString: {ürg}
-AString: {rge}
-AString: {gen}
-AString: {en }
-AString: {n s}
-AString: { s.}
-AString: {s. }
-AString: {. g}
-AString: { ge}
-AString: {gen}
-AString: {ene}
-AString: {ner}
-AString: {eri}
-AString: {ric}
-AString: {ic'}
-AString: {c's}
-AString: {'s }
-AString: {s c}
-AString: { ca}
-AString: {car}
diff --git a/asterix-app/src/test/resources/aqljts/results/nested_01.adm b/asterix-app/src/test/resources/aqljts/results/nested_01.adm
deleted file mode 100644
index 27de5c0..0000000
--- a/asterix-app/src/test/resources/aqljts/results/nested_01.adm
+++ /dev/null
@@ -1,33 +0,0 @@
-ARecordType:
-field: x
-AbstractCollectionType:
-ARecordType:
-field: r
-AbstractCollectionType:
-AbstractCollectionType:
-AString: {a}
-AString: {b}
-AString: {c}
-AbstractCollectionType:
-AString: {d}
-AString: {e}
-AString: {f}
-field: y
-AbstractCollectionType:
-ARecordType:
-field: s
-AbstractCollectionType:
-AbstractCollectionType:
-AInt32: {1}
-AInt32: {2}
-AInt32: {3}
-AbstractCollectionType:
-AInt32: {4}
-AInt32: {5}
-AInt32: {6}
-field: z
-AbstractCollectionType:
-ARecordType:
-field: t
-AbstractCollectionType:
-ADateTime: { 2011-09-16T11:00:00.000Z }
diff --git a/asterix-app/src/test/resources/dapd/denorm_user_event.aql b/asterix-app/src/test/resources/dapd/denorm_user_event.aql
index a3b9fab..fe79872 100644
--- a/asterix-app/src/test/resources/dapd/denorm_user_event.aql
+++ b/asterix-app/src/test/resources/dapd/denorm_user_event.aql
@@ -27,8 +27,8 @@
declare nodegroup group1 on nc1, nc2;
-declare dataset Users(UserType) partitioned by key u_name on group1;
-declare dataset Events(EventType) partitioned by key e_name on group1;
+declare dataset Users(UserType) primary key u_name on group1;
+declare dataset Events(EventType) primary key e_name on group1;
write output to nc1:"/tmp/denorm_user_event.adm";
diff --git a/asterix-app/src/test/resources/dapd/q1.aql b/asterix-app/src/test/resources/dapd/q1.aql
index ae6b28f..934387b 100644
--- a/asterix-app/src/test/resources/dapd/q1.aql
+++ b/asterix-app/src/test/resources/dapd/q1.aql
@@ -23,7 +23,7 @@
declare nodegroup group1 on nc1, nc2;
-declare dataset User(UserType) partitioned by key name on group1;
+declare dataset User(UserType) primary key name on group1;
write output to nc1:"/tmp/q1.adm";
diff --git a/asterix-app/src/test/resources/dapd/q2.aql b/asterix-app/src/test/resources/dapd/q2.aql
index 3998b9e..fdb0469 100644
--- a/asterix-app/src/test/resources/dapd/q2.aql
+++ b/asterix-app/src/test/resources/dapd/q2.aql
@@ -37,7 +37,7 @@
drop dataset Event;
declare dataset Event(EventType)
- partitioned by key event_id on group1;
+ primary key event_id on group1;
load dataset Event
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql b/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
index 0cb6b21..171c935 100644
--- a/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
+++ b/asterix-app/src/test/resources/demo0216/01-load-dblp-large.aql
@@ -12,7 +12,7 @@
rainbow-04, rainbow-05;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql b/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
index f4d8ae0..751179f 100644
--- a/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
+++ b/asterix-app/src/test/resources/demo0216/03-load-dblp-small.aql
@@ -12,7 +12,7 @@
rainbow-04, rainbow-05;
declare dataset DBLPSmall(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLPSmall
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
index 9077f33..dc40196 100644
--- a/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo0216/110-self-join-dblp.aql
@@ -12,7 +12,7 @@
rainbow-04, rainbow-05;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to rainbow-01:"/home/hyracks/dblp-self-join.adm";
diff --git a/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql b/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
index 66ca6cb..5802236 100644
--- a/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
+++ b/asterix-app/src/test/resources/demo0216/120-self-join-dblp-small.aql
@@ -12,7 +12,7 @@
rainbow-04, rainbow-05;
declare dataset DBLPSmall(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to rainbow-01:"/home/hyracks/small-dblp-self-join.adm";
diff --git a/asterix-app/src/test/resources/demo0927/local/create-index.aql b/asterix-app/src/test/resources/demo0927/local/create-index.aql
index 3ecd163..dd41fc8 100644
--- a/asterix-app/src/test/resources/demo0927/local/create-index.aql
+++ b/asterix-app/src/test/resources/demo0927/local/create-index.aql
@@ -20,6 +20,6 @@
declare nodegroup group1 on nc1,nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
create index NameIndex on Customers(name);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql b/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
index 3acb8f8..c50405b 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-filter-int.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql b/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
index 35401cf..89242ea 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-filter-str.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql b/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
index 2f8746c..230b6f2 100644
--- a/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
+++ b/asterix-app/src/test/resources/demo0927/local/dataset-scan.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo0927/local/declare-index.aql b/asterix-app/src/test/resources/demo0927/local/declare-index.aql
index ecacc49..49c0a31 100644
--- a/asterix-app/src/test/resources/demo0927/local/declare-index.aql
+++ b/asterix-app/src/test/resources/demo0927/local/declare-index.aql
@@ -20,6 +20,6 @@
declare nodegroup group1 on nc1,nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare index NameIndex on Customers(name);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql b/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
index b262860..6630507 100644
--- a/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/local/join-cust-ord.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/custorder.adm";
diff --git a/asterix-app/src/test/resources/demo0927/local/load-cust.aql b/asterix-app/src/test/resources/demo0927/local/load-cust.aql
index b71731c..885b226 100644
--- a/asterix-app/src/test/resources/demo0927/local/load-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/local/load-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0927/local/load-ord.aql b/asterix-app/src/test/resources/demo0927/local/load-ord.aql
index 15ba4cf..6fba4b1 100644
--- a/asterix-app/src/test/resources/demo0927/local/load-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/local/load-ord.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders from nc1:"/tmp/orderData.json";
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
index 74502a0..ed750e5 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/01-load-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers from rainbow-01:"/home/hyracks/demo-data/customerData.json";
// delete dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
index 2ed1304..d8070ef 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/02-filter-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to rainbow-01:"/home/hyracks/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql b/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
index a9bfe61..0fb5398 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/03-load-ord.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders from rainbow-01:"/home/hyracks/demo-data/orderData.json";
// delete dataset Orders;
diff --git a/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql b/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
index 57cc397..d89a799 100644
--- a/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo0927/rainbow/04-join-custord.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to rainbow-01:"/home/hyracks/custorder.adm";
diff --git a/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql b/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
index 8d764c3..dd7d2a9 100644
--- a/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/local/01-load-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql b/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
index 24c5827..eb3f9a8 100644
--- a/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/local/02-filter-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql b/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
index 7db4f4e..83657df 100644
--- a/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo1112/local/03-load-ord.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql b/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
index 1238e7e..fa82c9a 100644
--- a/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/local/04-join-custord.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/custorder.adm";
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
index ccb959d..f8c34fb 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/01-load-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers from rainbow-01:"/home/onose/demo-data/customerData.adm";
// drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
index 2c08f34..2993b92 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/02-filter-cust.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/mycustomers.adm";
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql b/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
index 1329aa4..3134818 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/03-load-ord.aql
@@ -12,6 +12,6 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders from rainbow-01:"/home/onose/demo-data/orderData.adm";
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql b/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
index 63c4670..18426d4 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/04-join-custord.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/custorder.adm";
diff --git a/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql b/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
index dcbea09..f55ccbd 100644
--- a/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
+++ b/asterix-app/src/test/resources/demo1112/rainbow/05-count-custord.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/custorder.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql b/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
index cd7a682..0cbbf5b 100644
--- a/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/01-load-cust.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql b/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
index 38171ed..f631afc 100644
--- a/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/02-filter-cust.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/02-filter-cust.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql b/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
index 37698a7..1bba3f6 100644
--- a/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/03-count-cust-age.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/03-count-cust-age.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql b/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
index b8898f5..00f4f10 100644
--- a/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/04-load-ord.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql b/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
index 02263c4..f7cc468 100644
--- a/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/05-count-param1.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/05-count-param1.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql b/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
index 86e6ea0..3ff3a43 100644
--- a/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/06-count-custord.aql
@@ -28,9 +28,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/06-count-custord.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql b/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
index 7d6dc62..03cf233 100644
--- a/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/101-load-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql b/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
index e37b49a..2cea3c5 100644
--- a/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/102-fuzzy-select.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/102-fuzzy-select.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
index f51e455..64a65a5 100644
--- a/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/local/110-self-join-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/110-self-join-dblp.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql b/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
index e75bce2..143f55f 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/01-load-cust.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers from rainbow-01:"/home/onose/demo-data/semistructured/customer.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql b/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
index 8af2368..a99e088 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/02-filter-cust.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/02-filter-cust.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql b/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
index eda4015..714c60a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/03-count-cust-age.aql
@@ -18,7 +18,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/03-count-cust-age.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql b/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
index bae97e8..be7a874 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/04-load-ord.aql
@@ -12,6 +12,6 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders from rainbow-01:"/home/onose/demo-data/semistructured/orders.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql b/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
index f5ec449..0ea243a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/05-count-param1.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/05-count-param1.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql b/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
index ba5d5f4..2502d48a 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/06-count-custord.aql
@@ -29,9 +29,9 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/06-count-custord.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql b/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
index 20f0b51..9ed4a85 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/101-load-dblp.aql
@@ -12,7 +12,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP from
rainbow-01:"/home/onose/demo-data/dblp-id.txt" delimited by ":";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql b/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
index c9f072a..af129cb 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/102-fuzzy-select.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/102-fuzzy-select.adm";
diff --git a/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql b/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
index c5b900b..37f06a0 100644
--- a/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/demo_aql/rainbow/110-self-join-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on rainbow-01, rainbow-02, rainbow-03, rainbow-04, rainbow-05;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to rainbow-01:"/home/onose/hyracks-rainbow/results/110-self-join-dblp.adm";
diff --git a/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql b/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
index ed44308..5bc029d 100644
--- a/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
+++ b/asterix-app/src/test/resources/dmlts/scripts/enlist-scan-cust.aql
@@ -24,7 +24,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
enlist dataset Customers;
diff --git a/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql b/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
index d55dfed..77d023e 100644
--- a/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
+++ b/asterix-app/src/test/resources/dmlts/scripts/load-cust.aql
@@ -24,7 +24,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
index f7dcf0e..5e819bd 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx-small.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset CSXSmall(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset CSXSmall
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
index b5787a1..0b6da50 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-csx.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset CSX
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
index 3ca85bb..c726018f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/10-load-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
index 868d534..66af9ce 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/20-drop-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
index 45bd1c8..4a6b486 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/30-filter-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/amerix.adm";
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
index bc024c3..60eafd3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/40-self-join-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/amerix.adm";
diff --git a/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
index eb44be7..536b549 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/amerix/50-self-join-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/amerix.adm";
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
index 43a595d..182a17d 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/000-1-char-at.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/tmp/dblp.adm";
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
index a857935..ab93f83 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/010-load.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
index 65213dd..c82183b 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/020-drop.aql
@@ -11,6 +11,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
index b539aba..2588e99 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/030-filter.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/dblp.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
index 63e1a1e..4ac1a60 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/040-self-join-aql.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/dblp.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql b/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
index f2467f2..e97df35 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/dblp/050-self-join-op.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/dblp.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
index 49d07ec..7ae0e30 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/010-load-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
// load dataset DBLP from nc1:'/asterix/asterix-app/data/pub-small/dblp-small-id.txt'
load dataset DBLP
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
index 65213dd..c82183b 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/020-drop-dblp.aql
@@ -11,6 +11,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
index d67d58a..7950d12 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/030-filter-dblp.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/pub.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
index 06dfb4e..a87398d 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/040-load-csx.aql
@@ -11,7 +11,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
// load dataset CSX from nc1:'/asterix/asterix-app/data/pub-small/csx-small-id.txt'
load dataset CSX
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
index c37c8b5..62e53ca 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/050-drop-csx.aql
@@ -11,6 +11,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset CSX;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
index 56b6dc9..daf53d3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/060-filter-csx.aql
@@ -13,7 +13,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/pub.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
index ec88679..a72fa03 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/070-join-aql.aql
@@ -19,10 +19,10 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/pub.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql b/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
index 032031f..0466033 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/pub/080-join-op.aql
@@ -19,10 +19,10 @@
declare nodegroup group1 on nc1, nc2;
declare dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
declare dataset CSX(CSXType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'/tmp/pub.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
index be75fa6..affd0f3 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/010-load-users.aql
@@ -10,7 +10,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
index 86525a7..71df5ae 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/020-drop-users.aql
@@ -10,6 +10,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
drop dataset Users;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
index 298c0b1..4e6846f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/030-filter-users.aql
@@ -10,7 +10,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
write output to nc1:'/tmp/users.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
index a75989a..0e07e69 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/040-load-visitors.aql
@@ -10,7 +10,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
load dataset Visitors
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
index 15047b8..3a8e725 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/050-drop-visitors.aql
@@ -10,6 +10,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
drop dataset Visitors;
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
index dae180e..1fb0a4c 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/060-fililter-visitors.aql
@@ -10,7 +10,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
write output to nc1:'/tmp/visitors.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
index aaa145f..c5a625a 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/070-join-aql-lottery_numbers.aql
@@ -17,9 +17,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
write output to nc1:'/tmp/users-visitors.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
index ca5bebb..4275067 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/080-join-op-lottery_numbers.aql
@@ -17,9 +17,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
write output to nc1:'/tmp/users-visitors.adm';
diff --git a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
index a7cf0a4..856e02f 100644
--- a/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
+++ b/asterix-app/src/test/resources/fuzzyjoin/users-visitors/090-join-op-interests.aql
@@ -17,9 +17,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
declare dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
write output to nc1:'/tmp/users-visitors.adm';
diff --git a/asterix-app/src/test/resources/integration/queries/dataset-scan.aql b/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
index 1930be2..f7558c5 100644
--- a/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
+++ b/asterix-app/src/test/resources/integration/queries/dataset-scan.aql
@@ -20,7 +20,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
for $c in dataset('Customers')
return $c
diff --git a/asterix-app/src/test/resources/integration/updates/load-dataset.aql b/asterix-app/src/test/resources/integration/updates/load-dataset.aql
index c6faa6e..4041450 100644
--- a/asterix-app/src/test/resources/integration/updates/load-dataset.aql
+++ b/asterix-app/src/test/resources/integration/updates/load-dataset.aql
@@ -15,7 +15,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql b/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
index af15a23..87aa2fc 100644
--- a/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/init-state-queries/customers_orders.aql
@@ -44,10 +44,10 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid, name on group1;
+ primary key cid, name on group1;
create dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
create index ordCustId on Orders(cid);
diff --git a/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql b/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
index 047eddf..b5c3c28 100644
--- a/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/queries/create_duplicate_dataset.aql
@@ -1,4 +1,4 @@
use dataverse custord;
create dataset Customers(CustomerType)
- partitioned by key cid, name on group1;
+ primary key cid, name on group1;
diff --git a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
index aa76ae2..a9ad490 100644
--- a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_new_dataset.aql
@@ -2,7 +2,7 @@
// Creating this dataset should succeed.
create dataset NewDataset(CustomerType)
- partitioned by key cid, name on group1;
+ primary key cid, name on group1;
// Creating this duplicate type should fail, and rollback should remove the new dataverse.
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql
new file mode 100644
index 0000000..de0e870
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_1.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : create a dataset providing hints but use whitespace
+ * Expected Res : Success
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_1.adm";
+
+create dataset Book(LineType)
+primary key id
+hints( cardinality = 2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql
new file mode 100644
index 0000000..27d2c2c
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_2.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : create a dataset providing a valid hint and do not use any whitespace
+ * Expected Res : Success
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_2.adm";
+
+create dataset Book(LineType)
+primary key id
+hints(cardinality=2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql
new file mode 100644
index 0000000..b6a730f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_3.aql
@@ -0,0 +1,25 @@
+/*
+ * Description : create a dataset providing hint (in upper case)
+ * Expected Res : Success
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_3.adm";
+
+create dataset Book(LineType)
+primary key id
+hints(CARDINALITY=2000);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql
new file mode 100644
index 0000000..af7077a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/issue_251_dataset_hint_4.aql
@@ -0,0 +1,24 @@
+/*
+ * Description : create a dataset without providing any hints.
+ * Expected Res : Success
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+write output to nc1:"mdtest/basic_issue_251_dataset_hint_4.adm";
+
+create dataset Book(LineType)
+primary key id;
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
index 8bfc240..7778e99 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
@@ -13,7 +13,7 @@
id : int32
}
-create dataset testdv.dst01(testtype) partitioned by key id;
+create dataset testdv.dst01(testtype) primary key id;
for $l in dataset('Metadata.Dataset')
where $l.DataverseName = 'testdv' and $l.DatasetName = 'dst01'
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
index 9eb129d..201ab6e 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
@@ -14,7 +14,7 @@
name : string
}
-create dataset testdv.t1(testtype) partitioned by key id;
+create dataset testdv.t1(testtype) primary key id;
create index idx1 on testdv.t1(name);
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
index 28a3794..56c6c7a 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
@@ -13,7 +13,7 @@
id:int32
}
-create dataset test.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
insert into dataset test.t1({"id":123});
insert into dataset test.t1({"id":133});
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
index d781114..0e02802 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
@@ -13,7 +13,7 @@
id : int32
}
-create dataset test.dst01(testtype) partitioned by key id;
+create dataset test.dst01(testtype) primary key id;
drop dataset test.dst01;
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
index 1de7ac5..82e2eb0 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
@@ -14,7 +14,7 @@
name : string
}
-create dataset test.dst01(testtype) partitioned by key id;
+create dataset test.dst01(testtype) primary key id;
create index idx1 on test.dst01(name);
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
new file mode 100644
index 0000000..f4547d6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Dataset
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Dataset;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
new file mode 100644
index 0000000..d62bb49
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Dataset
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Dataverse;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
new file mode 100644
index 0000000..96dd8cc
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Nodegroup
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Nodegroup;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
new file mode 100644
index 0000000..7685427
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Index
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Index;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
new file mode 100644
index 0000000..5e6e468
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.DatasourceAdapter
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.DatasourceAdapter;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
new file mode 100644
index 0000000..0eb863f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Function
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Function;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
new file mode 100644
index 0000000..6794d04
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Datatype
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Datatype;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
new file mode 100644
index 0000000..d75e27a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Node
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Node;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql
new file mode 100644
index 0000000..eb3f3dc
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_1.aql
@@ -0,0 +1,19 @@
+/*
+ * Description : create a dataset providing invalid hints
+ * Expected Res : Failure
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+create dataset Book(LineType)
+primary key id
+hints(size=2000,tuple_size=100);
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql
new file mode 100644
index 0000000..830370e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_251_dataset_hint_error_2.aql
@@ -0,0 +1,24 @@
+/*
+ * Description : create a dataset providingi an invalid value for a hint and an unknown hint
+ * Expected Res : Failure
+ * Date : 29 Jan 2013
+ * Issue : 251
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as open {
+ id:int32,
+ text: string
+}
+
+create dataset Book(LineType)
+primary key id
+hints(cardinality="-20jh0",size=45);
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
index d4678f2..19b87a2 100644
--- a/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
@@ -37,7 +37,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid, name on group1;
+ primary key cid, name on group1;
create nodegroup group1 on nc1, nc2;
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
new file mode 100644
index 0000000..7c82b18
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_2.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:11:26 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
new file mode 100644
index 0000000..f931b40
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_3.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ { "Name": "CARDINALITY", "Value": "2000" } }}, "Timestamp": "Tue Jan 29 19:00:38 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
new file mode 100644
index 0000000..efd3a7e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/issue_251_dataset_hint_4.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "Book", "DataTypeName": "LineType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:59:57 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
index 2424676..394af5c 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
@@ -1 +1 @@
-{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Sat Sep 15 14:44:58 PDT 2012" }
+{ "DataverseName": "testdv", "DatasetName": "dst01", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:34 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
index b85737d..9bcb2a4 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
@@ -1 +1 @@
-{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Sat Nov 24 14:28:44 PST 2012" }
+{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:55:25 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
index 8abc339..b56fe7c 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
@@ -1,8 +1,8 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index b351cfb..2de89a6 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -1,56 +1,60 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "string" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Hints", "FieldType": "Field_Hints_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "Field_Hints_in_DatasetRecordType_ItemType", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Hints_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:30 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "interval", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Fri Feb 08 15:49:29 PST 2013" }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index 78dc3b5..6494cbf 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -105,50 +105,83 @@
<output-file compare="Text">meta21.adm</output-file>
</compilation-unit>
</test-case>
+ <test-case FilePath="basic">
+ <compilation-unit name="issue_251_dataset_hint_2">
+ <output-file compare="Text">issue_251_dataset_hint_2.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="basic">
+ <compilation-unit name="issue_251_dataset_hint_3">
+ <output-file compare="Text">issue_251_dataset_hint_3.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="basic">
+ <compilation-unit name="issue_251_dataset_hint_4">
+ <output-file compare="Text">issue_251_dataset_hint_4.adm</output-file>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="exception">
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_dataset">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_1">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_dataverse">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_2">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_index">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_3">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_nodegroup">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_4">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type1">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_5">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type2">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_6">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type3">
+ <compilation-unit name="issue_239_drop_system_dataset_7">
<output-file compare="Text">none.adm</output-file>
<expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_239_drop_system_dataset_8">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_251_dataset_hint_error_1">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_251_dataset_hint_error_2">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="transaction">
<test-case FilePath="transaction">
diff --git a/asterix-app/src/test/resources/misc/split01.aql b/asterix-app/src/test/resources/misc/split01.aql
index 46b2403..de9ec5c 100644
--- a/asterix-app/src/test/resources/misc/split01.aql
+++ b/asterix-app/src/test/resources/misc/split01.aql
@@ -30,9 +30,9 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/split01.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
index 13a0414..0672b32 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-drop.aql
@@ -36,6 +36,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset All(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset All;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
index 73fa096..745562d 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-load.aql
@@ -36,7 +36,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset All(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset All from nc1:"/home/yasser/Dropbox/Research/data/allData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
index 7006eb4..1bd4b73 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/all-scan.aql
@@ -36,7 +36,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset All(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_all_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
index b6a1913..23aeea8 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-drop.aql
@@ -23,6 +23,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
drop dataset Customers;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
index fb57584..4af623a 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-load.aql
@@ -17,7 +17,7 @@
declare nodegroup group1 on nc1;
declare dataset Dataverse(DataverseType)
- partitioned by key dataverseName on group1;
+ primary key dataverseName on group1;
for $c in dataset('Dataverse')
return $c
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
index bc7ac1f..bc1c623 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-q1.aql
@@ -23,7 +23,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_cust_q1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
index a54197f..cf8aacf 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/cust-scan.aql
@@ -23,7 +23,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_cust_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
index 22230c1..0499a72 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-drop.aql
@@ -24,6 +24,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Emp(EmpType)
- partitioned by key cid on group1;
+ primary key cid on group1;
drop dataset Emp;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
index c0cbfc3..e676ef6 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-load.aql
@@ -20,6 +20,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Emp(EmpType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Emp from nc1:"/home/yasser/Dropbox/Research/data/EmpData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
index 20b83f7..ef8ce90 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-q1.aql
@@ -19,7 +19,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Emp(EmpType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_emp_q1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
index df903bf..e69b361 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/emp-scan.aql
@@ -20,7 +20,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Emp(EmpType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_emp_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
index d5a32f5..587fa4d 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-01.aql
@@ -34,9 +34,9 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
index 9b5caf1..ffcc6a0 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-02.aql
@@ -34,9 +34,9 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_2.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql b/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
index bff863c..a953d76 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/join-03.aql
@@ -34,9 +34,9 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_join_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
index b82bf84..a576214 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-drop.aql
@@ -15,6 +15,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Numeric(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset Numeric;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
index 5a7f162..491b391 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-load.aql
@@ -16,7 +16,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Numeric(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset Numeric from nc1:"/home/yasser/Dropbox/Research/data/numericData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
index ec60095..c5bdb34 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/numeric-scan.aql
@@ -15,7 +15,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Numeric(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_numeric_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
index fd68e70..d4fa668 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-drop.aql
@@ -15,6 +15,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
drop dataset Orders;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
index fe1d61d..81d25bc 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-load.aql
@@ -15,6 +15,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/orderData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
index 97aa315..d008979 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-q1.aql
@@ -15,7 +15,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_ord_q1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
index e7c5236..9fc0faf 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/ord-scan.aql
@@ -15,7 +15,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_ord_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
index 5e7b515..304ec7c 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-drop.aql
@@ -14,6 +14,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Spatial(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset Spatial;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
index b36808a..a0e7bae 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-load.aql
@@ -14,7 +14,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Spatial(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset Spatial from nc1:"/home/yasser/Dropbox/Research/data/spatialData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
index e4faa16..5027c29 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/spatial-scan.aql
@@ -16,7 +16,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Spatial(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_spatial_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
index 80a86ac..df7e293 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-drop.aql
@@ -13,6 +13,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Temp(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset Temp;
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
index 0400db1..d75168a 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-load.aql
@@ -14,7 +14,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Temp(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset Temp from nc1:"/home/yasser/Dropbox/Research/data/tempData.json";
diff --git a/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql b/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
index 8dc3cfa..7a369e6 100644
--- a/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/custord/local/temp-scan.aql
@@ -14,7 +14,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset Temp(ExampleType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_temp_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql b/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
index 7847546..0c4a5b3 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/alltables-loadAsOpen.aql
@@ -39,21 +39,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
//load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
//load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql b/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
index cd32ae0..c147235 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/alltablesAsopen-scan.aql
@@ -79,21 +79,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/home/yasser/Desktop/result_scan.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
index e8c56c1..a30757c 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-drop.aql
@@ -90,21 +90,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
drop dataset LineItems;
//drop dataset Orders;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
index 9e05370..0bd1a39 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-load.aql
@@ -90,21 +90,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
//load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
//load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
index 441ac43..f08d390 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q1.aql
@@ -24,7 +24,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
index e002ad9..6933881 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q3.aql
@@ -47,11 +47,11 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
index ceca8cd..d605099 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q5.aql
@@ -70,17 +70,17 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_5.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
index 2586561..acf2ae3 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-q9.aql
@@ -74,17 +74,17 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_9.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
index f4f50b1..4b97d2e 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/closedtables-scan.aql
@@ -90,21 +90,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/home/yasser/Desktop/result_scan.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
index 06d352c..61b6d85 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-drop.aql
@@ -39,21 +39,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
drop dataset LineItems;
//drop dataset Orders;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
index 4241366..fdcf15c 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-load.aql
@@ -39,21 +39,21 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
//load dataset LineItems from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/lineitem.json" pre-sorted;
//load dataset Orders from nc1:"/home/yasser/Dropbox/Research/data/tpch_data/alldata/jsonformat/orders.json" pre-sorted;
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
index ec5e202..6288b4f 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q1.aql
@@ -10,7 +10,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_1.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
index 921cea6..955eba7 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q3.aql
@@ -18,11 +18,11 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_3.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
index 82c996f..ffbd71d 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q5.aql
@@ -30,17 +30,17 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_open_5.adm";
diff --git a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
index 75b176d..8064099 100644
--- a/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
+++ b/asterix-app/src/test/resources/nontagged/tpch/local/opentables-q9.aql
@@ -39,17 +39,17 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_9.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
index 9b83718..f235220 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-multipred.aql
@@ -37,8 +37,8 @@
total: float
}
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
write output to nc1:"rttest/btree-index-join_primary-equi-join-multipred.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql
new file mode 100644
index 0000000..8e8324f
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_01.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : This is a negative test, mis-spelt/incorrect HINT should result in
+ * a plan not using an indexed-nested loops join strategy. We expect a hash join.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Please note content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-equi-join-neg_01.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ index */ = $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql
new file mode 100644
index 0000000..8a62332
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join-neg_02.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : This is a negative test, mis-spelt/incorrect HINT should result in
+ * a plan not using an indexed-nested loops join strategy. We expect a hash join.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Please note content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-equi-join-neg_02.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ index */ = $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
index f7f8d6c..6950747 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_01.aql
@@ -1,46 +1,29 @@
/*
- * Description : Equi joins two datasets, Customers and Orders, based on the customer id.
- * Given the 'indexnl' hint we expect the join to be transformed
- * into an indexed nested-loop join using Customers' primary index.
- * Success : Yes
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that hash-exchanges internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
*/
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
+drop dataverse test1 if exists;
+create dataverse test1;
-create type AddressType as closed {
- number: int32,
- street: string,
- city: string
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
}
-create type CustomerType as closed {
- cid: int32,
- name: string,
- age: int32?,
- address: AddressType?,
- lastorder: {
- oid: int32,
- total: float
- }
-}
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
-create type OrderType as closed {
- oid: int32,
- cid: int32,
- orderstatus: string,
- orderpriority: string,
- clerk: string,
- total: float
-}
-
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+// Please note content enclosed in the comment in the predicate is a HINT to the optimizer
write output to nc1:"rttest/btree-index-join_primary-equi-join_01.adm";
-for $c in dataset('Customers')
-for $o in dataset('Orders')
-where $c.cid /*+ indexnl */ = $o.cid
-return {"customer":$c, "order": $o}
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ indexnl */ = $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
index 9ac6140..b7832fd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_02.aql
@@ -1,46 +1,29 @@
/*
- * Description : Equi joins two datasets, Customers and Orders, based on the customer id.
- * Given the 'indexnl' hint we expect the join to be transformed
- * into an indexed nested-loop join using Customers' primary index.
- * Success : Yes
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that hash-exchanges internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
*/
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
+drop dataverse test1 if exists;
+create dataverse test1;
-create type AddressType as closed {
- number: int32,
- street: string,
- city: string
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
}
-create type CustomerType as closed {
- cid: int32,
- name: string,
- age: int32?,
- address: AddressType?,
- lastorder: {
- oid: int32,
- total: float
- }
-}
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
-create type OrderType as closed {
- oid: int32,
- cid: int32,
- orderstatus: string,
- orderpriority: string,
- clerk: string,
- total: float
-}
-
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+// Please note content enclosed in the comment in the predicate is a HINT to the optimizer
write output to nc1:"rttest/btree-index-join_primary-equi-join_02.adm";
-for $o in dataset('Orders')
-for $c in dataset('Customers')
-where $o.cid /*+ indexnl */ = $c.cid
-return {"customer":$c, "order": $o}
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ indexnl */ = $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
index e33e2a9..9268c2c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_03.aql
@@ -1,5 +1,5 @@
/*
- * Description : Self-equi joins a dataset, Customers, based on the customer id.
+ * Description : Equi joins two datasets, Customers and Orders, based on the customer id.
* Given the 'indexnl' hint we expect the join to be transformed
* into an indexed nested-loop join using Customers' primary index.
* Success : Yes
@@ -26,11 +26,21 @@
}
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create type OrderType as closed {
+ oid: int32,
+ cid: int32,
+ orderstatus: string,
+ orderpriority: string,
+ clerk: string,
+ total: float
+}
-write output to nc1:"rttest/btree-index-join_primary-equi-join_03.adm";
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
-for $c1 in dataset('Customers')
-for $c2 in dataset('Customers')
-where $c1.cid /*+ indexnl */ = $c2.cid
-return {"customer1":$c1, "customer2":$c2}
+write output to nc1:"rttest/btree-index-join_primary-equi-join_04.adm";
+
+for $c in dataset('Customers')
+for $o in dataset('Orders')
+where $c.cid /*+ indexnl */ = $o.cid
+return {"customer":$c, "order": $o}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql
new file mode 100644
index 0000000..3c07154
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_04.aql
@@ -0,0 +1,46 @@
+/*
+ * Description : Equi joins two datasets, Customers and Orders, based on the customer id.
+ * Given the 'indexnl' hint we expect the join to be transformed
+ * into an indexed nested-loop join using Customers' primary index.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type AddressType as closed {
+ number: int32,
+ street: string,
+ city: string
+}
+
+create type CustomerType as closed {
+ cid: int32,
+ name: string,
+ age: int32?,
+ address: AddressType?,
+ lastorder: {
+ oid: int32,
+ total: float
+ }
+}
+
+create type OrderType as closed {
+ oid: int32,
+ cid: int32,
+ orderstatus: string,
+ orderpriority: string,
+ clerk: string,
+ total: float
+}
+
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
+
+write output to nc1:"rttest/btree-index-join_primary-equi-join_05.adm";
+
+for $o in dataset('Orders')
+for $c in dataset('Customers')
+where $o.cid /*+ indexnl */ = $c.cid
+return {"customer":$c, "order": $o}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql
new file mode 100644
index 0000000..7da6b0c
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-equi-join_05.aql
@@ -0,0 +1,36 @@
+/*
+ * Description : Self-equi joins a dataset, Customers, based on the customer id.
+ * Given the 'indexnl' hint we expect the join to be transformed
+ * into an indexed nested-loop join using Customers' primary index.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type AddressType as closed {
+ number: int32,
+ street: string,
+ city: string
+}
+
+create type CustomerType as closed {
+ cid: int32,
+ name: string,
+ age: int32?,
+ address: AddressType?,
+ lastorder: {
+ oid: int32,
+ total: float
+ }
+}
+
+create dataset Customers(CustomerType) primary key cid;
+
+write output to nc1:"rttest/btree-index-join_primary-equi-join_06.adm";
+
+for $c1 in dataset('Customers')
+for $c2 in dataset('Customers')
+where $c1.cid /*+ indexnl */ = $c2.cid
+return {"customer1":$c1, "customer2":$c2}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql
new file mode 100644
index 0000000..8bc306c
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_01.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Please note content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-ge-join_01.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ indexnl */ >= $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql
new file mode 100644
index 0000000..06be0e9
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-ge-join_02.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Please note content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-ge-join_02.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ indexnl */ <= $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql
new file mode 100644
index 0000000..67d75a1
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_01.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-gt-join_01.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ indexnl */ > $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql
new file mode 100644
index 0000000..a7be083
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-gt-join_02.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-gt-join_02.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ indexnl */ < $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql
new file mode 100644
index 0000000..66f2ace
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_01.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-le-join_01.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ indexnl */ <= $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql
new file mode 100644
index 0000000..39c1d51
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-le-join_02.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-le-join_02.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ indexnl */ >= $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql
new file mode 100644
index 0000000..ef6a616
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_01.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-lt-join_01.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key1 /*+ indexnl */ < $y.key2
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql
new file mode 100644
index 0000000..4924514
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/primary-lt-join_02.aql
@@ -0,0 +1,29 @@
+/*
+ * Description : Notice the query hint to use an indexed nested-loops join plan.
+ * : We expect a plan that broadcasts internal dataset DsTwo, then probes internal dataset DsOne's primary index.
+ * Expected Res : Success
+ * Date : 29th November 2012
+ */
+
+drop dataverse test1 if exists;
+create dataverse test1;
+
+create type test1.TestType as open {
+ key1: int32,
+ key2: int32,
+ fname : string,
+ lname : string
+}
+
+create dataset test1.DsOne(TestType) primary key key1;
+create dataset test1.DsTwo(TestType) primary key key1;
+
+// Note: the content enclosed in the comment in the predicate is the HINT to the optimizer
+
+write output to nc1:"rttest/btree-index-join_primary-lt-join_02.adm";
+
+for $x in dataset('test1.DsOne')
+for $y in dataset('test1.DsTwo')
+where $x.key2 /*+ indexnl */ > $y.key1
+return $x
+
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
index 44cdef8..c596984 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
@@ -39,10 +39,10 @@
}
create dataset FacebookUsers(FacebookUserType)
-partitioned by key id;
+primary key id;
create dataset FacebookMessages(FacebookMessageType)
-partitioned by key message-id;
+primary key message-id;
create index fbmIdxAutId if not exists on FacebookMessages(author-id-copy);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
index 03cbf24..bb803b0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multipred.aql
@@ -27,9 +27,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index title_index on DBLP(title);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
index e07eee5..8a4f056 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_01.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index title_index on DBLP(title);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
index 2546244..fa68cd2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_02.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index title_index on CSX(title);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
index 251c88e..3eedafd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join_03.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index title_index on DBLP(title);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
index d3d8d69..0d096a7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-01.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index (composite key) defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Roger"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
index eeded38..fbcdd50 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-02.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname >= "Susan"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
index 1046edd..2047b62 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-03.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname < "Isa"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
index 2bee1fe..84147ae 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-04.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname <= "Vanpatten"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
index 0423290..0fbf179 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-05.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname != "Max"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
index 313a4fa..08164fd 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-06.aql
@@ -19,7 +19,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname = "Julio"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
index e9063eb..2b635a5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-07.aql
@@ -20,7 +20,7 @@
}
// create internal dataset with primary index defined on fname,lname fields
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.lname = "Kim"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
index 9919457..337072e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-08.aql
@@ -15,7 +15,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname = "Young Seok" and $emp.lname = "Kim"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
index 8973d2d..cc52d2a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-09.aql
@@ -19,7 +19,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname = "Julio" or $emp.lname = "Malaika"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
index 5076e97..4d0f85e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-10.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Alex" and $emp.lname < "Zach"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
index feeaaad..164e49a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-11.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Allan" and $emp.lname < "Zubi"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
index 740cca6..659d1ff 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-12.aql
@@ -19,7 +19,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Allan" and $emp.lname = "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
index 47e81b2..3c49200 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-13.aql
@@ -19,7 +19,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname = "Julio" and $emp.lname < "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
index 0ec31aa..e93061f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-14.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname >= "Michael" and $emp.lname <= "Xu"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
index f595804..97a486a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-15.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
index 022bd50..cf80ed2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-16.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname >= "Craig" and $emp.lname >= "Kevin" and $emp.fname <= "Mary" and $emp.lname <= "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
index ad114cd..db9c994 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-17.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname <= "Craig" and $emp.lname > "Kevin"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
index 43b0bd3..9787005 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-18.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname != "Michael" and $emp.lname != "Carey"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
index fe2dc41..0dff6af 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-19.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname <= "Mary" and $emp.lname <= "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
index c817492..ac9e374 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-20.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname >= "Craig" and $emp.lname >= "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
index 46c107c..afa241f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-21.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname > "Max"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
index 0f74980..4af86a0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-22.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname >= "Sofia"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
index c942165..f73bd1f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-23.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname < "Chen"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
index 0c66008..1faa37e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-24.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname <= "Julio"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
index 770baf5..6b24b7c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-25.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname > "Neil" and $emp.fname < "Roger"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
index ba5de37..05a69d2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-26.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname;
+create dataset testdst(TestType) primary key fname;
for $emp in dataset('testdst')
where $emp.fname >= "Max" and $emp.fname <= "Roger"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-27.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-27.aql
index ace6808..e9ccb87 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-27.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-27.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname <= "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-28.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-28.aql
index 48a7354..5e7b4e3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-28.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-28.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Craig" and $emp.lname > "Kevin" and $emp.fname < "Mary" and $emp.lname <= "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-29.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-29.aql
index 3ae7a8c..c7d4753 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-29.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-29.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname > "Craig" and $emp.lname >= "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-30.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-30.aql
index 50e9a26..a9a2573 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-30.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-primary-30.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key fname,lname;
+create dataset testdst(TestType) primary key fname,lname;
for $emp in dataset('testdst')
where $emp.fname >= "Craig" and $emp.lname > "Kevin" and $emp.fname < "Mary" and $emp.lname < "Tomes"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
index 5117da4..c2e6257 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-31.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
index 37b46b3..d009e2e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-32.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
index 8f1b4d5..2b364e4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-33.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
index 5042bd9..f20b5a9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-34.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
index 2e38330..dfc5017 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-35.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
index 84bdcd4..7d72623 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-36.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
index 73133c2..7c155ce 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-37.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
index 3f844ee..dc63f05 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-38.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
index 3813e48..a9d32c3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-39.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
index 4e13597..25c6dec 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-40.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
index c724cbe..bc65009 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-41.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
index 06c93b0..289d1b4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-42.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
index 490becb..86e5d67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-43.aql
@@ -20,7 +20,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
index f75f3de..54e149e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-44.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
index 6717780..cc33e5a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-45.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
index 8a508b3..f486e44 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-46.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
index 70e8185..5d06a51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-47.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
index bb6f24d..2dc1ab4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-48.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
index 6d11235..6175ba6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-49.aql
@@ -19,7 +19,7 @@
}
// create internal dataset
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
index 3a91c09..04aefc3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-50.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
index 3be3875..5ae63a9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-51.aql
@@ -18,7 +18,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
index 7b52ec9..e720742 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-52.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
index 45536b5..65a9b6d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-53.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
index f1a3cb5..a18975a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-54.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
index bbeca00..94c1463 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-55.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
index 7ecfea9..53f8ca4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-56.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
index 2183f69..43cf54d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-57.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
index bdf16b6..aa18136 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-58.aql
@@ -17,7 +17,7 @@
lname : string
}
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-59.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-59.aql
index e30006f..2b0a300 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-59.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index/btree-secondary-59.aql
@@ -19,7 +19,7 @@
}
// create internal dataset
-create dataset testdst(TestType) partitioned by key id;
+create dataset testdst(TestType) primary key id;
create index sec_Idx on testdst(fname,lname);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
index 313e9a4..7dfe817 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-complex.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
index 8a237d2..d21fa6e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/consolidate-selects-simple.aql
@@ -16,7 +16,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"rttest/consolidate-selects-simple.aql";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql b/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
index 6fdae88..6a86b8a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/cust_group_no_agg.aql
@@ -18,7 +18,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql b/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
index acad4a1..389dad5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/denorm-cust-order.aql
@@ -36,9 +36,9 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
create dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/custorder.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql b/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
index a4784c1..6fc1dac 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/distinct_aggregate.aql
@@ -24,7 +24,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems_q1(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
write output to nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
index 2898058..f748eae 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-dblp-csx.aql
@@ -22,8 +22,8 @@
create nodegroup group1 if not exists on nc1, nc2;
-create dataset DBLP(DBLPType) partitioned by key id on group1;
-create dataset CSX(CSXType) partitioned by key id on group1;
+create dataset DBLP(DBLPType) primary key id on group1;
+create dataset CSX(CSXType) primary key id on group1;
write output to nc1:'rttest/fj-dblp-csx.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
index 778232a..19ceda0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-phase1.aql
@@ -19,10 +19,10 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
create dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
// set simfunction "jaccard";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql b/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
index 866c027..a0973f4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/fj-phase2-with-hints.aql
@@ -15,7 +15,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP_fuzzyjoin_078(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'rttest/fuzzyjoin_078.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
index 49c4157..0db7a51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inlined_q18_large_volume_customer.aql
@@ -49,11 +49,11 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/inlined_q18_large_volume_customer.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql b/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
index 9427ab5..cd1d78c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/introhashpartitionmerge.aql
@@ -9,7 +9,7 @@
create nodegroup group1 if not exists on nc1, nc2;
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank on group1;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank on group1;
write output to nc1:'rttest/introhashpartitionmerge.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
index 31c0d03..d193238 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains-panic.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
index 9851cd8..137b1c7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-contains.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
index 4608aeb..6f7f38c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check-panic.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
index 7b264b7..0d9db19 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-check.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
index 9412cf3..7decdb5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance-panic.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
index d6be7d4..c7a9ba4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-edit-distance.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
index b241311..3a3d7bb 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-edit-distance.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
index 33f9d5a..5e9ac0e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-fuzzyeq-jaccard.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
index 376415f..dbb9c71 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard-check.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
index ef7143a..f25fb8b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ngram-jaccard.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
index 8209572..c105eab 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check-panic.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
index b55a516..b12037d1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-check.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
index 63bead1..84bcbb9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance-panic.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
index 7958fb4..afafa37 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-edit-distance.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
index f156bd2..86b6474 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-edit-distance.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
index 6632189..64812db 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-fuzzyeq-jaccard.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
index 61dcc7f..4f8b8fe 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard-check.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
index 7af2161..62d6806 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/olist-jaccard.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
index d65a517..4ac02c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-fuzzyeq-jaccard.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
index 9551294..80e8aef 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard-check.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
index 6580bd0..588ca14 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/ulist-jaccard.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
index eee681a..8d9cb02 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-contains.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
index fb6d0e8..006398e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-fuzzyeq-jaccard.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
index 10ca5ce..4c814e4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard-check.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
index 7f544b6..53a5bbb 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-basic/word-jaccard.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
index 4d6bfd9..1c96adc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
index a4d4163..e80443e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
index d6f58fc..d0d3035 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let-panic.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
index 24d3202..2d2f34f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-edit-distance-check-let.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
index de57848..fb59040 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-let.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
index 2eed598..b86c6b2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ngram-jaccard-check-multi-let.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
index 337eced..aa9bc59 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let-panic.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
index c3be0bd..8a78450 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-edit-distance-check-let.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
index 0652131..48ec727 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/olist-jaccard-check-let.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
index 5a3b329..0757d41 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/ulist-jaccard-check-let.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
index 2c45ea8..ec97668 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-let.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
index 8f8f014..bf1821e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-complex/word-jaccard-check-multi-let.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
index 78a285a..ee768d7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
index b05c0d1..ca2827d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
index 87ebe4a..0e15708 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
index 77aa691..1a97936 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
index 9f6e66c..bf6bb82 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
index 1979201..dae2a78 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
index 25cdc53..21114fc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
@@ -25,7 +25,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
index e5dec4f..b90b4f7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
index ae24f13..c78f65c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
index f7df554..3bbcc28 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
index 61816b8..7dca72a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
@@ -25,7 +25,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
index d16776a..79d8baf 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
index 0d4872b..0efb32d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
index c89fdc3..2bca24d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
@@ -25,7 +25,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
index 72d0341..8805542 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
index 99a3c79..2d2e0cc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
index 24c66ac..0d5bf9c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
index ef91b89..5ffa92e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join-noeqjoin/word-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
index f9cc7e9..d24ee17 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
index 011afea..ec553a0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
index 649c3b6..4de1680 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
index e6a3c56..091676f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance-check_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
index a94e84f..8a0282c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
index 61edf5c..5d026db 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
index 6c61de1..11b39c5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
index 5925f47..b8952e9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-edit-distance_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
index f6dd893..4704213 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
index d9822aa..8454be9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
index 0431a6c..4187d8f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-edit-distance_03.aql
@@ -15,7 +15,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(authors) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
index 933e5d0..84fb8f3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
index 1213935..d0c3c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
index 70c7640..a725edc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-fuzzyeq-jaccard_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
index 4d7939a..c25e2e3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
index e0acebb..0e9f4d5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
index 808965d..bbd0f51 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
index e66cdc5..956f712 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard-check_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
index a28a6af..742cb69 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
index a4ff58d..a620399 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index ngram_index on CSX(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
index 1a90cf9..d12bf98 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
index 7be6773..6b308c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ngram-jaccard_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index ngram_index on DBLP(title) type ngram(3);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
index 84d2f45..1afe025 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
index b0ec650..aec4c85 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
index 03a1a3f..47cea17 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
index e7814af..9abc4e8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance-check_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
index ac25ac2..b8f6144 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
index a160c4d..7b17749 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
index dba92b9..7132950 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
index 4835727..84c7d5b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-edit-distance_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
index 1dddf5e..c84bd28 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
index 15b1001..c6b9d9c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
index af4814f..962f05c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-edit-distance_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
index 2b46707..4b3793a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
index 4f188dd..60cdb07 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
index 05cf635..2df41b5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-fuzzyeq-jaccard_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
index 60fc705..8968ec6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
index 9cdb78b..96b8622 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
index b1c4d68..e8763b4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
index a1e1478..3194e47 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard-check_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
index cabdac1..1c4c7c2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
index 49a7411..212b64b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
index 639d782..22b4108 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
index c3e34b0..32e8a6c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/olist-jaccard_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
index c19ce47..cd391a4 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
index 6e3a274..a7d7118 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
index 396af09..a5ec440 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-fuzzyeq-jaccard_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
index 8792dd2..dead8cc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
index 185df97..b18abf8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
index 4d2b2e7..5afd428 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
index acf4d71..3c8bdd7 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard-check_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
index 0d5ad31..2001a2a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_01.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
index 8ac631e..95ce2ad 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_02.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
create index interests_index on Customers2(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
index 94aedbb..e4fc27f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_03.aql
@@ -23,7 +23,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
index e30fbcf..6356f79 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/ulist-jaccard_04.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
create index interests_index on Customers(interests) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
index 8438509..ff39969 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
index 6f6c5f6..f693855 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on CSX(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
index 3f3a247..72f9bdc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-fuzzyeq-jaccard_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
index 14f63bf..2a076bc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
index 46932be..98b3972 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on CSX(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
index 77ac9ba..dace174 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
index 6129c52..fa68be2 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard-check_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
index 47d1192..df0b75e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_01.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
index ac33cb0..1f227c8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_02.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
create index keyword_index on CSX(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
index 128e500..7a719ff 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_03.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
index 325f955..077803b 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/inverted-index-join/word-jaccard_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
create index keyword_index on DBLP(title) type keyword;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
index f0251ad..52b35b8 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_01.aql
@@ -66,9 +66,9 @@
write output to nc1:"/tmp/join-super-key_01.adm";
create dataset LineItems(LineItemType)
- partitioned by key l_partkey, l_linenumber on group1;
+ primary key l_partkey, l_linenumber on group1;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
for $li in dataset('LineItems')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
index 0904f37..98f7e9e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/join-super-key_02.aql
@@ -66,9 +66,9 @@
write output to nc1:"/tmp/join-super-key_01.adm";
create dataset LineItems(LineItemType)
- partitioned by key l_partkey, l_linenumber on group1;
+ primary key l_partkey, l_linenumber on group1;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
for $ps in dataset('PartSupp')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
index 1f43f73..4855576 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_01.aql
@@ -66,9 +66,9 @@
write output to nc1:"/tmp/loj-super-key_01.adm";
create dataset LineItems(LineItemType)
- partitioned by key l_partkey, l_linenumber on group1;
+ primary key l_partkey, l_linenumber on group1;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
for $li in dataset('LineItems')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
index ceab193..ce937c5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/loj-super-key_02.aql
@@ -66,9 +66,9 @@
write output to nc1:"/tmp/loj-super-key_01.adm";
create dataset LineItems(LineItemType)
- partitioned by key l_partkey, l_linenumber on group1;
+ primary key l_partkey, l_linenumber on group1;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
for $ps in dataset('PartSupp')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql b/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
index 0b77f55..d235b09 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/nested_loj2.aql
@@ -47,11 +47,11 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/nested_loj.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql b/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
index 3a1ef04..1616a67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/nested_loj3.aql
@@ -57,13 +57,13 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/tmp/nested_loj.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql b/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
index 3aa1855..26b5ab0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/noncollocated.aql
@@ -21,10 +21,10 @@
create nodegroup group2 if not exists on nc2;
create dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
create dataset Visitors(VisitorType)
- partitioned by key vid on group2;
+ primary key vid on group2;
write output to nc1:"/tmp/fuzzy1.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql b/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
index 682800c..8cef009 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orderby-desc-using-gby.aql
@@ -24,7 +24,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"rttest/gby-using-orderby-desc.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
index c2f96f6..960a8e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-aggreg.aql
@@ -16,7 +16,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key oid on group1;
+ primary key oid on group1;
write output to nc1:"/tmp/orders-aggreg.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
index d82be3c..73ec062 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-composite-index-search.aql
@@ -14,7 +14,7 @@
o_comment: string
}
-create dataset Orders(OrderType) partitioned by key o_orderkey;
+create dataset Orders(OrderType) primary key o_orderkey;
create index idx_Custkey_Orderstatus on Orders(o_custkey, o_orderstatus);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
index 150e962..b5aa50d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_01.aql
@@ -19,7 +19,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
index 725da3a..daef719 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive-open_02.aql
@@ -19,7 +19,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
index 0a42fa6..21a7f5a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_01.aql
@@ -19,7 +19,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
index fed390c..25a6d3f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-conjunctive_02.aql
@@ -19,7 +19,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
index a6bfa42..31d41b6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search-open.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create index idx_Orders_Custkey on Orders(o_custkey);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
index 75be90b..4e01059 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/orders-index-search.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create index idx_Orders_Custkey on Orders(o_custkey);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
index 09a1ef6..1f6f887 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search-open.aql
@@ -21,7 +21,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
write output to nc1:"/tmp/prim_index_search.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
index 7a22597..7939fed 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/prim-idx-search.aql
@@ -21,7 +21,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
write output to nc1:"/tmp/prim_index_search.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql b/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
index dfc05ff..ee998cf 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/pull_select_above_eq_join.aql
@@ -21,10 +21,10 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Users(UserType)
- partitioned by key uid on group1;
+ primary key uid on group1;
create dataset Visitors(VisitorType)
- partitioned by key vid on group1;
+ primary key vid on group1;
write output to nc1:"/tmp/pull-select-above-eq-join.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql b/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
index 5dd229b..3c0235c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/push-project-through-group.aql
@@ -15,7 +15,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
write output to nc1:'rttest/fuzzyjoin_080.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql b/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
index a9c4fe7..0ffe2b0 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/push_limit.aql
@@ -16,7 +16,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
write output to nc1:"/tmp/push_limit.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q1.aql b/asterix-app/src/test/resources/optimizerts/queries/q1.aql
index 8f62292..ced342d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q1.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q1.aql
@@ -27,7 +27,7 @@
create nodegroup group1 if not exists on nc1, nc2;
-create dataset User(UserType) partitioned by key name on group1;
+create dataset User(UserType) primary key name on group1;
write output to nc1:"/tmp/q1.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql b/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql
new file mode 100644
index 0000000..b6de1fa
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/q1_pricing_summary_report_nt.aql
@@ -0,0 +1,50 @@
+drop dataverse tpch if exists;
+create dataverse tpch;
+use dataverse tpch;
+
+create type LineItemType as closed {
+ l_orderkey: int32,
+ l_partkey: int32,
+ l_suppkey: int32,
+ l_linenumber: int32,
+ l_quantity: double,
+ l_extendedprice: double,
+ l_discount: double,
+ l_tax: double,
+ l_returnflag: string,
+ l_linestatus: string,
+ l_shipdate: string,
+ l_commitdate: string,
+ l_receiptdate: string,
+ l_shipinstruct: string,
+ l_shipmode: string,
+ l_comment: string
+}
+
+create dataset LineItem(LineItemType)
+ primary key l_orderkey, l_linenumber;
+
+load dataset LineItem
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm";
+
+for $l in dataset('LineItem')
+where $l.l_shipdate <= '1998-09-02'
+/*+ hash*/
+group by $l_returnflag := $l.l_returnflag, $l_linestatus := $l.l_linestatus
+ with $l
+order by $l_returnflag, $l_linestatus
+return {
+ "l_returnflag": $l_returnflag,
+ "l_linestatus": $l_linestatus,
+ "sum_qty": sum(for $i in $l return $i.l_quantity),
+ "sum_base_price": sum(for $i in $l return $i.l_extendedprice),
+ "sum_disc_price": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount)),
+ "sum_charge": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount) * (1 + $i.l_tax)),
+ "ave_qty": avg(for $i in $l return $i.l_quantity),
+ "ave_price": avg(for $i in $l return $i.l_extendedprice),
+ "ave_disc": avg(for $i in $l return $i.l_discount),
+ "count_order": count($l)
+}
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q2.aql b/asterix-app/src/test/resources/optimizerts/queries/q2.aql
index 17b1f86..5fba695 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q2.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q2.aql
@@ -34,7 +34,7 @@
create nodegroup group1 if not exists on nc1, nc2;
-create dataset Event(EventType) partitioned by key name on group1;
+create dataset Event(EventType) primary key name on group1;
write output to nc1:"/tmp/q2.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql b/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
index 89c6fd9..8e6bf6e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q3_shipping_priority.aql
@@ -49,11 +49,11 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/q3_shipping_priority.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
index 6b5ccfa..670a392 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/q5_local_supplier_volume.aql
@@ -72,17 +72,17 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
create dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
create dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
create dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
write output to nc1:"/tmp/q5_local_supplier.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
index 207b5c9..e95be99 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_01.aql
@@ -20,8 +20,8 @@
rec: rectangle
}
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
create index rtree_index on MyData1(point) type rtree;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
index e70bedf..c0a8634 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_02.aql
@@ -20,8 +20,8 @@
rec: rectangle
}
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
create index rtree_index on MyData2(point) type rtree;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
index 85fc22b..6fb79f3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-index-join/spatial-intersect-point_03.aql
@@ -20,7 +20,7 @@
rec: rectangle
}
-create dataset MyData(MyRecord) partitioned by key id;
+create dataset MyData(MyRecord) primary key id;
create index rtree_index on MyData(point) type rtree;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
index 14416c8..b4a5796 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index-open.aql
@@ -17,7 +17,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset MyData(MyRecord)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
index c8525c1..41a5a91 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/rtree-secondary-index.aql
@@ -17,7 +17,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset MyData(MyRecord)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
index d30b7f8..7b8215f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-all.aql
@@ -30,9 +30,9 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems_q1(LineItemType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
delete $l from dataset LineItems_q1;
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
index 4d5de58..a307a1d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete-rtree-secondary-index.aql
@@ -17,7 +17,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset MyData(MyRecord)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
index 9bf6a4c..57e1bd3 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-delete.aql
@@ -31,9 +31,9 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems_q1(LineItemType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
delete $l from dataset LineItems_q1 where $l.l_shipdate <= '1998-09-02';
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
index afecc04..97bc8df 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-insert-secondary-index.aql
@@ -30,10 +30,10 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems_q1(LineItemType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
create index idx_LineID_partkey on LineID(l_partkey);
create index idx_LineID_suppkey on LineID(l_suppkey);
diff --git a/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql b/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
index beb73a7..1c26a46 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/scan-insert.aql
@@ -30,10 +30,10 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset LineItems_q1(LineItemType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey on group1;
+ primary key l_orderkey on group1;
insert into dataset LineID (
for $l in dataset('LineItems_q1')
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
index 96d3b1a..4d63c15 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_01.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
index e0ef1aa..762b41c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_02.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_02.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
index 75a584e..e14f329 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_03.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_03.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
index 2f42ab0..f539938 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_04.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_04.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
index 73f8ddb..b126f67 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_05.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_05.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
index 221bfb8..f32285e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_06.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_06.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
index 949ed2c..2dff7e5 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_07.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_07.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
index 3f899a2..9bc10e6 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-let-to-edit-distance-check_08.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-let-to-edit-distance-check_08.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
index 0370fff..09e962c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_01.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
index 77d78f8..35e28fc 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_02.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_02.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
index c34c28a..e05f85a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_03.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_03.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
index 082dc39..b890967 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_04.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
index 6d892df..bc66061 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_05.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_05.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
index 20d4879..018d6d1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_06.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_06.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
index 49871f9..cdaefa9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_07.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_07.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
index ed6d406..745044f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/edit-distance-to-edit-distance-check_08.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_edit-distance-to-edit-distance-check_08.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
index 702b739..f6e9d52 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-edit-distance-check.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_fuzzyeq-to-edit-distance-check.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
index eaacc3e..5f57a3e 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/fuzzyeq-to-jaccard-check.aql
@@ -16,7 +16,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_fuzzyeq-to-jaccard-check.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
index 8647565..733c6d9 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_01.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
index b48eb24..a360ef1 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_02.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
index 74dcf9d..ad8abca 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_03.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
index e17d458..fd236de 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_04.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
index 266f369..b750a5d 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_05.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
index 3beec4b..56b1471 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_06.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
index 86401ac..6ae0444 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_07.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
index 372a3e8..5d04402 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-let-to-jaccard-check_08.aql
@@ -18,7 +18,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-let-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
index 8e8a0f8..33eb133 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_01.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_01.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
index bee4595..3bdf01f 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_02.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_02.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
index 591c8f0..3a22a16 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_03.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_02.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
index 2d10e1d..bdce095 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_04.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_03.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
index f091dcd..977f22a 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_05.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_05.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
index 7b98eaf..eb3f3ef 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_06.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_06.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
index 71f087e..753ce3c 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_07.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_07.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
index 13d11ed..2d1bd93 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/similarity/jaccard-to-jaccard-check_08.aql
@@ -17,7 +17,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
write output to nc1:"rttest/similarity_jaccard-to-jaccard-check_08.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql b/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
index ec4f6f1..9605b37 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/sort-cust.aql
@@ -27,7 +27,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
write output to nc1:"/tmp/custlimit.adm";
diff --git a/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql b/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
index 5a943f1..3f34242 100644
--- a/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
+++ b/asterix-app/src/test/resources/optimizerts/queries/unnest_list_in_subplan.aql
@@ -17,8 +17,8 @@
create nodegroup group1 if not exists on nc1, nc2;
-create dataset DBLP(DBLPType) partitioned by key id on group1;
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank on group1;
+create dataset DBLP(DBLPType) primary key id on group1;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank on group1;
write output to nc1:'rttest/unnest_list_in_subplan.adm';
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan
new file mode 100644
index 0000000..5327887
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_01.plan
@@ -0,0 +1,17 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$7][$$10] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan
new file mode 100644
index 0000000..9eb8a7f
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-neg_02.plan
@@ -0,0 +1,18 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$9][$$8] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$9] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
index a83e0eb..95a2309 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
@@ -1,16 +1,15 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
index d6d4497..b2a3f65 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
@@ -1,16 +1,14 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$9] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
index c5c0f4f..a83e0eb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
@@ -6,6 +6,11 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan
new file mode 100644
index 0000000..d6d4497
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_04.plan
@@ -0,0 +1,16 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan
new file mode 100644
index 0000000..c5c0f4f
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_05.plan
@@ -0,0 +1,11 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan
new file mode 100644
index 0000000..6575922
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_01.plan
@@ -0,0 +1,13 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan
new file mode 100644
index 0000000..0abc866
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-ge-join_02.plan
@@ -0,0 +1,12 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan
new file mode 100644
index 0000000..6575922
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_01.plan
@@ -0,0 +1,13 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan
new file mode 100644
index 0000000..0abc866
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-gt-join_02.plan
@@ -0,0 +1,12 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan
new file mode 100644
index 0000000..6575922
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_01.plan
@@ -0,0 +1,13 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan
new file mode 100644
index 0000000..0abc866
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-le-join_02.plan
@@ -0,0 +1,12 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan
new file mode 100644
index 0000000..6575922
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_01.plan
@@ -0,0 +1,13 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan
new file mode 100644
index 0000000..0abc866
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-lt-join_02.plan
@@ -0,0 +1,12 @@
+-- SINK_WRITE |PARTITIONED|
+ -- RANDOM_MERGE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan b/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan
new file mode 100644
index 0000000..676e6e2
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/q1_pricing_summary_report_nt.plan
@@ -0,0 +1,25 @@
+-- SINK_WRITE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$1(ASC), $$2(ASC) ] |PARTITIONED|
+ -- STABLE_SORT [$$1(ASC), $$2(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$74, $$75] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- HASH_PARTITION_EXCHANGE [$$74, $$75] |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$48, $$49] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
index 3583ce0..2c4ba6a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/avg_empty_02.aql
@@ -13,7 +13,7 @@
val: double
}
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
write output to nc1:"rttest/aggregate_avg_empty_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
index 0a6cc8e..9ab7168 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/count_empty_02.aql
@@ -13,7 +13,7 @@
val: double
}
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
write output to nc1:"rttest/aggregate_count_empty_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
index 79ae1d8..8b954e1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/max_empty_02.aql
@@ -13,7 +13,7 @@
val: double
}
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
write output to nc1:"rttest/aggregate_max_empty_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
index 99d49f4..6aa122c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/min_empty_02.aql
@@ -13,7 +13,7 @@
val: double
}
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
write output to nc1:"rttest/aggregate_min_empty_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
index a94457a..c3fd645 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_empty_02.aql
@@ -13,7 +13,7 @@
val: double
}
-create dataset Test(TestType) partitioned by key id;
+create dataset Test(TestType) primary key id;
write output to nc1:"rttest/aggregate_sum_empty_02.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
index 10e952c..8c695c5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_null-with-pred.aql
@@ -16,7 +16,7 @@
sal:int32?
}
-create dataset tdst(TestType) partitioned by key id;
+create dataset tdst(TestType) primary key id;
insert into dataset tdst({"id":123,"sal":1000});
insert into dataset tdst({"id":113,"sal":2000});
diff --git a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
index 9197e33..4c568b1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/aggregate/sum_numeric_null.aql
@@ -15,7 +15,7 @@
sal:int32?
}
-create dataset tdst(TestType) partitioned by key id;
+create dataset tdst(TestType) primary key id;
// In AQL
// sum(numeric + null) => null
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql
new file mode 100644
index 0000000..d38d386
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/date_order.aql
@@ -0,0 +1,16 @@
+drop dataverse test if exists;
+
+create dataverse test;
+
+write output to nc1:"rttest/comparison_date_order.adm";
+
+let $d1 := date("2049-04-23")
+let $d2 := date("2012-02-29")
+let $d3 := date("2021-03-01")
+let $d4 := date("1362-02-28")
+let $d5 := date("1600-02-29")
+let $d6 := date("-0500-03-21")
+
+for $d in [$d1, $d2, $d3, $d4, $d5, $d6]
+order by $d
+return $d
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
index 4268231..a1cbf52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/datetime_range.aql
@@ -11,7 +11,7 @@
}
create dataset TwitterData(Tweet)
- partitioned by key id;
+ primary key id;
load dataset TwitterData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql b/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql
new file mode 100644
index 0000000..ba0d211
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/comparison/time_order.aql
@@ -0,0 +1,16 @@
+drop dataverse test if exists;
+
+create dataverse test;
+
+write output to nc1:"rttest/comparison_time_order.adm";
+
+let $t1 := time("13:00:00.382-10:00")
+let $t2 := time("23:59:59.999Z")
+let $t3 := time("22:00:00+03:00")
+let $t4 := time("00:00:00.00Z")
+let $t5 := time("00:00:00.00-02:00")
+let $t6 := time("00:00:00.47+04:00")
+
+for $t in [$t1, $t2, $t3, $t4, $t5, $t6]
+order by $t
+return $t
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
index d35c84f..58ae18d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/int_01.aql
@@ -12,5 +12,5 @@
let $c6 := int16("-160i16")
let $c7 := int32("-320")
let $c8 := int64("-640i64")
-return {"int8": $c1,"int16": $c2,"int32": $c3, "int64": $c4, "int8": $c5,"int16": $c6,"int32": $c7, "int64": $c8}
-
+let $c9 := int64("-9223372036854775808")
+return {"int8": $c1,"int16": $c2,"int32": $c3, "int64": $c4, "int8_2": $c5,"int16_2": $c6,"int32_2": $c7, "int64_2": $c8, "int64_min" : $c9}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql
new file mode 100644
index 0000000..71e90fa
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/interval.aql
@@ -0,0 +1,15 @@
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/constructor_interval.adm";
+
+let $itv1 := interval-from-date("2010-10-30", "2012-10-21")
+let $itv2 := interval-from-time("03:04:05.678", "232425267")
+let $itv3 := interval-from-datetime("-1987-11-19T02:43:57.938", "1999-11-12T12:49:35.948")
+let $itv4 := interval-start-from-date("0001-12-27", "P3Y394DT48H398.483S")
+let $itv5 := interval-start-from-time("20:03:20.948", "PT48M389.938S")
+let $itv6 := interval-start-from-datetime("-2043-11-19T15:32:39.293", "P439Y3M20DT20H39M58.949S")
+
+return {"interval1": $itv1, "interval2": $itv2, "interval3": $itv3, "interval4": $itv4, "interval5": $itv5, "interval6": $itv6}
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
index e95d102..eaec65a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv01.aql
@@ -30,10 +30,10 @@
dept : string
}
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
insert into dataset student.ugdstd({"id":457,"name":"John Doe","age":22,"sex":"M","dept":"Dance"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
index 1257cac..4ab7c29 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv02.aql
@@ -30,10 +30,10 @@
dept : string
}
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
insert into dataset student.ugdstd({"id":457,"name":"John Doe","age":22,"sex":"M","dept":"Dance"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
index 0c80540..a68d428 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv03.aql
@@ -32,10 +32,10 @@
dept : string
}
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
drop dataset student.ugdstd;
drop dataset student.gdstd;
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
index 20be103..55a3fa0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv04.aql
@@ -33,20 +33,20 @@
dept : string
}
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
drop dataset student.ugdstd;
drop dataset student.gdstd;
drop dataset teacher.prof;
drop dataset teacher.pstdoc;
-create dataset student.ugdstd(stdType) partitioned by key id;
-create dataset student.gdstd(stdType) partitioned by key id;
-create dataset teacher.prof(tchrType) partitioned by key id;
-create dataset teacher.pstdoc(tchrType) partitioned by key id;
+create dataset student.ugdstd(stdType) primary key id;
+create dataset student.gdstd(stdType) primary key id;
+create dataset teacher.prof(tchrType) primary key id;
+create dataset teacher.pstdoc(tchrType) primary key id;
for $l in dataset('Metadata.Dataset')
where $l.DataverseName='student' or $l.DataverseName='teacher'
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
index 4bba053..25e5d67 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv07.aql
@@ -18,7 +18,7 @@
dept:string
}
-create dataset test.employee(Emp) partitioned by key id;
+create dataset test.employee(Emp) primary key id;
load dataset test.employee
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
index 10985e3..8b4a32d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv08.aql
@@ -22,8 +22,8 @@
id : int32
}
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
insert into dataset test.t1({"id":24});
insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
index 26556e0..8e42043 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv17.aql
@@ -20,8 +20,8 @@
id : int32
}
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
insert into dataset test.t1({"id":24});
insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
index 0d3bd53..69dd67d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv18.aql
@@ -21,8 +21,8 @@
id : int32
}
-create dataset test.t1(testtype) partitioned by key id;
-create dataset fest.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
+create dataset fest.t1(testtype) primary key id;
insert into dataset test.t1({"id":24});
insert into dataset test.t1({"id":23});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
index 335f11c..6a0cf97 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/cross-dv19.aql
@@ -33,17 +33,17 @@
text: string
}
-create dataset test1.t1(testtype) partitioned by key id;
+create dataset test1.t1(testtype) primary key id;
-create dataset test2.t2(testtype) partitioned by key id;
+create dataset test2.t2(testtype) primary key id;
-create dataset test2.t3(testtype) partitioned by key id;
+create dataset test2.t3(testtype) primary key id;
-create dataset test1.t2(testtype) partitioned by key id;
+create dataset test1.t2(testtype) primary key id;
-create dataset test1.t3(testtype) partitioned by key id;
+create dataset test1.t3(testtype) primary key id;
-create dataset test2.t4(testtype) partitioned by key id;
+create dataset test2.t4(testtype) primary key id;
create external dataset test1.TwitterData(Tweet)
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
index 7b14957..860dd74 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/drop_dataset.aql
@@ -20,7 +20,7 @@
};
create dataset test.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
drop dataset test.Customers;
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
index 7eebbaf..2395a48 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_across_dataverses.aql
@@ -42,10 +42,10 @@
};
create dataset test1.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
create dataset test2.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
load dataset test1.Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
index 89180ef..3c96539 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/insert_from_source_dataset.aql
@@ -14,7 +14,7 @@
write output to nc1:"rttest/cross-dataverse_insert_from_source_dataset.adm";
-create dataset test.t1(testtype) partitioned by key id;
+create dataset test.t1(testtype) primary key id;
insert into dataset test.t1({"id":456,"name":"Roger"});
insert into dataset test.t1({"id":351,"name":"Bob"});
@@ -22,7 +22,7 @@
insert into dataset test.t1({"id":926,"name":"Richard"});
insert into dataset test.t1({"id":482,"name":"Kevin"});
-create dataset test.t2(testtype) partitioned by key id;
+create dataset test.t2(testtype) primary key id;
insert into dataset test.t2({"id":438,"name":"Ravi"});
insert into dataset test.t2({"id":321,"name":"Bobby"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
index d09755a..197baaa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/cross-dataverse/join_across_dataverses.aql
@@ -25,7 +25,7 @@
};
create dataset test1.Customers(CustomerType)
-partitioned by key cid;
+primary key cid;
create type test2.OrderType as open {
@@ -39,7 +39,7 @@
}
create dataset test2.Orders(OrderType)
-partitioned by key oid;
+primary key oid;
load dataset test1.Customers
@@ -57,4 +57,4 @@
for $o in dataset('test2.Orders')
where $c.cid = $o.cid
order by $c.name, $o.total
-return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid], "orderList":{{$o.oid, $o.cid}}}
+return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/co.aql b/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
index 879b2b8..2b9cc6b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/co.aql
@@ -37,11 +37,11 @@
}
create dataset Customers3(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset Orders3(OrderType)
- partitioned by key oid;
+ primary key oid;
create dataset CustomerOrders3(CustomerOrdersType)
- partitioned by key cid;
+ primary key cid;
write output to nc1:"rttest/custord_co.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
index decc8bb..bcc4079 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_01.aql
@@ -37,11 +37,11 @@
}
create dataset Customers1(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset Orders1(OrderType)
- partitioned by key oid;
+ primary key oid;
create dataset CustomerOrders1(CustomerOrdersType)
- partitioned by key cid;
+ primary key cid;
load dataset Customers1
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
index 2ac6910..481adf4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_02.aql
@@ -37,11 +37,11 @@
}
create dataset Customers2(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset Orders2(OrderType)
- partitioned by key oid;
+ primary key oid;
create dataset CustomerOrders2(CustomerOrdersType)
- partitioned by key cid;
+ primary key cid;
load dataset Customers2
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
index 324dbb0..92a049d5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/denorm-cust-order_03.aql
@@ -37,11 +37,11 @@
}
create dataset Customers3(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset Orders3(OrderType)
- partitioned by key oid;
+ primary key oid;
create dataset CustomerOrders3(CustomerOrdersType)
- partitioned by key cid;
+ primary key cid;
load dataset Customers3
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql b/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
index cd4ef0e..9bc318a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/freq-clerk.aql
@@ -28,7 +28,7 @@
}
create dataset CustomerOrders(CustomerOrderType)
- partitioned by key cid;
+ primary key cid;
load dataset CustomerOrders
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
index ddde56e..f5d2080 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/join_q_01.aql
@@ -47,4 +47,4 @@
for $o in dataset('Orders')
where $c.cid = $o.cid
order by $c.name, $o.total
-return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid], "orderList":{{$o.oid, $o.cid}}}
+return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total, "orderList":[$o.oid, $o.cid]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql b/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
index 5f73e4b..6cafbe0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/load-test.aql
@@ -21,9 +21,9 @@
}
create dataset c1(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset c2(CustomerType)
- partitioned by key cid;
+ primary key cid;
load dataset c1
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
index b930a10..52bc6d2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_03.aql
@@ -26,4 +26,4 @@
let $c2 := {{ $o.orderstatus, $o.clerk}}
let $c3 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
let $c4 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
-return { "orderid": $o.oid, "ordertot":$o.total, "list": $c1, "item1": $c1[0], "item1": $c1[?], "item2": $c1[1], "item3": $c1[2]}
+return { "orderid": $o.oid, "ordertot":$o.total, "list": $c1, "item1": $c1[0], "item2": $c1[1], "item3": $c1[2]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
index b2d55d9..e2fd5b6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_04.aql
@@ -26,4 +26,4 @@
let $c2 := {{ $o.orderstatus, $o.clerk}}
let $c3 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
let $c4 := [$o.heList, $o.openlist, $o.loc, $o.line, $o.poly, $o.lastorder]
-return { "orderid": $o.oid, "ordertot":$o.total, "list": $c3, "item1": $c3[0], "item1": $c3[?], "item2": $c3[1], "item5": $c3[5], "item10": $c3[10]}
+return { "orderid": $o.oid, "ordertot":$o.total, "list": $c3, "item1": $c3[0], "item2": $c3[1], "item5": $c3[5], "item10": $c3[10]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
index 5693312..481c1fd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/custord/order_q_05.aql
@@ -24,4 +24,4 @@
for $o in dataset('Orders')
let $c1 := []
let $c2 := {{}}
-return { "orderid": $o.oid, "ordertot":$o.total, "emptyorderedlist": $c1, "emptyunorderedlist": $c2, "olist_item1": $c1[0], "olist_item1": $c1[?], "olist_item5": $c1[4], "ulist_item1": $c2[?]}
+return { "orderid": $o.oid, "ordertot":$o.total, "emptyorderedlist": $c1, "emptyunorderedlist": $c2, "olist_item1": $c1[0], "olist_item5": $c1[4], "ulist_item1": $c2[?]}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql b/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
index 23e4557..63b9119 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dapd/q3.aql
@@ -24,7 +24,7 @@
}
create dataset User(UserType)
- partitioned by key name;
+ primary key name;
load dataset User
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql
new file mode 100644
index 0000000..f6c6e91
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-cltype.aql
@@ -0,0 +1,40 @@
+/*
+ * Description : Create, drop, and recreate the same closed type; the type has optional fields.
+ *             : Verify correctness by querying the metadata.
+ * Date : 11th Feb 2013
+ * Expected Res : Success
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as closed {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+drop type TestType;
+
+create type TestType as closed {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+write output to nc1:"rttest/dml_create-drop-cltype.adm";
+
+for $l in dataset('Metadata.Datatype')
+where $l.DatatypeName = 'TestType'
+return $l
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql
new file mode 100644
index 0000000..0ef6e93
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/create-drop-opntype.aql
@@ -0,0 +1,40 @@
+/*
+ * Description : Create, drop, and recreate the same open type; the type has optional fields.
+ *             : Verify correctness by querying the metadata.
+ * Date : 11th Feb 2013
+ * Expected Res : Success
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+drop type TestType;
+
+create type TestType as open {
+id : int32,
+salary : double ?,
+name : string,
+durtn : duration ?,
+inter : interval,
+dt : date ?,
+tm : time,
+pt : point ?
+}
+
+write output to nc1:"rttest/dml_create-drop-opntype.adm";
+
+for $l in dataset('Metadata.Datatype')
+where $l.DatatypeName = 'TestType'
+return $l
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
index ab6b6c9..e642073 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset-with-index.aql
@@ -22,7 +22,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
index 1a445bf..ff66d65 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-from-loaded-dataset.aql
@@ -22,7 +22,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql b/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql
new file mode 100644
index 0000000..b25f028
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/drop-empty-secondary-indexes.aql
@@ -0,0 +1,37 @@
+/*
+ * Description : Drop empty secondary indexes (rtree, keyword, and btree).
+ * Expected Result : Success
+ * Date : 8th Feb 2013
+ *
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open {
+id : int32,
+name : string,
+locn : point,
+zip : string
+}
+
+write output to nc1:"rttest/dml_drop-empty-secondary-indexes.adm";
+
+create dataset t1(TestType) primary key id;
+
+create index rtree_index_point on t1(locn) type rtree;
+
+create index keyWD_indx on t1(name) type keyword;
+
+create index secndIndx on t1(zip);
+
+drop index t1.rtree_index_point;
+
+drop index t1.keyWD_indx;
+
+drop index t1.secndIndx;
+
+for $l in dataset('Metadata.Index')
+where $l.IsPrimary=false
+return $l;
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
index 0c87734..8b99487 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/drop-index.aql
@@ -28,7 +28,7 @@
string4: string
}
-create dataset t1(Schema) partitioned by key unique2;
+create dataset t1(Schema) primary key unique2;
// Load data
load dataset t1
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
index 4f22e84..45ec5e6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
@@ -29,7 +29,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create index part_index on LineItem(l_partkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
index f9db086..63739d6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
@@ -29,7 +29,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create index part_index on LineItem(l_partkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
index 00b60cb..586226c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
@@ -16,7 +16,7 @@
}
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create index idx_LineID_partkey on LineID(l_linenumber);
create index idx_LineID_suppkey on LineID(l_suppkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
index 253f5e7..58548b6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
@@ -16,7 +16,7 @@
}
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
insert into dataset LineID (
let $x:=1
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
index 7a3cce2..761066e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_01.aql
@@ -9,7 +9,7 @@
}
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineID
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
index 04a5043..475e6ab 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset-with-index_02.aql
@@ -28,10 +28,10 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
index 1f211e8..7ece2ab 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_01.aql
@@ -9,7 +9,7 @@
}
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineID
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
index ef38109..f05844b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-loaded-dataset_02.aql
@@ -21,9 +21,9 @@
string4: string
}
-create dataset onektup(Schema) partitioned by key unique2;
+create dataset onektup(Schema) primary key unique2;
-create dataset tenktup1(Schema) partitioned by key unique2;
+create dataset tenktup1(Schema) primary key unique2;
load dataset onektup
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
@@ -33,7 +33,7 @@
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
(("path"="nc1://data/wisc/tenktup.adm"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-create dataset tmp(Schema) partitioned by key unique2;
+create dataset tmp(Schema) primary key unique2;
load dataset tmp
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
index c3dc7ba..7c055f7 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-src-dst-01.aql
@@ -20,8 +20,8 @@
id: string
}
-create dataset testds01(testtype01) partitioned by key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds01 ({ "id": "001" });
insert into dataset testds01 ({ "id": "002", "name": "John Doe" });
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
index 2b4622f..a0d4d5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert.aql
@@ -28,10 +28,10 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
index 7e133ae..c646c4c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert_less_nc.aql
@@ -28,10 +28,10 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
index 0411145..908f515 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-from-hdfs.aql
@@ -22,7 +22,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
index efab267..61b455c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/load-with-index.aql
@@ -22,7 +22,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create index idx_partkey on LineItem(l_partkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
index 6c8264f..b2ec24f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o-recursive.aql
@@ -30,9 +30,9 @@
id: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person One", "address": {"street": "3019 DBH", "city": "Irvine", "zip": 92697}, "department": {{ {"name":"CS", "id":299, "review":5}, {"name":"EE", "id":399} }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
index a63e2df..29d7d61 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-c2o.aql
@@ -23,9 +23,9 @@
id: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
insert into dataset testds (
{ "hobby": {{"music", "coding"}}, "id": "001", "name": "Person Three"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
index 94991be..b510ba4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-closed-optional.aql
@@ -13,7 +13,7 @@
id: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person One"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
index 12248dd..9301ac5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert.aql
@@ -13,7 +13,7 @@
name: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person Three", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
index 1903bf2..e673c57 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-insert2.aql
@@ -13,7 +13,7 @@
id:int32
}
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
insert into dataset testds( for $i in range(1,10) return { "id":$i,"name":"John Doe" });
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
index c6ff1cd..bc408be 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-noexpand.aql
@@ -14,7 +14,7 @@
id: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person One"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
index 2070e20..92d2bce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c-recursive.aql
@@ -31,9 +31,9 @@
department: {{Dept}}?
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person One", "address": {"street": "3019 DBH", "city": "Irvine", "zip": 92697}, "department": {{ {"name":"CS", "id":299}, {"name":"EE", "id":399} }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
index 015aa7b..ae69e17 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2c.aql
@@ -21,9 +21,9 @@
name: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
insert into dataset testds (
{ "id": "001", "hobby": {{"music"}}, "name": "Person Three"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
index 545790b..c836a69 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/opentype-o2o.aql
@@ -21,9 +21,9 @@
hobby: string
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
-create dataset testds2(testtype2) partitioned by key id;
+create dataset testds2(testtype2) primary key id;
insert into dataset testds (
{ "name": "Person One", "id": "001", "hobby": "music"}
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
index e0a0695..3a2ad40 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
@@ -21,7 +21,7 @@
}
create dataset Employees(EmployeeType)
- partitioned by key id;
+ primary key id;
insert into dataset Employees({"id":"1234", "stat":{ "age":50, "salary":120000}, "deptCode":32 });
insert into dataset Employees({"id":"5678", "stat":{ "age":40, "salary":100000}, "deptCode":16 });
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
index a35c8e5..36df10a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-btree-secondary-index-nullable.aql
@@ -24,7 +24,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
index fc30529..c44c3e8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index-nullable.aql
@@ -21,7 +21,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
index 4e2ca6c..b6bc17b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-delete-rtree-secondary-index.aql
@@ -15,7 +15,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
index d5d22d3..dde8892 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-btree-secondary-index-nullable.aql
@@ -24,8 +24,8 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset CustomersMini(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset CustomersMini(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
index 161439a..a3d38eb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index-nullable.aql
@@ -26,14 +26,14 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
(("path"="nc1://data/spatial/spatialDataNulls.json"),("format"="adm")) pre-sorted;
create dataset MyMiniData(MyMiniRecord)
- partitioned by key id;
+ primary key id;
create index rtree_index_point on MyMiniData(point) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
index f1bc29d..0ed6488 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/scan-insert-rtree-secondary-index.aql
@@ -20,14 +20,14 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
(("path"="nc1://data/spatial/spatialData.json"),("format"="adm")) pre-sorted;
create dataset MyMiniData(MyMiniRecord)
- partitioned by key id;
+ primary key id;
load dataset MyMiniData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
index fb2c701..85616e4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
@@ -12,7 +12,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
create index rtree_index on MyData(loc) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
index 3da88f8..125fd99 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
@@ -23,7 +23,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
index 2ec32f4..9530df5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree.aql
@@ -17,7 +17,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
@@ -25,7 +25,7 @@
create dataset MyMiniData(MyMiniRecord)
- partitioned by key id;
+ primary key id;
load dataset MyMiniData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
index a0ce842..0309b7c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/insert.aql
@@ -29,10 +29,10 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql b/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
index a0c7f87..58db4b8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/q1_pricing_summary_report_failure.aql
@@ -24,7 +24,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
new file mode 100644
index 0000000..fe0e9e5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
@@ -0,0 +1,27 @@
+/*
+ * Description : Create a feed dataset and verify contents in Metadata
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+primary key id;
+
+write output to nc1:"rttest/feeds_feeds_01.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
new file mode 100644
index 0000000..d72e623
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
@@ -0,0 +1,31 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ Begin ingestion and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+primary key id;
+
+begin feed TweetFeed;
+
+write output to nc1:"rttest/feeds_feeds_02.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
new file mode 100644
index 0000000..615ee83
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
@@ -0,0 +1,34 @@
+/*
+ * Description : Create a feed dataset with an associated function and verify contents in Metadata
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create function feed_processor($x) {
+$x
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+apply function feed_processor@1
+primary key id;
+
+write output to nc1:"rttest/feeds_feeds_03.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
new file mode 100644
index 0000000..bea88fb
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
@@ -0,0 +1,32 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ The feed simulator simulates feed from a file in the HDFS.
+ Begin ingestion and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+primary key id;
+
+begin feed TweetFeed;
+
+write output to nc1:"rttest/feeds_feeds_04.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
new file mode 100644
index 0000000..a9f3236
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
@@ -0,0 +1,31 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ Begin ingestion using a fully qualified name and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+primary key id;
+
+begin feed feeds.TweetFeed;
+
+write output to nc1:"rttest/feeds_issue_230_feeds.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
index 8438ae9..ffabe64 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
index d575424..a74bc3e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
index f0b43d1..53771c1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
index a96e933..923959b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-1_2.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
index f424166..3c7bb5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.1_5.3.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
index 05304c5..d0eebcb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2.2.aql
@@ -17,8 +17,8 @@
rank: int32
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) partitioned by key rank;
+create dataset DBLP(DBLPType) primary key id;
+create dataset TOKENSRANKEDADM(TOKENSRANKEDADMType) primary key rank;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
index be6073b..b5ae4c6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
index 9d31f4a..d50b67c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_2.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
index 4bc6836..2e57a79 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_3.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
index 149a9b4..7f59c30 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_4.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
index 90e1b15..4f53db6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
index 2771589..67bd952 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.2.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
index 57cdab4..9944c93 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
index e4e8cfd..0319117 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.3.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
index 8d3c4c3..9a10925 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-2_5.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
index e65952d..2de6807 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
index 4a98706..a3b5bc5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.2.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
index d998928..26f5e46 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-3_1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
index fa0bfac..dd88883 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
index 7bf7d5f..abd3425 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
@@ -15,7 +15,7 @@
id: int32
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
index a899a80..288194c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
index 4c30bb7..0010b8b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_2.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
index 5c49fd2..aa3c661 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_3.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
index fa8501c..2aa331c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_4.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
index 97d163f..a8f5eec 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
index 0329a87..d724b57 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.2.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
index 9d9c141..57dbb53 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
index 11d4327..fa9f11c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.3.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
index 603077e..40fd306 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-2_5.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
index 2a3f6ec..59a881b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
index e942aa0..9352c3b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_2.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
index dbbc1b1..c1c6322 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_3.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
index a703037..75fe12b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_4.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
index c6d61e2..bfed5c6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
index f881151..b683acb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.2.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
index b0884f2..3134c41 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
index 46bd31d..97e16c9 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.3.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
index c8ed34b..175e4b1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
index e4eab9d..e471d69 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.4.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
index 55d508b..588591d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-3_5.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
index 30445bf..0195185 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
index 01ee281..085d139 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_2.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
index 4439af1..d379cc4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-aqlplus_3.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
index 77e32a2..cb774be 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-csx-dblp-aqlplus_1.aql
@@ -20,8 +20,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
index 274f0c2..edec926 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-lookup_1.aql
@@ -12,7 +12,7 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
index 500454f..b31ba58 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/events-users-aqlplus_1.aql
@@ -22,7 +22,7 @@
}}
}
-create dataset User(UserType) partitioned by key name;
+create dataset User(UserType) primary key name;
load dataset User
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
index 7598987..af327b4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_1.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
index 5b3c828..2d81ddd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_2.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
index c131c75..9785f52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-int-aqlplus_3.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
index 6a6248f..bdcaa36 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.1.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
index 75eb9c8..db833d0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_1.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
index 39168c6..9f6e758 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_2.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
index c9e4ece..e438ec8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-lot-aqlplus_3.aql
@@ -9,7 +9,7 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
+create dataset Users(UserType) primary key uid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
index fa63e8e..6dd9cbc 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-3_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
index 5cea93e..40fa07b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
index 6ec3fc6..e56c48c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_2.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
index baf6d12..01c172e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-aqlplus_3.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
index ff94813..0746be5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
index 9f2fd07..6654d07 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-3_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
index 1cfaccf..a289015 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
index b5eb560..59b7c9a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_2.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
index 5b557ee..861db2f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_3.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
index 1353960..49ac082 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_4.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
index 58cc81f..08e8bf8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-aqlplus_5.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
index 01ad330..4679eca 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_1.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
index 35fde70..669284c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/user-vis-lot-int-aqlplus_2.aql
@@ -18,8 +18,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
new file mode 100644
index 0000000..8271f1d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
@@ -0,0 +1,26 @@
+/*
+* Description : Create an external dataset that contains a tuples, the lines from a (*sequence*) file in HDFS.
+ Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_02.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
new file mode 100644
index 0000000..4ddf511
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
@@ -0,0 +1,28 @@
+/*
+* Description : Create an external dataset that contains a tuples, the lines from a large (35kb) text file in HDFS.
+ The input file is sufficiently large to guarantee that # of bytes > than internal buffer of size 8192.
+ This causes a record to span across the buffer size boundaries.
+ Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/large_text"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_03.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
new file mode 100644
index 0000000..4a42a20
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
@@ -0,0 +1,23 @@
+/*
+* Description : Create an external dataset that contains a tuples, the lines from a file in HDFS.
+ Iterate over the contained tuples.
+* Expected Res : Success
+* Issue : 245
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ line: string
+};
+
+create external dataset TextDataset(LineType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/asterix_info.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_issue_245_hdfs.adm";
+
+for $x in dataset('TextDataset')
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql
new file mode 100644
index 0000000..544b37c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_5.aql
@@ -0,0 +1,35 @@
+/*
+* Description : Create an dataset and load it from two file splits
+ Use hint (cardinality) for the created dataset.
+* Expected Res : Success
+* Date : 30th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLPadm(DBLPType)
+primary key id
+hints(cardinality=200);
+
+// drop dataset DBLPadm;
+load dataset DBLPadm
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/part-00000.adm,nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_5.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql
new file mode 100644
index 0000000..0b7c20d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_6.aql
@@ -0,0 +1,28 @@
+/*
+* Description : Create an external dataset that contains a tuples, the lines from a (*sequence*) file in HDFS.
+ Provide hint(cardinality) when creating the dataset.
+ Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date : 30th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using hdfs
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."))
+hints(cardinality=10);
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_6.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql
new file mode 100644
index 0000000..8f4a74d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7.aql
@@ -0,0 +1,33 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ Use hint (cardinality) for the feed dataset.
+ Begin ingestion using a fully qualified name and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 30th Jan 2013
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+primary key id
+hints(cardinality=200);
+
+begin feed feeds.TweetFeed;
+
+write output to nc1:"rttest/hints_issue_251_dataset_hint_7.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
index 1015e82..0800784 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-primary-equi-join.aql
@@ -37,8 +37,8 @@
items: [int32]
}
-create dataset Customers(CustomerType) partitioned by key cid;
-create dataset Orders(OrderType) partitioned by key oid;
+create dataset Customers(CustomerType) primary key cid;
+create dataset Orders(OrderType) primary key oid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
index d1e9824..1bf3490 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/btree-secondary-equi-join.aql
@@ -25,8 +25,8 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql b/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
index ab79189..58fbf73 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-join/rtree-spatial-intersect-point.aql
@@ -21,8 +21,8 @@
circle: circle
}
-create dataset MyData1(MyRecord) partitioned by key id;
-create dataset MyData2(MyRecord) partitioned by key id;
+create dataset MyData1(MyRecord) primary key id;
+create dataset MyData2(MyRecord) primary key id;
load dataset MyData1
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key-mixed-intervals.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key-mixed-intervals.aql
index 79060d9..90d4cb9 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key-mixed-intervals.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key-mixed-intervals.aql
@@ -20,7 +20,7 @@
dept:string
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
load dataset employee
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
index dadb884..248c8ba 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-composite-key.aql
@@ -19,7 +19,7 @@
dept:string
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
load dataset employee
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
index 7b72a80..e3214de 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/btree-index-rewrite-multiple.aql
@@ -22,7 +22,7 @@
o_comment: string
}
-create dataset Orders(OrderType) partitioned by key o_orderkey;
+create dataset Orders(OrderType) primary key o_orderkey;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
index 460a212..8fb416b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/cust-index-age-nullable.aql
@@ -17,7 +17,7 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-contains.aql
index 54ec794..2667a07 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-contains.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance-panic.aql
index 8bfc878..82f3faa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance-panic.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance.aql
index 6077389..e905ee1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-edit-distance.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-jaccard.aql
index 314c3bd..a7832c1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ngram-jaccard.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance-panic.aql
index 3a3a303..553f65c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance-panic.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance.aql
index b1ce7c4..afa6e4e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-edit-distance.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-jaccard.aql
index b495ffe..9725739 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-olist-jaccard.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ulist-jaccard.aql
index 4427369..18b583a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-ulist-jaccard.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-contains.aql
index 888a381..6667e76 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-contains.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-jaccard.aql
index 23c9289..7e978a8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/fuzzy-inverted-index-word-jaccard.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
index ad85be7..e9b9540 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-contains.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
index b275423..3e637ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance-panic.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
index ddcdd4b..1fca0d4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-edit-distance.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
index 501ebce..1b8167b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ngram-jaccard.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
index deb51a7..79e728f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance-panic.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
index bb05fc1..793d578 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-edit-distance.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
index 8e2d1e7..aacb656 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-olist-jaccard.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
index 6a0e266..a6ea348 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-ulist-jaccard.aql
@@ -20,7 +20,7 @@
create nodegroup group1 if not exists on nc1;
create dataset Customers(CustomerType)
- partitioned by key cid on group1;
+ primary key cid on group1;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
index 348f686..f873dbb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-contains.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
index 9852e67..051f4f8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/inverted-index-word-jaccard.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
index 1a0ecbc..174c50f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive-open.aql
@@ -15,7 +15,7 @@
}
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
index ceca42e..6e4e659 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-conjunctive.aql
@@ -15,7 +15,7 @@
}
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
index 281f566..50088f4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey-open.aql
@@ -18,7 +18,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
index 365cfcb..9a950aa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/orders-index-custkey.aql
@@ -15,7 +15,7 @@
}
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
load dataset Orders
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
index 099e2d2..9a1906c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search-open.aql
@@ -23,7 +23,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
index 62714ed..758e110 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/range-search.aql
@@ -23,7 +23,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
index 6ca6d28..d920cfce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-nullable.aql
@@ -14,7 +14,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
index 44425cc..0a49ac1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index-open.aql
@@ -14,7 +14,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
index c1e1890..19df2db 100644
--- a/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/index-selection/rtree-secondary-index.aql
@@ -15,7 +15,7 @@
}
create dataset MyData(MyRecord)
- partitioned by key id;
+ primary key id;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
index 0ea267c..c6e29e6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance-inline.aql
@@ -25,9 +25,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
index f7e3a8b..46cd05c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-edit-distance.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
index 734a269..f2f330e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard-inline.aql
@@ -26,9 +26,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
index 2e1a635..1df7a5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ngram-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
index 3b46c7d..a72d292 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance-inline.aql
@@ -25,9 +25,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
index 3f025ed..ea05e33 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-edit-distance.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
index ea28721..e8b4b05 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard-inline.aql
@@ -25,9 +25,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
index 458d31c..d33fc87 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/olist-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
index e11b2f0..c46d534 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard-inline.aql
@@ -25,9 +25,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
index 9732a51..c7534f3 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/ulist-jaccard.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
index 1985878..ebec1d8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard-inline.aql
@@ -26,9 +26,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
index 013b51e..29e17bc 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join-noeqjoin/word-jaccard.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
index a602ca1..75ebe7b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance-inline.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
index 1c88536..9c305ec 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-edit-distance.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
index cd88072..048aee8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard-inline.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
index abb5e33..96c685d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ngram-jaccard.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
index bdac6f1..41e1d9e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance-inline.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
index 5e679e4..64e16ba 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-edit-distance.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
index 8fd8632..ef2730b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard-inline.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
index 50d13f1..d8af52c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/olist-jaccard.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
index a62c66d..c6b2252 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard-inline.aql
@@ -24,9 +24,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
index 8c6570f..efdb925 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/ulist-jaccard.aql
@@ -23,9 +23,9 @@
children: [ { name: string, age: int32? } ]
}
-create dataset Customers(CustomerType) partitioned by key cid;
+create dataset Customers(CustomerType) primary key cid;
-create dataset Customers2(CustomerType) partitioned by key cid;
+create dataset Customers2(CustomerType) primary key cid;
load dataset Customers
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
index 3ac3583..b50a92b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard-inline.aql
@@ -25,9 +25,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
index 7060fe6..6063e0b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/inverted-index-join/word-jaccard.aql
@@ -24,9 +24,9 @@
misc: string
}
-create dataset DBLP(DBLPType) partitioned by key id;
+create dataset DBLP(DBLPType) primary key id;
-create dataset CSX(CSXType) partitioned by key id;
+create dataset CSX(CSXType) primary key id;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
new file mode 100644
index 0000000..2127117
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
@@ -0,0 +1,26 @@
+/*
+ * Description : Create and load a dataset but with an unspecified data format.
+ * Expected Res : Failure
+ * Date : 16 Jan 2012
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Schema as closed{
+id: int32,
+age: int32,
+name: string
+}
+
+create dataset onektup(Schema)
+primary key id;
+
+load dataset onektup
+using localfs(("path"="nc1:///tmp/one.adm"));
+
+write output to nc1:"/tmp/foo.adm";
+
+for $l in dataset('onektup')
+return $l
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql b/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
index f148d2e..a2e2c1d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/misc/nested-loop-join_01.aql
@@ -16,8 +16,8 @@
interests: {{string}}
}
-create dataset Users(UserType) partitioned by key uid;
-create dataset Visitors(VisitorType) partitioned by key vid;
+create dataset Users(UserType) primary key uid;
+create dataset Visitors(VisitorType) primary key vid;
load dataset Users
diff --git a/asterix-app/src/test/resources/runtimets/queries/misc/partition-by-nonexistent-field.aql b/asterix-app/src/test/resources/runtimets/queries/misc/partition-by-nonexistent-field.aql
new file mode 100644
index 0000000..10dbfc1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/misc/partition-by-nonexistent-field.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : Tries to partition a dataset by a non-existent field
+ * Expected Result: An error reporting that this is not allowed
+ * Author: zheilbron
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open{
+name1:string
+}
+
+create dataset testds(TestType) primary key id;
+
+insert into dataset testds({"name1":"John","name2":"Smith"});
+
+for $l in dataset('testds')
+return $l
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql b/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
index c0b5e06..e038aeb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/nestrecords/nestrecord.aql
@@ -20,7 +20,7 @@
address: AddressType?
}
-create dataset testds(testtype) partitioned by key id;
+create dataset testds(testtype) primary key id;
insert into dataset testds (
{ "id": "001", "name": "Person One", "address": {"street": "3019 DBH", "city": "Irvine", "zip": 92697} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
index 354757d..c5fb80c1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-w-optional.aql
@@ -18,9 +18,9 @@
optnl_fld:string?
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
index 9d4b90a..b31fe88 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c-wo-optional.aql
@@ -19,9 +19,9 @@
optnl_fld:string?
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
index 83888f3..9f923e7 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/c2c.aql
@@ -17,10 +17,10 @@
}
// source dataset
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
// target dataset
-create dataset T2(TestType) partitioned by key id;
+create dataset T2(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
index 27935e0..b51c1fe 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list-ordered01.aql
@@ -22,7 +22,7 @@
batters:[[BatterType]]
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
index 7ba1641..82823ce 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list01.aql
@@ -22,7 +22,7 @@
batters:{{BatterType}}
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
index 47dd921..45e4bac 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list02.aql
@@ -22,7 +22,7 @@
batters:[[BatterType]]
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
index 94a87c0..94faa44 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/heterog-list03.aql
@@ -23,7 +23,7 @@
batters:[[BatterType]]?
}
-create dataset T1(TestType) partitioned by key id;
+create dataset T1(TestType) primary key id;
insert into dataset T1({
"id":1234,
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
index 0d26b5d..55d0497 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-01.aql
@@ -16,7 +16,7 @@
name : string
}
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
insert into dataset testds({"id": 123, "name": "John Doe", "hobbies": {{ "scuba", "music" }} }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
index ae21794..087ca36 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-12.aql
@@ -22,9 +22,9 @@
name : string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
index 6c517ca..7038437 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-14.aql
@@ -19,9 +19,9 @@
id: string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds01 (
{ "id": "001" }
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
index 2ac2567..d3547e2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-15.aql
@@ -26,7 +26,7 @@
lat_lon: point
}
-create dataset tdtst(Schema) partitioned by key id_32;
+create dataset tdtst(Schema) primary key id_32;
insert into dataset tdtst(
let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
index 5e1a591..d07fce0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-16.aql
@@ -26,7 +26,7 @@
lat_lon: point
}
-create dataset tdtst(Schema) partitioned by key id_32;
+create dataset tdtst(Schema) primary key id_32;
insert into dataset tdtst(
let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
index a590cf1..da83e0f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-17.aql
@@ -22,7 +22,7 @@
lat_lon: point
}
-create dataset tdtst(Schema) partitioned by key id_32;
+create dataset tdtst(Schema) primary key id_32;
insert into dataset tdtst(
let $f1:=time("10:50:56:200+05:00")
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
index b406663..1d93992 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-19.aql
@@ -15,7 +15,7 @@
id:int32
}
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
insert into dtst01({"id":137});
insert into dtst01({"id":117});
@@ -30,7 +30,7 @@
dob:date
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":"1-1-82"});
insert into dataset employee({"id":202,"name":"John Smith","age":30,"sex":"M","dob":"1-1-82"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
index 29b4366..7f1be27 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-20.aql
@@ -16,7 +16,7 @@
id:int32
}
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
create type Emp as open {
id:int32,
@@ -26,7 +26,7 @@
dob:date?
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":date("1975-01-11")});
insert into dataset employee({"id":202,"name":"John Smith","age":30,date("1982-05-23")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
index c52344e..3d955c8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-21.aql
@@ -14,7 +14,7 @@
id:int32
}
-create dataset dtst01(TestType) partitioned by key id;
+create dataset dtst01(TestType) primary key id;
create type Emp as open {
id:int32,
@@ -24,7 +24,7 @@
dob:date
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
insert into dataset employee({"id":201,"name":"John Doe","age":32,"sex":"M","dob":date("1975-01-11")});
insert into dataset employee({"id":202,"name":"John Smith","age":30,"sex":"M","dob":date("1982-07-12")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
index cd9ac44..0b4c31f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-22.aql
@@ -20,7 +20,7 @@
dob:date?
}
-create dataset employee(Emp) partitioned by key id;
+create dataset employee(Emp) primary key id;
//date("YYYY-MM-DD")
insert into dataset employee({"id":201,"name":"John Doe","age":37,"dept":"HR","sex":"M","dob":date("1975-11-02")});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
index 7012a5a..4123cf2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-24.aql
@@ -17,7 +17,7 @@
opt_tag : {{ string }}
}
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
insert into dataset testds({"id": 32,"name": "UCI","opt_tag":{{"optional text","put any text here","and more"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
index b29b63a..ec0d4cd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-25.aql
@@ -17,7 +17,7 @@
opt_tag : {{ string }}?
}
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
insert into dataset testds({"id": 32,"name": "UCI","opt_tag":{{"optional text","put any text here","and more"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
index f19e4a7..486b76e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-26.aql
@@ -18,7 +18,7 @@
opt_tag : {{ string }}?
}
-create dataset testds(testType) partitioned by key id;
+create dataset testds(testType) primary key id;
insert into dataset testds({"id": 32,"name": "UCI"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
index 8fdab75..acef21e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-28.aql
@@ -20,9 +20,9 @@
name : string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
index 9560430..5047342 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-29.aql
@@ -20,9 +20,9 @@
name : string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "001", "name": "Person One", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
index 42aa2e6..e52be74 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-30.aql
@@ -19,9 +19,9 @@
name : string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "011", "name": "John Doe", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
index 03b1754..5d5fa33 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-31.aql
@@ -18,9 +18,9 @@
name : string
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "011", "name": "John Doe", "hobbies": {{"scuba", "music"}}}
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
index f101e47..a79c53d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-32.aql
@@ -23,9 +23,9 @@
interests : {{string}}
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "011", "name": "John Doe", "sex":"Male", "dept":"HR", "salary":80000,"interests":{{"hiking","scuba","painting","biking"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
index b75e6c6..809c93e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/open-closed-33.aql
@@ -24,9 +24,9 @@
interests : {{string}}?
}
-create dataset testds01(testtype01) partitioned by key id;
+create dataset testds01(testtype01) primary key id;
-create dataset testds02(testtype02) partitioned by key id;
+create dataset testds02(testtype02) primary key id;
insert into dataset testds02 (
{ "id": "011", "name": "John Doe", "sex":"Male", "dept":"HR", "salary":80000,"interests":{{"hiking","scuba","painting","biking"}}});
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
index e46286c..2094b4a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
@@ -26,7 +26,7 @@
}
create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
load dataset TweetMessages
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
new file mode 100644
index 0000000..7e859af
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
@@ -0,0 +1,52 @@
+/*
+ * Description : This test case is to verify the fix for issue236
+ : https://code.google.com/p/asterixdb/issues/detail?id=236
+ * Expected Res : Success
+ * Date : 20 Dec. 2012
+ */
+
+drop dataverse SocialNetworkData if exists;
+
+create dataverse SocialNetworkData;
+use dataverse SocialNetworkData;
+
+create type TwitterUserType as open {
+screen-name: string,
+lang: string,
+friends_count: int32,
+statuses_count: int32,
+name: string,
+followers_count: int32
+}
+
+create type TweetMessageType as closed {
+tweetid: string,
+tweetid-copy: string,
+user: TwitterUserType,
+sender-location: point?,
+send-time: datetime,
+send-time-copy: datetime,
+referred-topics: {{ string }},
+message-text: string
+}
+
+create dataset TweetMessages(TweetMessageType)
+primary key tweetid;
+
+
+insert into dataset TweetMessages(
+{
+"tweetid": "1111387810",
+"tweetid-copy": "1111387810",
+"user": { "screen-name": "TonyNapier#786", "lang": "en", "friends_count": 4241366,
+"statuses_count": 97, "name": "Tony Napier", "followers_count": 5984113 },
+"sender-location": point("29.24,78.35"),
+"send-time": datetime("2011-11-24T14:24:51.000Z"),
+"send-time-copy": datetime("2011-11-24T14:24:51.000Z"),
+"referred-topics": {{ "sprint", "wireless" }},
+"message-text": " love sprint its wireless is mind-blowing:)"
+});
+
+write output to nc1:"rttest/open-closed_query-issue236.adm";
+for $r in dataset('TweetMessages')
+return $r
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
index a8d00f8..2668430 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal.aql
@@ -26,7 +26,7 @@
};
create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
insert into dataset TweetMessages(
{
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
index 36feac4..b3945a0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-proposal02.aql
@@ -27,7 +27,7 @@
};
create dataset TweetMessages(TweetMessageType)
-partitioned by key tweetid;
+primary key tweetid;
insert into dataset TweetMessages(
{
diff --git a/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql b/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
index 18f1c20..6718ff0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/quantifiers/somesat_02.aql
@@ -30,9 +30,9 @@
}
create dataset CustomerSomeSat02(CustomerType)
- partitioned by key cid;
+ primary key cid;
create dataset OrdersSomeSat02(OrderType)
- partitioned by key oid;
+ primary key oid;
load dataset CustomerSomeSat02
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..b356a56
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/closed-closed-fieldname-conflict_issue173.aql
@@ -0,0 +1,11 @@
+/*
+ * Description : Tests whether a conflict between two closed field names are detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+let $x := {"name": "john", "name": "smith"}
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
index 2592c67..1b7cf02 100644
--- a/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
@@ -13,7 +13,7 @@
name : string
}
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
insert into dataset testds({"id": 123, "name": "John Doe", "address": { "zip": 92617} });
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..a5ac400
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/open-closed-fieldname-conflict_issue173.aql
@@ -0,0 +1,21 @@
+/*
+ * Description : Tests whether a conflict between an open and closed field name are detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+fname:string
+}
+
+create dataset testds(opentype) primary key id;
+
+insert into dataset testds({'id': 1, 'fname': "name"});
+
+for $x in dataset('testds')
+return {$x.fname: "smith", lowercase("NAME"): "john"}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql b/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql
new file mode 100644
index 0000000..225e596
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/open-open-fieldname-conflict_issue173.aql
@@ -0,0 +1,21 @@
+/*
+ * Description : Tests whether a conflict between two open field names are detected
+ * Expected Result: An error reporting that there is a duplicate field name "name"
+ * Author: zheilbron
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+fname1: string,
+fname2: string
+}
+
+create dataset testds(opentype) primary key fname1;
+
+insert into dataset testds({'fname1': "name", 'fname2': "name"});
+
+for $x in dataset('testds')
+return {$x.fname1: "john", $x.fname2: "smith"}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/10.aql b/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
index 2d71d87..b255178 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/10.aql
@@ -12,7 +12,7 @@
}
create dataset DBLP1(DBLPType)
- partitioned by key id;
+ primary key id;
// drop dataset DBLP1;
load dataset DBLP1
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/20.aql b/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
index 675f26d..17da0b0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/20.aql
@@ -13,7 +13,7 @@
}
create dataset DBLPadm(DBLPType)
- partitioned by key id;
+ primary key id;
// drop dataset DBLPadm;
load dataset DBLPadm
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
new file mode 100644
index 0000000..86c8430
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
@@ -0,0 +1,35 @@
+/*
+* Description : Create an dataset and load it from two file splits
+ Include whitespace between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue : 238
+* Date : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLPadm(DBLPType)
+ primary key id;
+
+// drop dataset DBLPadm;
+load dataset DBLPadm
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/part-00000.adm, nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_1.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
new file mode 100644
index 0000000..7dd319a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
@@ -0,0 +1,36 @@
+/*
+* Description : Create an dataset and load it from two file splits
+ Include newline between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue : 238
+* Date : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLPadm(DBLPType)
+ primary key id;
+
+// drop dataset DBLPadm;
+load dataset DBLPadm
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/part-00000.adm,
+ nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_2.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql b/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
index ea06b96..0997e4f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/spatial_types_02.aql
@@ -12,7 +12,7 @@
}
create dataset Spatial2(SpatialType)
- partitioned by key id;
+ primary key id;
load dataset Spatial2
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql b/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
index 064385c..a8654bd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/temp_types_02.aql
@@ -11,7 +11,7 @@
}
create dataset Temp2(TempType)
- partitioned by key id;
+ primary key id;
load dataset Temp2
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
index d3eed71..3d6dee4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-edit-distance.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
index 0c11edb..1bef743 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/fuzzyeq-similarity-jaccard.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
index 01bea0b..836fdb6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-check_query.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
index aa5b067..ba3b85e 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted-check_query.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
index a2373af..25cb071 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard-sorted_query.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
index 05f3a61..20afc67 100644
--- a/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/similarity/similarity-jaccard_query.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset DBLP(DBLPType)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset DBLP
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
index 0face3b..81b8bd2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/cell-aggregation-with-filtering.aql
@@ -13,7 +13,7 @@
create nodegroup group1 if not exists on nc1, nc2;
create dataset TwitterData(Tweet)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset TwitterData
diff --git a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
index e755aa9..3fe1355 100644
--- a/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/spatial/create-rtree-index.aql
@@ -17,7 +17,7 @@
circle: circle
}
-create dataset MyData(SpatialType) partitioned by key id;
+create dataset MyData(SpatialType) primary key id;
create index rtree_index1 on MyData(point) type rtree;
create index rtree_index2 on MyData(line1) type rtree;
create index rtree_index3 on MyData(poly1) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql b/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
index a66b00f..7516bea 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/cpttostr01.aql
@@ -16,7 +16,7 @@
cpt:[int32]
}
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
// insert codepoint data into internal dataset testds here into the cpt attribute
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql b/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
index 832efbc..257d992 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/endwith03.aql
@@ -15,7 +15,7 @@
name:string
}
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
insert into dataset testds({"name":"Jim Jones"});
insert into dataset testds({"name":"Ravi Kumar"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql b/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
index 2f7b83e..4d9e5b2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/matches05.aql
@@ -16,7 +16,7 @@
id:int32
}
-create dataset testds1(TestType1) partitioned by key id;
+create dataset testds1(TestType1) primary key id;
insert into dataset testds1({"fname":"Test","lname":"Test","id":123});
insert into dataset testds1({"fname":"Testa","lname":"Test","id":124});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql b/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
index 8aed603..baa7d0b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/startwith03.aql
@@ -15,7 +15,7 @@
name:string
}
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
insert into dataset testds({"name":"John Smith"});
insert into dataset testds({"name":"John Doe"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql b/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
index 0b3941d..5266fa1 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/strconcat01.aql
@@ -17,7 +17,7 @@
lname:string
}
-create dataset testds(TestType) partitioned by key id;
+create dataset testds(TestType) primary key id;
// insert string data into internal dataset testds into the name attribute
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql b/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
index bba2a7f..ddff7ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/strlen03.aql
@@ -15,7 +15,7 @@
name:string
}
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
insert into dataset testds({"name":"Maradona"});
insert into dataset testds({"name":"Pele"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql b/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
index fbdcba4..1a33fdf 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/substr05.aql
@@ -15,7 +15,7 @@
name : string
}
-create dataset testdst(TestType) partitioned by key name;
+create dataset testdst(TestType) primary key name;
insert into dataset testdst({"name":"UC Berkeley"});
insert into dataset testdst({"name":"UC Irvine"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql b/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
index 82d21c2..bfeff2d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/substr06.aql
@@ -14,7 +14,7 @@
name : string
}
-create dataset testdst(TestType) partitioned by key name;
+create dataset testdst(TestType) primary key name;
insert into dataset testdst({"name":"UC Berkeley"});
insert into dataset testdst({"name":"UC Irvine"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql b/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
index 411dacf..301af52 100644
--- a/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/string/toLowerCase03.aql
@@ -17,7 +17,7 @@
name:string
}
-create dataset testds(TestType) partitioned by key name;
+create dataset testds(TestType) primary key name;
insert into dataset testds({"name":"Maradona"});
insert into dataset testds({"name":"Pele"});
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql b/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql
new file mode 100644
index 0000000..9cc9f8d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/accessors.aql
@@ -0,0 +1,22 @@
+/*
+ * Description : Check temporal accessors for different types
+ * Expected Result : Success
+ * Date : 31st Aug, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_accessors.adm";
+
+let $c1 := date("2010-10-30")
+let $c2 := datetime("1987-11-19T23:49:23.938")
+let $c3 := date("-1987-11-19")
+let $c4 := date("09280329")
+let $c5 := datetime("19371229T20030628")
+let $c6 := time("12:23:34.930+07:00")
+let $c7 := string("-0003-01-09T23:12:12.39-07:00")
+let $c8 := duration("P3Y73M632DT49H743M3948.94S")
+
+return {"year1": year($c1), "year2": year($c2), "year3": year($c3), "year4": year($c4), "year5": year($c5), "year6": year($c7), "year7": year($c8), "month1": month($c1), "month2": month($c2), "month3": month($c3), "month4": month($c4), "month5": month($c5), "month6": month($c8), "day1": day($c1), "day2": day($c2), "day3": day($c3), "day4": day($c4), "day5": day($c5), "day6": day($c8), "hour1": hour($c2), "hour2": hour($c5), "hour3": hour($c6), "hour4": hour($c8), "min1": minute($c2), "min2": minute($c5), "min3": minute($c6), "min4": minute($c8), "second1": second($c2), "second2": second($c5), "second3": second($c6), "second4": second($c8), "ms1": millisecond($c2), "ms2": millisecond($c5), "ms3": millisecond($c6), "ms4": millisecond($c8)}
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql b/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql
new file mode 100644
index 0000000..9f6c60e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/adjust_timezone.aql
@@ -0,0 +1,17 @@
+/*
+ * Description : Check the adjust-timezone functions
+ * Expected Result : Success
+ * Date : 15th Oct, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_adjust_timezone.adm";
+
+let $t1 := time("20:15:10.327")
+let $dt1 := datetime("2010-10-23T01:12:13.329Z")
+let $s1 := adjust-time-for-timezone($t1, "+0800")
+let $s2 := adjust-datetime-for-timezone($dt1, "-0615")
+return { "string1" : $s1, "string2" : $s2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql b/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql
new file mode 100644
index 0000000..67360ce
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/calendar_duration.aql
@@ -0,0 +1,28 @@
+/*
+ * Description : Check the calendar-duration functions
+ * Expected Result : Success
+ * Date : 15th Oct, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_calendar_duration.adm";
+
+let $t1 := datetime("1987-11-19T23:49:23.938")
+let $t2 := date("-1328-10-23")
+let $dr1 := duration("P7382DT39283M3921.329S")
+let $dr2 := duration("-PT63H398212M3219.328S")
+let $dr3 := duration("P1Y90M")
+let $dr4 := duration("-P3Y89M4089DT47382.983S")
+let $cdr1 := calendar-duration-from-datetime($t1, $dr1)
+let $cdr2 := calendar-duration-from-datetime($t1, $dr2)
+let $cdr3 := calendar-duration-from-datetime($t1, $dr3)
+let $cdr4 := calendar-duration-from-datetime($t1, $dr4)
+let $cdr5 := calendar-duration-from-date($t2, $dr1)
+let $cdr6 := calendar-duration-from-date($t2, $dr2)
+let $cdr7 := calendar-duration-from-date($t2, $dr3)
+let $cdr8 := calendar-duration-from-date($t2, $dr4)
+
+return { "cduration1":$cdr1, "cduration2":$cdr2, "cduration3":$cdr3, "cduration4":$cdr4, "cduration5":$cdr5, "cduration6":$cdr6, "cduration7":$cdr7, "cduration8":$cdr8 }
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql
new file mode 100644
index 0000000..310fa43
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/date_functions.aql
@@ -0,0 +1,24 @@
+/*
+ * Description : Check temporal functions for date type
+ * Expected Result : Success
+ * Date : 24th Sep, 2012
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_date_functions.adm";
+
+let $d1 := date-from-unix-time-in-days(15600)
+let $dt1 := datetime("1327-12-02T23:35:49.938Z")
+let $d2 := date-from-datetime($dt1)
+let $dt2 := datetime("2012-10-11T02:30:23+03:00")
+let $d3 := date-from-datetime($dt2)
+let $dr1 := duration("-P2Y1M90DT30H")
+let $d4 := add-date-duration($d1, $dr1)
+let $dr2 := duration("P300Y900MT360000M")
+let $d5 := add-date-duration($d2, $dr2)
+let $dr3 := subtract-date($d5, $d2)
+let $dr4 := subtract-date($d4, $d1)
+
+return { "date1" : $d1, "date2" : $d2, "date3" : $d3, "date4" : $d4, "date5" : $d5, "duration1" : $dr3, "duration2" : $dr4 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql
new file mode 100644
index 0000000..a70c9f2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/datetime_functions.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : Check temporal functions for datetime
+ * Expected Result : Success
+ * Date : 24th Sep, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_datetime_functions.adm";
+
+let $dt1 := datetime-from-unix-time-in-ms(956007429)
+let $d1 := date("1327-12-02")
+let $t1 := time("23:35:49.938Z")
+let $dt2 := datetime-from-date-time($d1, $t1)
+let $dr1 := subtract-datetime($dt2, $dt1)
+let $dt3 := add-datetime-duration($dt1, $dr1)
+
+return { "datetime1" : $dt1, "datetime2" : $dt2, "datetime3" : $dt3, "duration1" : $dr1 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql
new file mode 100644
index 0000000..4520f46
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_delimited_ds.aql
@@ -0,0 +1,26 @@
+/*
+ * Test case name: date-insert.aql
+ * Description: verify insertion operation for date type
+ * Expected result: success
+ */
+
+drop dataverse testdvt if exists;
+create dataverse testdvt;
+use dataverse testdvt;
+
+create type testtype as closed {
+ id: string,
+ dateField: date,
+ timeField: time,
+ datetimeField: datetime,
+ durationField: duration
+}
+
+write output to nc1:"rttest/temp_insert_from_delimited_ds.adm";
+
+create external dataset testds(testtype)
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/temporal/temporalData.txt"),("format"="delimited-text"),("delimiter"="|"));
+
+for $r in dataset("testds")
+return {"date": $r.dateField, "time": $r.timeField, "datetime": $r.datetimeField, "duration": $r.durationField }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql
new file mode 100644
index 0000000..5813696
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/insert_from_ext_ds.aql
@@ -0,0 +1,27 @@
+/*
+ * Test case name: date-insert.aql
+ * Description: verify insertion operation for date type
+ * Expected result: success
+ */
+
+drop dataverse testdvt if exists;
+create dataverse testdvt;
+use dataverse testdvt;
+
+create type testtype as open {
+ id: string,
+ dateField: date?,
+ timeField: time?,
+ datetimeField: datetime?,
+ durationField: duration?,
+ intervalField: interval?
+}
+
+write output to nc1:"rttest/temp_insert_from_ext_ds.adm";
+
+create external dataset testds(testtype)
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/temporal/temporalData.json"),("format"="adm"));
+
+for $r in dataset("testds")
+return {"date": $r.dateField, "time": $r.timeField, "datetime": $r.datetimeField, "duration": $r.durationField, "interval": $r.intervalField }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql
new file mode 100644
index 0000000..a29f7f5
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/interval_functions.aql
@@ -0,0 +1,56 @@
+/*
+ * Description : Check temporal functions for interval
+ * Expected Result : Success
+ * Date : 2nd Nov, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_interval_functions.adm";
+
+let $itv1 := interval-from-date("2010-10-30", "2010-12-21")
+let $itv2 := interval-from-date("2011-10-30", "2012-10-21")
+let $itv3 := interval-from-date("2010-06-30", "2013-01-01")
+let $blnBefore1 := interval-before($itv1, $itv2)
+let $blnAfter1 := interval-after($itv2, $itv1)
+let $blnBefore2 := interval-before($itv1, $itv3)
+let $blnAfter2 := interval-after($itv3, $itv1)
+
+let $itv4 := interval-from-datetime("2012-06-26T01:01:01.111", "2012-07-27T02:02:02.222")
+let $itv5 := interval-from-datetime("20120727T020202222", "2013-08-08T03:03:03.333")
+let $itv6 := interval-from-datetime("19000707T020202222", "2013-08-07T03:03:03.333")
+let $blnMeet1 := interval-meets($itv4, $itv5)
+let $blnMetBy1 := interval-met-by($itv5, $itv4)
+let $blnMeet2 := interval-meets($itv6, $itv4)
+let $blnMetBy2 := interval-met-by($itv6, $itv4)
+
+let $itv7 := interval-from-time("12:32:38", "20:29:20")
+let $itv8 := interval-from-time("17:48:19", "22:19:49")
+let $itv9 := interval-from-time("01:32:49", "12:33:00")
+let $blnOverlaps1 := interval-overlaps($itv7, $itv8)
+let $blnOverlapped1 := interval-overlapped-by($itv8, $itv7)
+let $blnOverlaps2 := interval-overlaps($itv8, $itv7)
+let $blnOverlapped2 := interval-overlapped-by($itv7, $itv8)
+let $blnOverlap1 := overlap($itv9, $itv7)
+let $blnOverlap2 := overlap($itv9, $itv8)
+
+let $itv10 := interval-from-date("2010-10-30", "2010-11-30")
+let $blnStarts1 := interval-starts($itv10, $itv1)
+let $blnStarts2 := interval-starts($itv10, $itv2)
+let $blnStartedBy1 := interval-started-by($itv1, $itv10)
+let $blnStartedBy2 := interval-started-by($itv10, $itv2)
+
+let $blnCovers1 := interval-covers($itv6, $itv4)
+let $blnCovers2 := interval-covers($itv6, $itv5)
+let $blnCoveredBy1 := interval-covered-by($itv4, $itv6)
+let $blnCoveredBy2 := interval-covered-by($itv5, $itv6)
+
+let $itv11 := interval-from-time("19:00:00.009", "20:29:20.000")
+let $blnEnds1 := interval-ends($itv11, $itv7)
+let $blnEnds2 := interval-ends($itv11, $itv8)
+let $blnEndedBy1 := interval-ended-by($itv7, $itv11)
+let $blnEndedBy2 := interval-ended-by($itv8, $itv11)
+
+return { "before1" : $blnBefore1, "before2" : $blnBefore2, "after1" : $blnAfter1, "after2" : $blnAfter2, "meet1" : $blnMeet1, "meet2" : $blnMeet2, "metby1" : $blnMetBy1, "metby2" : $blnMetBy2, "overlaps1" : $blnOverlaps1, "overlaps2" : $blnOverlaps2, "overlapped1" : $blnOverlapped1, "overlapped2" : $blnOverlapped2, "overlap1" : $blnOverlap1, "overlap2" : $blnOverlap2, "starts1" : $blnStarts1, "starts2" : $blnStarts2, "startedby1" : $blnStartedBy1, "startedby2" : $blnStartedBy2, "covers1" : $blnCovers1, "covers2" : $blnCovers2, "coveredby1" : $blnCoveredBy1, "coveredby2" : $blnCoveredBy2, "ends1" : $blnEnds1, "ends2" : $blnEnds2, "endedby1" : $blnEndedBy1, "endedby2" : $blnEndedBy2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql b/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql
new file mode 100644
index 0000000..29adcc0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temp/time_functions.aql
@@ -0,0 +1,28 @@
+/*
+ * Description : Check temporal functions for time
+ * Expected Result : Success
+ * Date : 24th Sep, 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+write output to nc1:"rttest/temp_time_functions.adm";
+
+let $t1 := time-from-unix-time-in-ms(1560074)
+let $dt1 := datetime("1327-12-02T23:35:49.938Z")
+let $t2 := time-from-datetime($dt1)
+let $dt2 := datetime("2012-10-11T02:30:23+03:00")
+let $t3 := time-from-datetime($dt2)
+let $dr1 := duration("-PT30H")
+let $t4 := add-time-duration($t1, $dr1)
+let $dr2 := duration("PT36M")
+let $t5 := add-time-duration($t2, $dr2)
+let $dr3 := subtract-time($t5, $t2)
+let $dr4 := subtract-time($t4, $t1)
+let $ct := current-time()
+let $cd := current-date()
+let $cdt := current-datetime()
+
+return { "time1" : $t1, "time2" : $t2, "time3" : $t3, "time4" : $t4, "time5" : $t5, "duration1" : $dr3, "duration2" : $dr4 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
index 95d715e..3e41042 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/distinct_by.aql
@@ -24,7 +24,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
index dedaa7f..8b941e4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/group_no_agg.aql
@@ -10,7 +10,7 @@
}
create dataset Regions_group_no_agg(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
write output to nc1:"rttest/tpch_group_no_agg.adm";
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
index 3b641a4..b775f09 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item.aql
@@ -89,21 +89,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
new file mode 100644
index 0000000..27fa512
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
@@ -0,0 +1,195 @@
+drop dataverse tpch if exists;
+create dataverse tpch;
+
+use dataverse tpch;
+
+create type LineItemType as closed {
+ l_orderkey: int64,
+ l_partkey: int64,
+ l_suppkey: int64,
+ l_linenumber: int64,
+ l_quantity: int64,
+ l_extendedprice: double,
+ l_discount: double,
+ l_tax: double,
+ l_returnflag: string,
+ l_linestatus: string,
+ l_shipdate: string,
+ l_commitdate: string,
+ l_receiptdate: string,
+ l_shipinstruct: string,
+ l_shipmode: string,
+ l_comment: string
+}
+
+create type OrderType as closed {
+ o_orderkey: int64,
+ o_custkey: int64,
+ o_orderstatus: string,
+ o_totalprice: double,
+ o_orderdate: string,
+ o_orderpriority: string,
+ o_clerk: string,
+ o_shippriority: int64,
+ o_comment: string
+}
+
+create type CustomerType as closed {
+ c_custkey: int64,
+ c_name: string,
+ c_address: string,
+ c_nationkey: int64,
+ c_phone: string,
+ c_acctbal: double,
+ c_mktsegment: string,
+ c_comment: string
+}
+
+create type SupplierType as closed {
+ s_suppkey: int64,
+ s_name: string,
+ s_address: string,
+ s_nationkey: int64,
+ s_phone: string,
+ s_acctbal: double,
+ s_comment: string
+}
+
+create type NationType as closed {
+ n_nationkey: int64,
+ n_name: string,
+ n_regionkey: int64,
+ n_comment: string
+}
+
+create type RegionType as closed {
+ r_regionkey: int64,
+ r_name: string,
+ r_comment: string
+}
+
+create type PartType as closed {
+ p_partkey: int64,
+ p_name: string,
+ p_mfgr: string,
+ p_brand: string,
+ p_type: string,
+ p_size: int64,
+ p_container: string,
+ p_retailprice: double,
+ p_comment: string
+}
+
+create type PartSuppType as closed {
+ ps_partkey: int64,
+ ps_suppkey: int64,
+ ps_availqty: int64,
+ ps_supplycost: double,
+ ps_comment: string
+}
+
+create dataset LineItem(LineItemType)
+ primary key l_orderkey, l_linenumber;
+create dataset Orders(OrderType)
+ primary key o_orderkey;
+create dataset Supplier(SupplierType)
+ primary key s_suppkey;
+create dataset Region(RegionType)
+ primary key r_regionkey;
+create dataset Nation(NationType)
+ primary key n_nationkey;
+create dataset Part(PartType)
+ primary key p_partkey;
+create dataset Partsupp(PartSuppType)
+ primary key ps_partkey, ps_suppkey;
+create dataset Customer(CustomerType)
+ primary key c_custkey;
+
+load dataset LineItem
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Orders
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Supplier
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Region
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Nation
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Part
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Partsupp
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Customer
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/tpch_q10_returned_item_int64.adm";
+
+
+for $locn in (
+for $l in dataset('LineItem')
+for $ocn in (
+ for $o in dataset('Orders')
+ for $c in dataset('Customer')
+ where $c.c_custkey = $o.o_custkey and $o.o_orderdate >= '1993-10-01' and $o.o_orderdate < '1994-01-01'
+ for $n in dataset('Nation')
+ where $c.c_nationkey = $n.n_nationkey
+ return {
+ "c_custkey": $c.c_custkey,
+ "c_name": $c.c_name,
+ "c_acctbal": $c.c_acctbal,
+ "n_name": $n.n_name,
+ "c_address": $c.c_address,
+ "c_phone": $c.c_phone,
+ "c_comment": $c.c_comment,
+ "o_orderkey": $o.o_orderkey
+ }
+)
+where
+ $l.l_orderkey = $ocn.o_orderkey and $l.l_returnflag = 'R'
+ return {
+ "c_custkey": $ocn.c_custkey,
+ "c_name": $ocn.c_name,
+ "c_acctbal": $ocn.c_acctbal,
+ "n_name": $ocn.n_name,
+ "c_address": $ocn.c_address,
+ "c_phone": $ocn.c_phone,
+ "c_comment": $ocn.c_comment,
+ "l_extendedprice": $l.l_extendedprice,
+ "l_discount": $l.l_discount
+ }
+)
+group by $c_custkey:=$locn.c_custkey,
+ $c_name:=$locn.c_name,
+ $c_acctbal:=$locn.c_acctbal, $c_phone:=$locn.c_phone,
+ $n_name:=$locn.n_name, $c_address:=$locn.c_address, $c_comment:=$locn.c_comment
+ with $locn
+let $revenue := sum(for $i in $locn return $i.l_extendedprice * (1 - $i.l_discount))
+order by $revenue desc
+limit 20
+return {
+ "c_custkey": $c_custkey,
+ "c_name": $c_name,
+ "revenue": $revenue,
+ "c_acctbal": $c_acctbal,
+ "n_name": $n_name,
+ "c_address": $c_address,
+ "c_phone": $c_phone,
+ "c_comment": $c_comment
+}
+
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
index 245c7f5..9e4b165 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q11_important_stock.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
index 9dcade8..b541615 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q12_shipping.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
index 9a91177..c4a987a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q13_customer_distribution.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
index a38b1d6..4ea7988 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q14_promotion_effect.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
index 49b0c03..29830a9 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q15_top_supplier.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
index ad16d3c..978fb0f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q16_parts_supplier_relationship.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
index 05cf693..92e4083 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q17_small_quantity_order_revenue.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
index 1347ff5..9ab4f83 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q18_large_volume_customer.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
index b856eed..c83f663 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q19_discounted_revenue.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
index af39b3f..b6de1fa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q1_pricing_summary_report_nt.aql
@@ -22,7 +22,7 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
index bdb591f..2143cfa 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q20_potential_part_promotion.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
index f544e09..32cf149 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q21_suppliers_who_kept_orders_waiting.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
index 29131c8..b2ac2d0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q22_global_sales_opportunity.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
index 5490213..c5e17d6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q2_minimum_cost_supplier.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
index 68fb864..c3fcc25 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q3_shipping_priority_nt.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
index 3f1c7ec..6d755d2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q4_order_priority.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
index d345b3b..e64d98a 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q5_local_supplier_volume.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
index 3ca0424..dee85bb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q6_forecast_revenue_change.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
index c91eca5..6432361 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q7_volume_shipping.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
index 420a179..2e0eefb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q8_national_market_share.aql
@@ -90,21 +90,21 @@
}
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset Partsupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
index 33c2aad..21cdcdd 100644
--- a/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q9_product_type_profit_nt.aql
@@ -91,21 +91,21 @@
create dataset LineItem(LineItemType)
- partitioned by key l_orderkey, l_linenumber;
+ primary key l_orderkey, l_linenumber;
create dataset Orders(OrderType)
- partitioned by key o_orderkey;
+ primary key o_orderkey;
create dataset Supplier(SupplierType)
- partitioned by key s_suppkey;
+ primary key s_suppkey;
create dataset Region(RegionType)
- partitioned by key r_regionkey;
+ primary key r_regionkey;
create dataset Nation(NationType)
- partitioned by key n_nationkey;
+ primary key n_nationkey;
create dataset Part(PartType)
- partitioned by key p_partkey;
+ primary key p_partkey;
create dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey;
+ primary key ps_partkey, ps_suppkey;
create dataset Customer(CustomerType)
- partitioned by key c_custkey;
+ primary key c_custkey;
load dataset LineItem
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
index b16e2dd..8a09ea2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf09.aql
@@ -13,7 +13,7 @@
id : int32
}
-create dataset test.t1(TestType) partitioned by key id;
+create dataset test.t1(TestType) primary key id;
insert into dataset test.t1({"id":345});
insert into dataset test.t1({"id":315});
diff --git a/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm b/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm
new file mode 100644
index 0000000..3fd4c19
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/comparison/date_order.adm
@@ -0,0 +1,6 @@
+date("-0500-03-21")
+date("1362-02-28")
+date("1600-02-29")
+date("2012-02-29")
+date("2021-03-01")
+date("2049-04-23")
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm b/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm
new file mode 100644
index 0000000..c937bdd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/comparison/time_order.adm
@@ -0,0 +1,6 @@
+time("00:00:00.000Z")
+time("02:00:00.000Z")
+time("19:00:00.000Z")
+time("20:00:00.470Z")
+time("23:00:00.382Z")
+time("23:59:59.999Z")
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm b/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
index f9a1203..470d1f8 100644
--- a/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/int_01.adm
@@ -1 +1 @@
-{ "int8": 80i8, "int16": 160i16, "int32": 320, "int64": 640i64, "int8": -80i8, "int16": -160i16, "int32": -320, "int64": -640i64 }
\ No newline at end of file
+{ "int8": 80i8, "int16": 160i16, "int32": 320, "int64": 640i64, "int8_2": -80i8, "int16_2": -160i16, "int32_2": -320, "int64_2": -640i64, "int64_min": -9223372036854775808i64 }
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm b/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm
new file mode 100644
index 0000000..8fb7e25
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/interval.adm
@@ -0,0 +1 @@
+{ "interval1": interval("date("2010-10-30"), date("2012-10-21")"), "interval2": interval("time("03:04:05.678Z"), time("23:24:25.267Z")"), "interval3": interval("datetime("-1987-11-19T02:43:57.938Z"), datetime("1999-11-12T12:49:35.948Z")"), "interval4": interval("date("0001-12-27"), date("0006-01-27")"), "interval5": interval("time("20:03:20.948Z"), time("20:57:50.886Z")"), "interval6": interval("datetime("-2043-11-19T15:32:39.293Z"), datetime("-1603-03-12T12:12:38.242Z")") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
index f8d0c9d..8bd73db 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv02.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:22:47 PST 2012" }
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:01:46 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
index bd3c0af..ff1f9df 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv04.adm
@@ -1,4 +1,4 @@
-{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
-{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Thu Nov 08 13:25:37 PST 2012" }
+{ "DataverseName": "student", "DatasetName": "gdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "student", "DatasetName": "ugdstd", "DataTypeName": "stdType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "prof", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
+{ "DataverseName": "teacher", "DatasetName": "pstdoc", "DataTypeName": "tchrType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:03:50 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
index be94bc8..7be9c57 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19.adm
@@ -1,7 +1,7 @@
-{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
-{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:41:21 PST 2012" }
+{ "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ] }, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
+{ "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:04:36 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
index 87619a8..e78ad8f 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/join_across_dataverses.adm
@@ -1,3 +1,3 @@
-{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ], "orderList": {{ 10, 5 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ], "orderList": {{ 10, 775 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ], "orderList": {{ 1000, 775 }} }
+{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm b/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
index 7648e4a..e78ad8f 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/join_q_01.adm
@@ -1,3 +1,3 @@
-{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ], "orderList": {{ 10, 5 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ], "orderList": {{ 10, 775 }} }
-{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ], "orderList": {{ 1000, 775 }} }
\ No newline at end of file
+{ "cust_name": "Jodi Alex", "cust_age": 19, "order_total": 7.206f, "orderList": [ 10, 5 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 14.2326f, "orderList": [ 10, 775 ] }
+{ "cust_name": "Jodi Rotruck", "cust_age": null, "order_total": 97.20656f, "orderList": [ 1000, 775 ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
index 903c00d..4e02c4b 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_03.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "list": [ "ORDER_DELIVERED", "Kathryne" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "Kathryne", "item3": null }
-{ "orderid": 10, "ordertot": 7.206f, "list": [ "ORDER_DELIVERED", "ALEX" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "ALEX", "item3": null }
-{ "orderid": 100, "ordertot": 124.26f, "list": [ "ORDER_DELIVERED", "YASSER" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "YASSER", "item3": null }
-{ "orderid": 10, "ordertot": 14.2326f, "list": [ "ORDER_DELIVERED", "MIKE" ], "item1": "ORDER_DELIVERED", "item1": "ORDER_DELIVERED", "item2": "MIKE", "item3": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "list": [ "ORDER_DELIVERED", "Kathryne" ], "item1": "ORDER_DELIVERED", "item2": "Kathryne", "item3": null }
+{ "orderid": 10, "ordertot": 7.206f, "list": [ "ORDER_DELIVERED", "ALEX" ], "item1": "ORDER_DELIVERED", "item2": "ALEX", "item3": null }
+{ "orderid": 100, "ordertot": 124.26f, "list": [ "ORDER_DELIVERED", "YASSER" ], "item1": "ORDER_DELIVERED", "item2": "YASSER", "item3": null }
+{ "orderid": 10, "ordertot": 14.2326f, "list": [ "ORDER_DELIVERED", "MIKE" ], "item1": "ORDER_DELIVERED", "item2": "MIKE", "item3": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
index f22ea7f..3992f7d 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_04.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "list": [ [ "1.0f", "yassser" ], [ 11, 14, "yasir", 1.6f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ "1.0f", "yassser" ], "item1": [ "1.0f", "yassser" ], "item2": [ 11, 14, "yasir", 1.6f ], "item5": null, "item10": null }
-{ "orderid": 10, "ordertot": 7.206f, "list": [ [ 1.0f, "5.2f", "60" ], [ 13231, "foo", null, 13.25d, 13.2f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.0f, "5.2f", "60" ], "item1": [ 1.0f, "5.2f", "60" ], "item2": [ 13231, "foo", null, 13.25d, 13.2f ], "item5": null, "item10": null }
-{ "orderid": 100, "ordertot": 124.26f, "list": [ [ 1.3f, 5.2f, "60", 12.32f ], [ 10, 2.0f, 3.0d, 40 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item2": [ 10, 2.0f, 3.0d, 40 ], "item5": null, "item10": null }
-{ "orderid": 10, "ordertot": 14.2326f, "list": [ [ 2.4f, "15" ], [ 110 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), { "oid": 75, "total": 87.61863f } ], "item1": [ 2.4f, "15" ], "item1": [ 2.4f, "15" ], "item2": [ 110 ], "item5": { "oid": 75, "total": 87.61863f }, "item10": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "list": [ [ "1.0f", "yassser" ], [ 11, 14, "yasir", 1.6f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ "1.0f", "yassser" ], "item2": [ 11, 14, "yasir", 1.6f ], "item5": null, "item10": null }
+{ "orderid": 10, "ordertot": 7.206f, "list": [ [ 1.0f, "5.2f", "60" ], [ 13231, "foo", null, 13.25d, 13.2f ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.0f, "5.2f", "60" ], "item2": [ 13231, "foo", null, 13.25d, 13.2f ], "item5": null, "item10": null }
+{ "orderid": 100, "ordertot": 124.26f, "list": [ [ 1.3f, 5.2f, "60", 12.32f ], [ 10, 2.0f, 3.0d, 40 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), null ], "item1": [ 1.3f, 5.2f, "60", 12.32f ], "item2": [ 10, 2.0f, 3.0d, 40 ], "item5": null, "item10": null }
+{ "orderid": 10, "ordertot": 14.2326f, "list": [ [ 2.4f, "15" ], [ 110 ], point("10.1,11.1"), line("10.1,11.1 10.2,11.2"), polygon("1.2,1.3 2.1,2.5 3.5,3.6 4.6,4.8"), { "oid": 75, "total": 87.61863f } ], "item1": [ 2.4f, "15" ], "item2": [ 110 ], "item5": { "oid": 75, "total": 87.61863f }, "item10": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm b/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
index dd3420e..4d8b37c 100644
--- a/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
+++ b/asterix-app/src/test/resources/runtimets/results/custord/order_q_05.adm
@@ -1,4 +1,4 @@
-{ "orderid": 1000, "ordertot": 97.20656f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 10, "ordertot": 7.206f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 100, "ordertot": 124.26f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
-{ "orderid": 10, "ordertot": 14.2326f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 1000, "ordertot": 97.20656f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 10, "ordertot": 7.206f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 100, "ordertot": 124.26f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
+{ "orderid": 10, "ordertot": 14.2326f, "emptyorderedlist": [ ], "emptyunorderedlist": {{ }}, "olist_item1": null, "olist_item5": null, "ulist_item1": null }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm
new file mode 100644
index 0000000..e2f6676
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:10:43 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm
new file mode 100644
index 0000000..8c0b451
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:12:10 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm
new file mode 100644
index 0000000..9b49756
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes.adm
@@ -0,0 +1,3 @@
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "DatatypeName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ "GroupName", "DataverseName", "DatasetName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ "DataverseName", "NestedDatatypeName", "TopDatatypeName" ], "IsPrimary": false, "Timestamp": "Fri Feb 08 17:57:01 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
new file mode 100644
index 0000000..4e3714c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE" }, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:07:24 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
new file mode 100644
index 0000000..8011e4b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE" }, "Hints": {{ }}, "Timestamp": "Tue Jan 29 19:08:49 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
new file mode 100644
index 0000000..d7ae022
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
@@ -0,0 +1,5 @@
+{ "word": "am", "count": 1 }
+{ "word": "grover", "count": 1 }
+{ "word": "hi", "count": 1 }
+{ "word": "i", "count": 1 }
+{ "word": "raman", "count": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
new file mode 100644
index 0000000..1033913
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
@@ -0,0 +1,93 @@
+{ "word": "a", "count": 68 }
+{ "word": "addressing", "count": 34 }
+{ "word": "an", "count": 34 }
+{ "word": "analyzing", "count": 68 }
+{ "word": "and", "count": 238 }
+{ "word": "areas", "count": 34 }
+{ "word": "asterix", "count": 102 }
+{ "word": "by", "count": 34 }
+{ "word": "cases", "count": 68 }
+{ "word": "clusters", "count": 68 }
+{ "word": "combining", "count": 34 }
+{ "word": "commodity", "count": 34 }
+{ "word": "computing", "count": 102 }
+{ "word": "content", "count": 34 }
+{ "word": "create", "count": 34 }
+{ "word": "data", "count": 238 }
+{ "word": "database", "count": 34 }
+{ "word": "databases", "count": 34 }
+{ "word": "datum", "count": 34 }
+{ "word": "declarative", "count": 34 }
+{ "word": "developing", "count": 34 }
+{ "word": "distinct", "count": 34 }
+{ "word": "each", "count": 34 }
+{ "word": "for", "count": 34 }
+{ "word": "formats", "count": 34 }
+{ "word": "from", "count": 68 }
+{ "word": "generation", "count": 34 }
+{ "word": "highly", "count": 68 }
+{ "word": "ideas", "count": 34 }
+{ "word": "including", "count": 34 }
+{ "word": "indexing", "count": 68 }
+{ "word": "information", "count": 136 }
+{ "word": "ingesting", "count": 34 }
+{ "word": "intensive", "count": 68 }
+{ "word": "irregular", "count": 34 }
+{ "word": "is", "count": 204 }
+{ "word": "issues", "count": 34 }
+{ "word": "large", "count": 68 }
+{ "word": "managing", "count": 34 }
+{ "word": "merging", "count": 34 }
+{ "word": "much", "count": 34 }
+{ "word": "new", "count": 34 }
+{ "word": "next", "count": 34 }
+{ "word": "nothing", "count": 34 }
+{ "word": "of", "count": 136 }
+{ "word": "on", "count": 102 }
+{ "word": "open", "count": 68 }
+{ "word": "parallel", "count": 68 }
+{ "word": "performant", "count": 34 }
+{ "word": "platform", "count": 34 }
+{ "word": "problem", "count": 34 }
+{ "word": "processing", "count": 34 }
+{ "word": "project", "count": 68 }
+{ "word": "quantities", "count": 34 }
+{ "word": "query", "count": 34 }
+{ "word": "querying", "count": 34 }
+{ "word": "range", "count": 34 }
+{ "word": "ranging", "count": 34 }
+{ "word": "regular", "count": 34 }
+{ "word": "research", "count": 34 }
+{ "word": "running", "count": 34 }
+{ "word": "scalable", "count": 34 }
+{ "word": "scales", "count": 34 }
+{ "word": "semi", "count": 170 }
+{ "word": "shared", "count": 34 }
+{ "word": "software", "count": 34 }
+{ "word": "solutions", "count": 34 }
+{ "word": "source", "count": 34 }
+{ "word": "stance", "count": 34 }
+{ "word": "storage", "count": 34 }
+{ "word": "storing", "count": 34 }
+{ "word": "structured", "count": 170 }
+{ "word": "subscribing", "count": 34 }
+{ "word": "support", "count": 34 }
+{ "word": "tagged", "count": 34 }
+{ "word": "taking", "count": 34 }
+{ "word": "targets", "count": 34 }
+{ "word": "techniques", "count": 68 }
+{ "word": "technologies", "count": 34 }
+{ "word": "textual", "count": 34 }
+{ "word": "that", "count": 34 }
+{ "word": "the", "count": 102 }
+{ "word": "three", "count": 34 }
+{ "word": "to", "count": 170 }
+{ "word": "todays", "count": 34 }
+{ "word": "use", "count": 68 }
+{ "word": "vast", "count": 34 }
+{ "word": "very", "count": 34 }
+{ "word": "well", "count": 34 }
+{ "word": "where", "count": 68 }
+{ "word": "wide", "count": 34 }
+{ "word": "with", "count": 34 }
+{ "word": "yet", "count": 34 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
new file mode 100644
index 0000000..8af2f5f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
@@ -0,0 +1,4 @@
+{ "line": "The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information" }
+{ "line": "The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters" }
+{ "line": "ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual" }
+{ "line": "ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_5.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++] Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1 Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2 Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+ An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... 
journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... 
conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... 
journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... 
conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... 
journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... 
journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... 
journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm
new file mode 100644
index 0000000..d7ae022
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_6.adm
@@ -0,0 +1,5 @@
+{ "word": "am", "count": 1 }
+{ "word": "grover", "count": 1 }
+{ "word": "hi", "count": 1 }
+{ "word": "i", "count": 1 }
+{ "word": "raman", "count": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hints/issue_251_dataset_hint_7.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm
new file mode 100644
index 0000000..65bdb8f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm
@@ -0,0 +1 @@
+{ "tweetid": "1111387810", "tweetid-copy": "1111387810", "user": { "screen-name": "TonyNapier#786", "lang": "en", "friends_count": 4241366, "statuses_count": 97, "name": "Tony Napier", "followers_count": 5984113 }, "sender-location": point("29.24,78.35"), "send-time": datetime("2011-11-24T14:24:51.000Z"), "send-time-copy": datetime("2011-11-24T14:24:51.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is mind-blowing:)" }
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++] Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1 Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2 Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+ An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... 
journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... 
conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... 
journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... 
conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... 
journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... 
journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... 
journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++] Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1 Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2 Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+ An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... 
journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... 
conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... 
journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... 
conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... 
journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... 
journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... 
journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm b/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm
new file mode 100644
index 0000000..4f36f91
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/accessors.adm
@@ -0,0 +1 @@
+{ "year1": 2010, "year2": 1987, "year3": -1987, "year4": 928, "year5": 1937, "year6": -3, "year7": 9, "month1": 10, "month2": 11, "month3": 11, "month4": 3, "month5": 12, "month6": 1, "day1": 30, "day2": 19, "day3": 19, "day4": 29, "day5": 29, "day6": 634, "hour1": 23, "hour2": 20, "hour3": 5, "hour4": 14, "min1": 49, "min2": 3, "min3": 23, "min4": 28, "second1": 23, "second2": 6, "second3": 34, "second4": 48, "ms1": 938, "ms2": 280, "ms3": 930, "ms4": 94 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm b/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm
new file mode 100644
index 0000000..1f80fd9
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/adjust_timezone.adm
@@ -0,0 +1 @@
+{ "string1": "04:15:10.327+08:00", "string2": "2010-10-22T18:57:13.329-06:15" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm b/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm
new file mode 100644
index 0000000..eb7d565
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/calendar_duration.adm
@@ -0,0 +1 @@
+{ "cduration1": duration("P20Y3M13DT7H48M21.329S"), "cduration2": duration("-P9M6DT4H45M39.328S"), "cduration3": duration("P8Y6M"), "cduration4": duration("-P21Y7M10DT13H9M42.983S"), "cduration5": duration("P20Y3M12DT7H48M21.329S"), "cduration6": duration("-P9M5DT4H45M39.328S"), "cduration7": duration("P8Y6M"), "cduration8": duration("-P21Y7M10DT13H9M42.983S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm
new file mode 100644
index 0000000..2276f85
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/date_functions.adm
@@ -0,0 +1 @@
+{ "date1": date("2012-09-17"), "date2": date("1327-12-02"), "date3": date("2012-10-10"), "date4": date("2010-05-17"), "date5": date("1703-08-09"), "duration1": duration("P137216D"), "duration2": duration("-P854D") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm
new file mode 100644
index 0000000..6ebc562
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/datetime_functions.adm
@@ -0,0 +1 @@
+{ "datetime1": datetime("1970-01-12T01:33:27.429Z"), "datetime2": datetime("1327-12-02T23:35:49.938Z"), "datetime3": datetime("1327-12-02T23:35:49.938Z"), "duration1": duration("-P234526DT1H57M37.491S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm
new file mode 100644
index 0000000..5c40e46
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_delimited_ds.adm
@@ -0,0 +1,4 @@
+{ "date": date("-2012-12-12"), "time": time("23:49:12.390Z"), "datetime": datetime("3827-12-12T11:43:29.329Z"), "duration": duration("P20Y19DT4H14M23.34S") }
+{ "date": date("1993-12-12"), "time": time("03:32:00.000Z"), "datetime": datetime("-2012-12-12T05:00:23.071Z"), "duration": duration("P20Y19D") }
+{ "date": date("1839-03-12"), "time": time("12:30:49.382Z"), "datetime": datetime("1012-06-12T00:37:00.000Z"), "duration": duration("PT4H14M23.34S") }
+{ "date": date("0003-11-02"), "time": time("23:19:32.382Z"), "datetime": datetime("2012-12-12T00:00:00.001Z"), "duration": duration("P20Y12DT12H9.34S") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm
new file mode 100644
index 0000000..afe2ccc
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/insert_from_ext_ds.adm
@@ -0,0 +1,3 @@
+{ "date": date("-2012-12-12"), "time": time("23:49:12.390Z"), "datetime": datetime("2012-12-12T00:00:00.001Z"), "duration": duration("P20Y19DT4H14M23.34S"), "interval": interval("datetime("2012-12-12T00:00:00.001Z"), datetime("2013-08-10T22:10:15.398Z")") }
+{ "date": null, "time": null, "datetime": datetime("1920-12-20T23:29:18.478Z"), "duration": null, "interval": null }
+{ "date": null, "time": null, "datetime": null, "duration": null, "interval": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm
new file mode 100644
index 0000000..9f9c9d3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/interval_functions.adm
@@ -0,0 +1 @@
+{ "before1": true, "before2": false, "after1": true, "after2": false, "meet1": true, "meet2": false, "metby1": true, "metby2": false, "overlaps1": true, "overlaps2": false, "overlapped1": true, "overlapped2": false, "overlap1": true, "overlap2": false, "starts1": true, "starts2": false, "startedby1": true, "startedby2": false, "covers1": true, "covers2": false, "coveredby1": true, "coveredby2": false, "ends1": true, "ends2": false, "endedby1": true, "endedby2": false }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm b/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm
new file mode 100644
index 0000000..56531bd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temp/time_functions.adm
@@ -0,0 +1 @@
+{ "time1": time("00:26:00.074Z"), "time2": time("23:35:49.938Z"), "time3": time("23:30:23.000Z"), "time4": time("18:26:00.074Z"), "time5": time("00:11:49.938Z"), "duration1": duration("-PT23H24M"), "duration2": duration("PT18H") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm b/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm
new file mode 100644
index 0000000..422b480
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm
@@ -0,0 +1,20 @@
+{ "c_custkey": 121i64, "c_name": "Customer#000000121", "revenue": 282635.1719d, "c_acctbal": 6428.32d, "n_name": "PERU", "c_address": "tv nCR2YKupGN73mQudO", "c_phone": "27-411-990-2959", "c_comment": "uriously stealthy ideas. carefully final courts use carefully" }
+{ "c_custkey": 124i64, "c_name": "Customer#000000124", "revenue": 222182.5188d, "c_acctbal": 1842.49d, "n_name": "CHINA", "c_address": "aTbyVAW5tCd,v09O", "c_phone": "28-183-750-7809", "c_comment": "le fluffily even dependencies. quietly s" }
+{ "c_custkey": 106i64, "c_name": "Customer#000000106", "revenue": 190241.3334d, "c_acctbal": 3288.42d, "n_name": "ARGENTINA", "c_address": "xGCOEAUjUNG", "c_phone": "11-751-989-4627", "c_comment": "lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. " }
+{ "c_custkey": 16i64, "c_name": "Customer#000000016", "revenue": 161422.04609999998d, "c_acctbal": 4681.03d, "n_name": "IRAN", "c_address": "cYiaeMLZSMAOQ2 d0W,", "c_phone": "20-781-609-3107", "c_comment": "kly silent courts. thinly regular theodolites sleep fluffily after " }
+{ "c_custkey": 44i64, "c_name": "Customer#000000044", "revenue": 149364.5652d, "c_acctbal": 7315.94d, "n_name": "MOZAMBIQUE", "c_address": "Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi", "c_phone": "26-190-260-5375", "c_comment": "r requests around the unusual, bold a" }
+{ "c_custkey": 71i64, "c_name": "Customer#000000071", "revenue": 129481.02450000001d, "c_acctbal": -611.19d, "n_name": "GERMANY", "c_address": "TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B", "c_phone": "17-710-812-5403", "c_comment": "g courts across the regular, final pinto beans are blithely pending ac" }
+{ "c_custkey": 89i64, "c_name": "Customer#000000089", "revenue": 121663.1243d, "c_acctbal": 1530.76d, "n_name": "KENYA", "c_address": "dtR, y9JQWUO6FoJExyp8whOU", "c_phone": "24-394-451-5404", "c_comment": "counts are slyly beyond the slyly final accounts. quickly final ideas wake. r" }
+{ "c_custkey": 112i64, "c_name": "Customer#000000112", "revenue": 111137.7141d, "c_acctbal": 2953.35d, "n_name": "ROMANIA", "c_address": "RcfgG3bO7QeCnfjqJT1", "c_phone": "29-233-262-8382", "c_comment": "rmanently unusual multipliers. blithely ruthless deposits are furiously along the" }
+{ "c_custkey": 62i64, "c_name": "Customer#000000062", "revenue": 106368.0153d, "c_acctbal": 595.61d, "n_name": "GERMANY", "c_address": "upJK2Dnw13,", "c_phone": "17-361-978-7059", "c_comment": "kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a" }
+{ "c_custkey": 146i64, "c_name": "Customer#000000146", "revenue": 103265.98879999999d, "c_acctbal": 3328.68d, "n_name": "CANADA", "c_address": "GdxkdXG9u7iyI1,,y5tq4ZyrcEy", "c_phone": "13-835-723-3223", "c_comment": "ffily regular dinos are slyly unusual requests. slyly specia" }
+{ "c_custkey": 19i64, "c_name": "Customer#000000019", "revenue": 99306.0127d, "c_acctbal": 8914.71d, "n_name": "CHINA", "c_address": "uc,3bHIx84H,wdrmLOjVsiqXCq2tr", "c_phone": "28-396-526-5053", "c_comment": " nag. furiously careful packages are slyly at the accounts. furiously regular in" }
+{ "c_custkey": 145i64, "c_name": "Customer#000000145", "revenue": 99256.9018d, "c_acctbal": 9748.93d, "n_name": "JORDAN", "c_address": "kQjHmt2kcec cy3hfMh969u", "c_phone": "23-562-444-8454", "c_comment": "ests? express, express instructions use. blithely fina" }
+{ "c_custkey": 103i64, "c_name": "Customer#000000103", "revenue": 97311.77240000002d, "c_acctbal": 2757.45d, "n_name": "INDONESIA", "c_address": "8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a", "c_phone": "19-216-107-2107", "c_comment": "furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl" }
+{ "c_custkey": 136i64, "c_name": "Customer#000000136", "revenue": 95855.39799999999d, "c_acctbal": -842.39d, "n_name": "GERMANY", "c_address": "QoLsJ0v5C1IQbh,DS1", "c_phone": "17-501-210-4726", "c_comment": "ackages sleep ironic, final courts. even requests above the blithely bold requests g" }
+{ "c_custkey": 53i64, "c_name": "Customer#000000053", "revenue": 92568.9124d, "c_acctbal": 4113.64d, "n_name": "MOROCCO", "c_address": "HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib", "c_phone": "25-168-852-5363", "c_comment": "ar accounts are. even foxes are blithely. fluffily pending deposits boost" }
+{ "c_custkey": 49i64, "c_name": "Customer#000000049", "revenue": 90965.7262d, "c_acctbal": 4573.94d, "n_name": "IRAN", "c_address": "cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl", "c_phone": "20-908-631-4424", "c_comment": "nusual foxes! fluffily pending packages maintain to the regular " }
+{ "c_custkey": 37i64, "c_name": "Customer#000000037", "revenue": 88065.74579999999d, "c_acctbal": -917.75d, "n_name": "INDIA", "c_address": "7EV4Pwh,3SboctTWt", "c_phone": "18-385-235-7162", "c_comment": "ilent packages are carefully among the deposits. furiousl" }
+{ "c_custkey": 82i64, "c_name": "Customer#000000082", "revenue": 86998.9644d, "c_acctbal": 9468.34d, "n_name": "CHINA", "c_address": "zhG3EZbap4c992Gj3bK,3Ne,Xn", "c_phone": "28-159-442-5305", "c_comment": "s wake. bravely regular accounts are furiously. regula" }
+{ "c_custkey": 125i64, "c_name": "Customer#000000125", "revenue": 84808.068d, "c_acctbal": -234.12d, "n_name": "ROMANIA", "c_address": ",wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y", "c_phone": "29-261-996-3120", "c_comment": "x-ray finally after the packages? regular requests c" }
+{ "c_custkey": 59i64, "c_name": "Customer#000000059", "revenue": 84655.5711d, "c_acctbal": 3458.6d, "n_name": "ARGENTINA", "c_address": "zLOCP0wh92OtBihgspOGl4", "c_phone": "11-355-584-3112", "c_comment": "ously final packages haggle blithely after the express deposits. furiou" }
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
index 7fc9d1f..8bd28d4 100644
--- a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23.adm
@@ -1,6 +1,6 @@
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Nov 05 10:59:27 PST 2012" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
+{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Hints": {{ }}, "Timestamp": "Tue Jan 29 18:54:03 PST 2013" }
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 2cb241d..5f52f9a 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -754,8 +754,18 @@
</compilation-unit>
</test-case>
<test-case FilePath="dml">
- <compilation-unit name="drop-index">
- <output-file compare="Text">drop-index.adm</output-file>
+ <compilation-unit name="drop-empty-secondary-indexes">
+ <output-file compare="Text">drop-empty-secondary-indexes.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
+ <compilation-unit name="create-drop-cltype">
+ <output-file compare="Text">create-drop-cltype.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
+ <compilation-unit name="create-drop-opntype">
+ <output-file compare="Text">create-drop-opntype.adm</output-file>
</compilation-unit>
</test-case>
<test-case FilePath="dml">
@@ -2008,6 +2018,12 @@
</test-case>
</test-group>
<test-group name="misc">
+ <test-case FilePath="misc">
+ <compilation-unit name="partition-by-nonexistent-field">
+ <output-file compare="Text">partition-by-nonexistent-field.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
<test-case FilePath="misc">
<compilation-unit name="float_01">
<output-file compare="Text">float_01.adm</output-file>
@@ -2454,6 +2470,11 @@
<output-file compare="Text">query-issue208.adm</output-file>
</compilation-unit>
</test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue236">
+ <output-file compare="Text">query-issue236.adm</output-file>
+ </compilation-unit>
+ </test-case>
<!--
<test-case FilePath="open-closed">
<compilation-unit name="open-closed-15">
@@ -2666,6 +2687,24 @@
<output-file compare="Text">open-record-constructor_02.adm</output-file>
</compilation-unit>
</test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="closed-closed-fieldname-conflict_issue173">
+ <output-file compare="Text">closed-closed-fieldname-conflict_issue173.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-closed-fieldname-conflict_issue173">
+ <output-file compare="Text">open-closed-fieldname-conflict_issue173.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-open-fieldname-conflict_issue173">
+ <output-file compare="Text">open-open-fieldname-conflict_issue173.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="scan">
<test-case FilePath="scan">
@@ -2679,6 +2718,16 @@
</compilation-unit>
</test-case>
<test-case FilePath="scan">
+ <compilation-unit name="issue238_query_1">
+ <output-file compare="Text">issue238_query_1.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="scan">
+ <compilation-unit name="issue238_query_2">
+ <output-file compare="Text">issue238_query_2.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="scan">
<compilation-unit name="30">
<output-file compare="Text">30.adm</output-file>
</compilation-unit>
@@ -3498,6 +3547,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="tpch">
+ <compilation-unit name="q10_returned_item_int64">
+ <output-file compare="Text">q10_returned_item_int64.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="tpch">
<compilation-unit name="q11_important_stock">
<output-file compare="Text">q11_important_stock.adm</output-file>
</compilation-unit>
@@ -3872,4 +3926,73 @@
</compilation-unit>
</test-case>
</test-group>
-</test-suite>
\ No newline at end of file
+ <test-group name="load">
+ <test-case FilePath="load">
+ <compilation-unit name="issue14_query">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="hints">
+ <test-case FilePath="hints">
+ <compilation-unit name="issue_251_dataset_hint_5">
+ <output-file compare="Text">issue_251_dataset_hint_5.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hints">
+ <compilation-unit name="issue_251_dataset_hint_6">
+ <output-file compare="Text">issue_251_dataset_hint_6.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hints">
+ <compilation-unit name="issue_251_dataset_hint_7">
+ <output-file compare="Text">issue_251_dataset_hint_7.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="feeds">
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_01">
+ <output-file compare="Text">feeds_01.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_02">
+ <output-file compare="Text">feeds_02.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_03">
+ <output-file compare="Text">feeds_03.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_04">
+ <output-file compare="Text">feeds_04.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="issue_230_feeds">
+ <output-file compare="Text">issue_230_feeds.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="hdfs">
+ <test-case FilePath="hdfs">
+ <compilation-unit name="issue_245_hdfs">
+ <output-file compare="Text">issue_245_hdfs.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hdfs">
+ <compilation-unit name="hdfs_02">
+ <output-file compare="Text">hdfs_02.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hdfs">
+ <compilation-unit name="hdfs_03">
+ <output-file compare="Text">hdfs_03.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+</test-suite>
diff --git a/asterix-app/src/test/resources/spatial/local/spatial-drop.aql b/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
index 61d4913..fc881ad 100644
--- a/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
+++ b/asterix-app/src/test/resources/spatial/local/spatial-drop.aql
@@ -11,6 +11,6 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset MyData(MyRecord)
- partitioned by key id on group1;
+ primary key id on group1;
drop dataset MyData;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/spatial/local/spatial-load.aql b/asterix-app/src/test/resources/spatial/local/spatial-load.aql
index 6c135f3..1df33c0 100644
--- a/asterix-app/src/test/resources/spatial/local/spatial-load.aql
+++ b/asterix-app/src/test/resources/spatial/local/spatial-load.aql
@@ -11,7 +11,7 @@
set format "edu.uci.ics.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
declare dataset MyData(MyRecord)
- partitioned by key id on group1;
+ primary key id on group1;
load dataset MyData
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql b/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
index 2eb06f5..b0396f9 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/enlist_data.aql
@@ -89,21 +89,21 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
enlist dataset Customers;
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
index 1ac9c1c..bb21d7e 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/inlined_q18_large_volume_customer.aql
@@ -46,11 +46,11 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to asterix-001:"/tmp/inlined_q18_large_volume_customer.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
index e4b3a3b..7b872f0 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_adm.aql
@@ -89,21 +89,21 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
index 1d40ba7..c4ccbe0 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_100x.aql
@@ -89,21 +89,21 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset Customers
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
index bb4a49d..654dcb6 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/load_data_tbl_1x.aql
@@ -89,21 +89,21 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset Customers
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
index b29e7a6..411fdbb 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q1_pricing_summary_report.aql
@@ -23,7 +23,7 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
write output to asterix-001:"/home/onose/hyracks-asterix/results/q1_pricing_summary_report.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
index 71ed6db..6f52101 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q3_shipping_priority.aql
@@ -46,11 +46,11 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to asterix-001:"/home/onose/hyracks-asterix/results/q3_shipping_priority.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
index 8d0d6dd..7544d19 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q5_local_supplier_volume.aql
@@ -69,17 +69,17 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
write output to asterix-001:"/home/onose/hyracks-asterix/results/q5_local_supplier.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql b/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
index 3067fbe..164869a 100644
--- a/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
+++ b/asterix-app/src/test/resources/tpch/queries/asterix/q9_product_type_profit.aql
@@ -73,17 +73,17 @@
asterix-005, asterix-006, asterix-007, asterix-008, asterix-009, asterix-010;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to asterix-001:"/home/onose/hyracks-asterix/results/q9_product_type_profit.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql b/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
index abd06af..280a1d0 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/inlined_q18_large_volume_customer.aql
@@ -45,11 +45,11 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/inlined_q18_large_volume_customer.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql b/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
index b7e5b3d..d31c421 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_adm_data.aql
@@ -88,21 +88,21 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
index 0d03e5d..47724fa 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_0.001x_2nodes.aql
@@ -88,21 +88,21 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
index 7c8fc8c..49e2a9a 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_10x_1node.aql
@@ -88,21 +88,21 @@
declare nodegroup group1 on nc1;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
index b6cf6f8..3e6ccc4 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_1x_1node.aql
@@ -88,21 +88,21 @@
declare nodegroup group1 on nc1;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
index 606cde1..f44d388 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/load_tbl_data_20x_2node.aql
@@ -88,21 +88,21 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset LineItems
using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
diff --git a/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql b/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
index e369f10..9c5e361 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/orders-index-create.aql
@@ -17,6 +17,6 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
create index idx_Orders_Custkey on Orders(o_custkey);
diff --git a/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql b/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
index f4c9fd9..66e88a5 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/orders-index-search.aql
@@ -17,7 +17,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare index idx_Orders_Custkey on Orders(o_custkey);
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql b/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
index a69528c..9ab0322 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q1_pricing_summary_report.aql
@@ -22,7 +22,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
// for external datasets, use:
//
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql b/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
index b5c27cc..cadc5c9 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q3_shipping_priority.aql
@@ -45,11 +45,11 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
write output to nc1:"/tmp/q3_shipping_priority.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql b/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
index 9fe2ed6..724d58b 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q5_local_supplier_volume.aql
@@ -68,17 +68,17 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
write output to nc1:"/tmp/q5_local_supplier_volume.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql b/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
index c0643b1..5407270 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/q9_product_type_profit.aql
@@ -72,17 +72,17 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
write output to nc1:"/tmp/q9_product_type_profit.adm";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql b/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
index 3855746..ccddad0 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/scan_filter_lineitem.aql
@@ -22,7 +22,7 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
// load dataset LineItems from nc1:"/opt/tpch/tengigs/lineitem.tbl" delimited by "|";
diff --git a/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql b/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
index 9757ff8..b767d57 100644
--- a/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
+++ b/asterix-app/src/test/resources/tpch/queries/local/write-custorder.aql
@@ -50,13 +50,13 @@
declare nodegroup group1 on nc1, nc2;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset CustOrder(CustOrderType)
- partitioned by key co_custkey on group1;
+ primary key co_custkey on group1;
write into dataset CustOrder
( for $c in dataset('Customers')
diff --git a/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql b/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
index 9635891..7d0295d 100644
--- a/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
+++ b/asterix-app/src/test/resources/tpch/queries/rainbow/load_data_tbl_50x.aql
@@ -89,21 +89,21 @@
rainbow-04, rainbow-05;
declare dataset LineItems(LineItemType)
- partitioned by key l_orderkey, l_linenumber on group1;
+ primary key l_orderkey, l_linenumber on group1;
declare dataset Orders(OrderType)
- partitioned by key o_orderkey on group1;
+ primary key o_orderkey on group1;
declare dataset Customers(CustomerType)
- partitioned by key c_custkey on group1;
+ primary key c_custkey on group1;
declare dataset Suppliers(SupplierType)
- partitioned by key s_suppkey on group1;
+ primary key s_suppkey on group1;
declare dataset Nations(NationType)
- partitioned by key n_nationkey on group1;
+ primary key n_nationkey on group1;
declare dataset Regions(RegionType)
- partitioned by key r_regionkey on group1;
+ primary key r_regionkey on group1;
declare dataset Parts(PartType)
- partitioned by key p_partkey on group1;
+ primary key p_partkey on group1;
declare dataset PartSupp(PartSuppType)
- partitioned by key ps_partkey, ps_suppkey on group1;
+ primary key ps_partkey, ps_suppkey on group1;
load dataset Customers from
diff --git a/asterix-aql/pom.xml b/asterix-aql/pom.xml
index 7ac2cbb..50a0f99 100644
--- a/asterix-aql/pom.xml
+++ b/asterix-aql/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
@@ -13,8 +14,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
<plugin>
@@ -36,14 +38,15 @@
<goals>
<goal>jjdoc</goal>
</goals>
- <phase>process-sources</phase>
+ <phase>process-sources</phase>
</execution>
</executions>
</plugin>
</plugins>
<pluginManagement>
<plugins>
- <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+ <!--This plugin's configuration is used to store Eclipse m2e settings
+ only. It has no influence on the Maven build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
index 463a256..443524e 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
@@ -14,6 +14,8 @@
*/
package edu.uci.ics.asterix.aql.expression;
+import java.util.Map;
+
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
@@ -26,17 +28,18 @@
protected final Identifier itemTypeName;
protected final DatasetType datasetType;
protected final IDatasetDetailsDecl datasetDetailsDecl;
+ protected final Map<String, String> hints;
+ protected final boolean ifNotExists;
- public boolean ifNotExists;
-
- public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, DatasetType datasetType,
- IDatasetDetailsDecl idd, boolean ifNotExists) {
+ public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, Map<String, String> hints,
+ DatasetType datasetType, IDatasetDetailsDecl idd, boolean ifNotExists) {
this.dataverse = dataverse;
this.name = name;
this.itemTypeName = itemTypeName;
+ this.hints = hints;
this.ifNotExists = ifNotExists;
this.datasetType = datasetType;
- datasetDetailsDecl = idd;
+ this.datasetDetailsDecl = idd;
}
public boolean getIfNotExists() {
@@ -55,6 +58,10 @@
return itemTypeName;
}
+ public Map<String, String> getHints() {
+ return hints;
+ }
+
@Override
public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
return visitor.visitDatasetDecl(this, arg);
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index 59a7753..5256322 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -17,6 +17,7 @@
import java.util.Map;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import edu.uci.ics.asterix.aql.literal.FloatLiteral;
import edu.uci.ics.asterix.aql.literal.DoubleLiteral;
import edu.uci.ics.asterix.aql.literal.FalseLiteral;
@@ -633,7 +634,7 @@
Identifier dataverseName = null;
Identifier datasetName = null;
boolean alreadySorted = false;
- String adapterClassname;
+ String adapterName;
Map<String,String> properties;
Pair<Identifier,Identifier> nameComponents = null;
}
@@ -646,12 +647,11 @@
}
"using"
-
- <STRING_LITERAL>
+
{
- adapterClassname = removeQuotesAndEscapes(token.image);
+ adapterName = getAdapterName();
}
-
+
{
properties = getConfiguration();
}
@@ -662,11 +662,31 @@
";"
{
- return new LoadFromFileStatement(dataverseName, datasetName, adapterClassname, properties, alreadySorted);
+ return new LoadFromFileStatement(dataverseName, datasetName, adapterName, properties, alreadySorted);
}
}
+String getAdapterName() throws ParseException :
+{
+ String adapterName = null;
+}
+{
+ (
+ <IDENTIFIER> {
+ adapterName = (new Identifier(token.image)).getValue();;
+ }
+ |
+ <STRING_LITERAL>
+ {
+ adapterName = removeQuotesAndEscapes(token.image);
+ }
+ )
+ {
+ return adapterName;
+ }
+}
+
DatasetDecl DatasetDeclaration(DatasetType datasetType) throws ParseException :
{
@@ -679,7 +699,8 @@
String nameComponentSecond = null;
boolean ifNotExists = false;
IDatasetDetailsDecl datasetDetails = null;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<Identifier,Identifier> nameComponents = null;
+ Map<String,String> hints = new HashMap<String,String>();
}
{
{
@@ -711,41 +732,44 @@
else if(datasetType == DatasetType.FEED) {
datasetDetails = FeedDatasetDeclaration();
}
- dd = new DatasetDecl(dataverseName, datasetName, itemTypeName, datasetType, datasetDetails,ifNotExists);
-
}
+
+ (
+ "hints"
{
- return dd;
+ initProperties(hints);
+ }
+ )?
+ ";"
+
+ {
+ dd = new DatasetDecl(dataverseName, datasetName, itemTypeName, hints, datasetType, datasetDetails,ifNotExists);
+ return dd;
}
}
InternalDetailsDecl InternalDatasetDeclaration() throws ParseException :
{
InternalDetailsDecl idd = null;
- List<String> partitioningExprs = new ArrayList<String>();
+ List<String> primaryKeyFields = new ArrayList<String>();
Identifier nodeGroupName=null;
}
{
- "partitioned" "by" "key"
- < IDENTIFIER >
- {
- partitioningExprs.add(token.image);
- }
(
- "," < IDENTIFIER >
{
- partitioningExprs.add(token.image);
+ primaryKeyFields = getPrimaryKeyFields();
}
- )*
+ )
+
(
"on" < IDENTIFIER >
{
nodeGroupName = new Identifier(token.image);
}
)?
- ";"
+
{
- idd = new InternalDetailsDecl(nodeGroupName, partitioningExprs);
+ idd = new InternalDetailsDecl(nodeGroupName, primaryKeyFields);
return idd;
}
}
@@ -753,7 +777,7 @@
ExternalDetailsDecl ExternalDatasetDeclaration() throws ParseException :
{
ExternalDetailsDecl edd = null;
- String adapterClassname = null;
+ String adapterName = null;
Map < String, String > properties;
}
{
@@ -762,10 +786,8 @@
}
"using"
-
- <STRING_LITERAL>
{
- adapterClassname = removeQuotesAndEscapes(token.image);
+ adapterName = getAdapterName();
}
{
@@ -774,10 +796,9 @@
{
edd = new ExternalDetailsDecl();
- edd.setAdapter(adapterClassname);
+ edd.setAdapter(adapterName);
edd.setProperties(properties);
}
- ";"
{
return edd;
@@ -787,10 +808,10 @@
FeedDetailsDecl FeedDatasetDeclaration() throws ParseException :
{
FeedDetailsDecl fdd = null;
- String adapterFactoryClassname = null;
+ String adapterName = null;
Map < String, String > properties;
Pair<Identifier,Identifier> nameComponents;
- List<String> partitioningExprs = new ArrayList<String>();
+ List<String> primaryKeyFields = new ArrayList<String>();
Identifier nodeGroupName=null;
FunctionSignature appliedFunction=null;
String dataverse;
@@ -799,10 +820,8 @@
}
{
"using"
-
- <STRING_LITERAL>
{
- adapterFactoryClassname = removeQuotesAndEscapes(token.image);
+ adapterName = getAdapterName();
}
{
@@ -815,7 +834,7 @@
dataverse = nameComponents.first != null ? nameComponents.first.getValue() : defaultDataverse;
functionName = nameComponents.second.getValue();
}
- ("@" <IDENTIFIER>
+ ("@" <INTEGER_LITERAL>
{
arity = Integer.parseInt(token.image);
}
@@ -826,30 +845,52 @@
}
)?
- "partitioned" "by" "key"
- < IDENTIFIER >
- {
- partitioningExprs.add(token.image);
- }
(
- "," < IDENTIFIER >
{
- partitioningExprs.add(token.image);
+ primaryKeyFields = getPrimaryKeyFields();
}
- )*
+ )
+
(
"on" < IDENTIFIER >
{
nodeGroupName = new Identifier(token.image);
}
)?
- ";"
+
{
- fdd = new FeedDetailsDecl(adapterFactoryClassname, properties, appliedFunction, nodeGroupName, partitioningExprs);
+ fdd = new FeedDetailsDecl(adapterName, properties, appliedFunction, nodeGroupName, primaryKeyFields);
return fdd;
}
}
+List<String> getPrimaryKeyFields() throws ParseException :
+{
+ List<String> primaryKeyFields = new ArrayList<String>();
+}
+{
+
+ "primary" "key"
+ < IDENTIFIER >
+ {
+ primaryKeyFields.add(token.image);
+ }
+ (
+ "," < IDENTIFIER >
+ {
+ primaryKeyFields.add(token.image);
+ }
+ )*
+ {
+ return primaryKeyFields;
+ }
+
+}
+
+
+
+
+
ControlFeedStatement ControlFeedDeclaration(ControlFeedStatement.OperationType operationType) throws ParseException :
{
Pair<Identifier,Identifier> nameComponents = null;
@@ -876,7 +917,7 @@
{
configuration = getConfiguration();
}
- ";"
+
{
return new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
}
@@ -884,7 +925,7 @@
Map<String,String> getConfiguration() throws ParseException :
{
- Map<String,String> configuration = new HashMap<String,String>();
+ Map<String,String> configuration = new LinkedHashMap<String,String>();
String key;
String value;
}
@@ -933,6 +974,75 @@
}
}
+void initProperties(Map<String,String> properties) throws ParseException :
+{
+ String key;
+ String value;
+}
+{
+ (
+ <LEFTPAREN>
+ (
+ <IDENTIFIER>
+ {
+ key = (new Identifier(token.image)).getValue();
+ }
+ "="
+ (
+ (<STRING_LITERAL>
+ {
+ value = removeQuotesAndEscapes(token.image);
+ }
+ ) |
+ (<INTEGER_LITERAL>
+ {
+ try{
+ value = "" + Long.valueOf(token.image);
+ } catch (NumberFormatException nfe){
+ throw new ParseException("inapproriate value: " + token.image);
+ }
+ }
+ )
+ )
+ {
+ properties.put(key.toUpperCase(), value);
+ }
+ (
+ ","
+ (
+ <IDENTIFIER>
+ {
+ key = (new Identifier(token.image)).getValue();
+ }
+ "="
+ (
+ (<STRING_LITERAL>
+ {
+ value = removeQuotesAndEscapes(token.image);
+ }
+ ) |
+ (<INTEGER_LITERAL>
+ {
+ try{
+ value = "" + Long.valueOf(token.image);
+ } catch (NumberFormatException nfe){
+ throw new ParseException("inapproriate value: " + token.image);
+ }
+ }
+ )
+ )
+ )
+ {
+ properties.put(key.toUpperCase(), value);
+ }
+
+ )*
+ )
+ <RIGHTPAREN>
+ )?
+}
+
+
NodegroupDecl NodegroupDeclaration() throws ParseException :
{
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index f7f3767..d71e9d3 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
<plugin>
@@ -15,8 +14,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
</plugins>
@@ -25,13 +25,11 @@
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
@@ -43,7 +41,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-common</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
</dependencies>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
index cef79bf..0f1de56 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
@@ -1,20 +1,48 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.common.api;
-import java.util.Map;
-import java.util.Set;
-
import edu.uci.ics.asterix.common.context.AsterixRuntimeComponentsProvider;
import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+/*
+ * Acts as an holder class for IndexRegistryProvider, AsterixStorageManager
+ * instances that are accessed from the NCs. In addition an instance of ICCApplicationContext
+ * is stored for access by the CC.
+ */
public class AsterixAppContextInfoImpl implements IAsterixApplicationContextInfo {
- public static final AsterixAppContextInfoImpl INSTANCE = new AsterixAppContextInfoImpl();
+ private static AsterixAppContextInfoImpl INSTANCE;
- private static Map<String, Set<String>> nodeControllerMap;
+ private final ICCApplicationContext appCtx;
- private AsterixAppContextInfoImpl() {
+ public static void initialize(ICCApplicationContext ccAppCtx) {
+ if (INSTANCE == null) {
+ INSTANCE = new AsterixAppContextInfoImpl(ccAppCtx);
+ }
+ }
+
+ private AsterixAppContextInfoImpl(ICCApplicationContext ccAppCtx) {
+ this.appCtx = ccAppCtx;
+ }
+
+ public static IAsterixApplicationContextInfo getInstance() {
+ return INSTANCE;
}
@Override
@@ -22,17 +50,13 @@
return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
}
- public static void setNodeControllerInfo(Map<String, Set<String>> nodeControllerInfo) {
- nodeControllerMap = nodeControllerInfo;
- }
-
- public static Map<String, Set<String>> getNodeControllerMap() {
- return nodeControllerMap;
+ @Override
+ public ICCApplicationContext getCCApplicationContext() {
+ return appCtx;
}
@Override
public IIndexLifecycleManagerProvider getIndexLifecycleManagerProvider() {
return AsterixRuntimeComponentsProvider.NOINDEX_PROVIDER;
}
-
}
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
index a214ab2..85fb784 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
@@ -23,9 +23,11 @@
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousScheduler;
import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -38,7 +40,7 @@
public class AsterixAppRuntimeContext {
private static final int DEFAULT_BUFFER_CACHE_PAGE_SIZE = 32768;
private static final int DEFAULT_LIFECYCLEMANAGER_MEMORY_BUDGET = 1024 * 1024 * 1024; // 1GB
- private static final int DEFAULT_MAX_OPEN_FILES = 500;
+ private static final int DEFAULT_MAX_OPEN_FILES = Integer.MAX_VALUE;
private final INCApplicationContext ncApplicationContext;
private IIndexLifecycleManager indexLifecycleManager;
@@ -67,12 +69,13 @@
ICacheMemoryAllocator allocator = new HeapBufferAllocator();
IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
ioManager = ncApplicationContext.getRootContext().getIOManager();
- bufferCache = new BufferCache(ioManager, allocator, prs, fileMapManager, pageSize, numPages,
- DEFAULT_MAX_OPEN_FILES);
indexLifecycleManager = new IndexLifecycleManager(DEFAULT_LIFECYCLEMANAGER_MEMORY_BUDGET);
IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProviderForRecovery(
this);
txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider);
+ IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
+ bufferCache = new BufferCache(ioManager, allocator, prs, pcp, fileMapManager, pageSize, numPages,
+ DEFAULT_MAX_OPEN_FILES);
lsmIOScheduler = SynchronousScheduler.INSTANCE;
mergePolicy = new ConstantMergePolicy(3);
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
index 200c2e19..401af28 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
@@ -1,10 +1,41 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.common.dataflow;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+/**
+ * Provides methods for obtaining the IIndexRegistryProvider, IStorageManager and
+ * ICCApplicationContext implementation.
+ */
public interface IAsterixApplicationContextInfo {
public IIndexLifecycleManagerProvider getIndexLifecycleManagerProvider();
+ /**
+ * Returns an instance of the implementation for IStorageManagerInterface.
+ *
+ * @return IStorageManagerInterface implementation instance
+ */
public IStorageManagerInterface getStorageManagerInterface();
+
+ /**
+ * Returns an instance of the implementation for ICCApplicationContext.
+ *
+ * @return ICCApplicationContext implementation instance
+ */
+ public ICCApplicationContext getCCApplicationContext();
}
diff --git a/asterix-dist/pom.xml b/asterix-dist/pom.xml
index 53168b4..654a11e 100644
--- a/asterix-dist/pom.xml
+++ b/asterix-dist/pom.xml
@@ -1,54 +1,55 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <artifactId>asterix-dist</artifactId>
- <parent>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix</artifactId>
- <version>0.0.4-SNAPSHOT</version>
- </parent>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>asterix-dist</artifactId>
+ <parent>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix</artifactId>
+ <version>0.0.4-SNAPSHOT</version>
+ </parent>
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.2-beta-5</version>
- <executions>
- <execution>
- <configuration>
- <descriptors>
- <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
- </descriptors>
- </configuration>
- <phase>package</phase>
- <goals>
- <goal>attached</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <dependencies>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-server</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-cli</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-app</artifactId>
- <version>0.0.4-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-5</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-server</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-cli</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-app</artifactId>
+ <version>0.0.4-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ </dependencies>
</project>
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index 7da6bd9..f54f1c2 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-external-data</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -16,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
<plugin>
@@ -52,14 +52,12 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>2.7.2</version>
<configuration>
- <!--
- doesn't work from m2eclipse, currently
- <additionalClasspathElements>
- <additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
- </additionalClasspathElements>
- -->
+ <!-- doesn't work from m2eclipse, currently <additionalClasspathElements>
+ <additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
+ </additionalClasspathElements> -->
<forkMode>pertest</forkMode>
- <argLine>-enableassertions -Xmx${test.heap.size}m -Dfile.encoding=UTF-8
+ <argLine>-enableassertions -Xmx${test.heap.size}m
+ -Dfile.encoding=UTF-8
-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
<includes>
<include>**/*TestSuite.java</include>
@@ -104,40 +102,39 @@
<version>1.3.1-201002241208</version>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.twitter4j</groupId>
- <artifactId>twitter4j-core</artifactId>
- <version>2.2.3</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-core</artifactId>
- <version>0.20.2</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>net.java.dev.rome</groupId>
- <artifactId>rome-fetcher</artifactId>
- <version>1.0.0</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>rome</groupId>
- <artifactId>rome</artifactId>
- <version>1.0.1-modified-01</version>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-dataflow-hadoop</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- </dependency>
-<dependency>
- <groupId>jdom</groupId>
- <artifactId>jdom</artifactId>
- <version>1.0</version>
- </dependency>
+ <dependency>
+ <groupId>org.twitter4j</groupId>
+ <artifactId>twitter4j-core</artifactId>
+ <version>2.2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>0.20.2</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>net.java.dev.rome</groupId>
+ <artifactId>rome-fetcher</artifactId>
+ <version>1.0.0</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>rome</groupId>
+ <artifactId>rome</artifactId>
+ <version>1.0.1-modified-01</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-hadoop</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>jdom</groupId>
+ <artifactId>jdom</artifactId>
+ <version>1.0</version>
+ </dependency>
</dependencies>
</project>
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
index 7d5984a..f1f5884 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,10 @@
import edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-public class CNNFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * A factory class for creating the @see {CNNFeedAdapter}.
+ */
+public class CNNFeedAdapterFactory implements ITypedDatasetAdapterFactory {
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -19,14 +36,4 @@
return "cnn_feed";
}
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
- }
-
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.FEED;
- }
-
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
index f0e30b69..6fcb710 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,12 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class HDFSAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HDFSAdapter
+ */
+public class HDFSAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ public static final String HDFS_ADAPTER_NAME = "hdfs";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,13 +35,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "hdfs";
+ return HDFS_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
index b21abe6..5e28eed 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,10 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class HiveAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HiveAdapter
+ */
+public class HiveAdapterFactory implements IGenericDatasetAdapterFactory {
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
@@ -16,11 +33,6 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
return "hive";
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
index ca59da7..45fd6cf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
@@ -1,13 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
+/**
+ * Base interface for IGenericDatasetAdapterFactory and ITypedDatasetAdapterFactory.
+ * Acts as a marker interface indicating that the implementation provides functionality
+ * for creating an adapter.
+ */
public interface IAdapterFactory {
- public enum AdapterType {
- EXTERNAL_DATASET,
- FEED
- }
-
- public AdapterType getAdapterType();
-
+ /**
+ * Returns the display name corresponding to the Adapter type that is created by the factory.
+ *
+ * @return the display name
+ */
public String getName();
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
deleted file mode 100644
index 22768a3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IExternalDatasetAdapterFactory extends IAdapterFactory {
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType sourceType) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
deleted file mode 100644
index a7d5998..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-
-public interface IFeedDatasetAdapterFactory extends IAdapterFactory {
-
- public enum FeedAdapterType {
- GENERIC,
- TYPED
- }
-
- public FeedAdapterType getFeedAdapterType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
new file mode 100644
index 0000000..093a3dd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * A base interface for an adapter factory that creates instance of an adapter kind that
+ * is 'generic' in nature. A 'typed' adapter returns records with a configurable datatype.
+ */
+public interface IGenericDatasetAdapterFactory extends IAdapterFactory {
+
+ public static final String KEY_TYPE_NAME = "output-type-name";
+
+ /**
+ * Creates an instance of IDatasourceAdapter.
+ *
+ * @param configuration
+ * The configuration parameters for the adapter that is instantiated.
+ * The passed-in configuration is used to configure the created instance of the adapter.
+ * @param atype
+ * The type for the ADM records that are returned by the adapter.
+ * @return An instance of IDatasourceAdapter.
+ * @throws Exception
+ */
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
deleted file mode 100644
index 34eeff2..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IGenericFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
- public static final String KEY_TYPE_NAME="output-type-name";
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
new file mode 100644
index 0000000..0f9978e
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+
+/**
+ * A base interface for an adapter factory that creates instance of an adapter kind that
+ * is 'typed' in nature. A 'typed' adapter returns records with a pre-defined datatype.
+ */
+public interface ITypedDatasetAdapterFactory extends IAdapterFactory {
+
+ /**
+ * Creates an instance of IDatasourceAdapter.
+ *
+ * @param configuration
+ * The configuration parameters for the adapter that is instantiated.
+ * The passed-in configuration is used to configure the created instance of the adapter.
+ * @return An instance of IDatasourceAdapter.
+ * @throws Exception
+ */
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
deleted file mode 100644
index 84aa88d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-
-public interface ITypedFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
index ed43371..2040949 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,14 @@
import edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class NCFileSystemAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of NCFileSystemAdapter. An
+ * NCFileSystemAdapter reads external data residing on the local file system of
+ * an NC.
+ */
+public class NCFileSystemAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ public static final String NC_FILE_SYSTEM_ADAPTER_NAME = "localfs";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,12 +37,7 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "localfs";
+ return NC_FILE_SYSTEM_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
index 46a8004..bc00469 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,14 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
-public class PullBasedTwitterAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of PullBasedTwitterAdapter.
+ * This adapter provides the functionality of fetching tweets from Twitter service
+ * via pull-based Twitter API.
+ */
+public class PullBasedTwitterAdapterFactory implements ITypedDatasetAdapterFactory {
+
+ public static final String PULL_BASED_TWITTER_ADAPTER_NAME = "pull_twitter";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +36,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "pull_twitter";
- }
-
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
+ return PULL_BASED_TWITTER_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
index 1154d8a..bbbea38 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,13 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
-public class RSSFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of @see {RSSFeedAdapter}.
+ * RSSFeedAdapter provides the functionality of fetching an RSS based feed.
+ */
+public class RSSFeedAdapterFactory implements ITypedDatasetAdapterFactory {
+
+ public static final String RSS_FEED_ADAPTER_NAME = "rss_feed";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +35,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.FEED;
- }
-
- @Override
public String getName() {
return "rss_feed";
}
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
- }
-
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
index 6dbec48..fb4cc99 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,7 +16,7 @@
import java.util.Map;
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
@@ -30,13 +30,18 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+/*
+ * A single activity operator that provides the functionality of scanning data using an
+ * instance of the configured adapter.
+ */
public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
private static final long serialVersionUID = 1L;
private final String adapterFactory;
private final Map<String, String> adapterConfiguration;
private final IAType atype;
- private IExternalDatasetAdapterFactory datasourceAdapterFactory;
+ private IGenericDatasetAdapterFactory datasourceAdapterFactory;
public ExternalDataScanOperatorDescriptor(JobSpecification spec, String adapter, Map<String, String> arguments,
IAType atype, RecordDescriptor rDesc) {
@@ -79,11 +84,12 @@
}
+ @Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
throws HyracksDataException {
try {
- datasourceAdapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
+ datasourceAdapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
} catch (Exception e) {
throw new HyracksDataException("initialization of adapter failed", e);
}
@@ -93,7 +99,7 @@
writer.open();
IDatasourceAdapter adapter = null;
try {
- adapter = ((IExternalDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
+ adapter = ((IGenericDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
adapterConfiguration, atype);
adapter.initialize(ctx);
adapter.start(partition, writer);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
new file mode 100644
index 0000000..2da4e76
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Operator responsible for ingesting data from an external source. This
+ * operator uses a (configurable) adapter associated with the feed dataset.
+ */
+public class FeedIntakeOperatorDescriptor extends
+ AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String adapterFactoryClassName;
+ private final Map<String, String> adapterConfiguration;
+ private final IAType atype;
+ private final FeedId feedId;
+
+ private transient IAdapterFactory datasourceAdapterFactory;
+
+ public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId,
+ String adapter, Map<String, String> arguments, ARecordType atype,
+ RecordDescriptor rDesc) {
+ super(spec, 1, 1);
+ recordDescriptors[0] = rDesc;
+ this.adapterFactoryClassName = adapter;
+ this.adapterConfiguration = arguments;
+ this.atype = atype;
+ this.feedId = feedId;
+ }
+
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, final int partition,
+ int nPartitions) throws HyracksDataException {
+ ITypedDatasourceAdapter adapter;
+ try {
+ datasourceAdapterFactory = (IAdapterFactory) Class.forName(
+ adapterFactoryClassName).newInstance();
+ if (datasourceAdapterFactory instanceof IGenericDatasetAdapterFactory) {
+ adapter = (ITypedDatasourceAdapter) ((IGenericDatasetAdapterFactory) datasourceAdapterFactory)
+ .createAdapter(adapterConfiguration, atype);
+ } else if (datasourceAdapterFactory instanceof ITypedDatasetAdapterFactory) {
+ adapter = (ITypedDatasourceAdapter) ((ITypedDatasetAdapterFactory) datasourceAdapterFactory)
+ .createAdapter(adapterConfiguration);
+ } else {
+ throw new IllegalStateException(
+ " Unknown adapter factory type for "
+ + adapterFactoryClassName);
+ }
+ adapter.initialize(ctx);
+ } catch (Exception e) {
+ throw new HyracksDataException("initialization of adapter failed",
+ e);
+ }
+ return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
new file mode 100644
index 0000000..d0dbb98
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+/**
+ * The runtime for @see{FeedIntakeOperationDescriptor}
+ */
+public class FeedIntakeOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+ private final IDatasourceAdapter adapter;
+ private final int partition;
+ private final IFeedManager feedManager;
+ private final FeedId feedId;
+ private final LinkedBlockingQueue<IFeedMessage> inbox;
+ private FeedInboxMonitor feedInboxMonitor;
+
+ public FeedIntakeOperatorNodePushable(FeedId feedId, IDatasourceAdapter adapter, int partition) {
+ this.adapter = adapter;
+ this.partition = partition;
+ this.feedManager = (IFeedManager) FeedManager.INSTANCE;
+ this.feedId = feedId;
+ inbox = new LinkedBlockingQueue<IFeedMessage>();
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ if (adapter instanceof IManagedFeedAdapter) {
+ feedInboxMonitor = new FeedInboxMonitor((IManagedFeedAdapter) adapter, inbox, partition);
+ feedInboxMonitor.start();
+ feedManager.registerFeedMsgQueue(feedId, inbox);
+ }
+ writer.open();
+ try {
+ adapter.start(partition, writer);
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new HyracksDataException(e);
+ /*
+ we do not throw an exception, but allow the operator to close
+ gracefully throwing an exception here would result in a job abort and a
+ transaction roll back that undoes all the work done so far.
+ */
+
+ } finally {
+ writer.close();
+ if (adapter instanceof IManagedFeedAdapter) {
+ feedManager.unregisterFeedMsgQueue(feedId, inbox);
+ }
+ }
+ }
+
+ @Override
+ public void fail() throws HyracksDataException {
+ writer.close();
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ // do nothing
+ }
+}
+
+class FeedInboxMonitor extends Thread {
+
+ private LinkedBlockingQueue<IFeedMessage> inbox;
+ private final IManagedFeedAdapter adapter;
+
+ public FeedInboxMonitor(IManagedFeedAdapter adapter, LinkedBlockingQueue<IFeedMessage> inbox, int partition) {
+ this.inbox = inbox;
+ this.adapter = adapter;
+ }
+
+ @Override
+ public void run() {
+ while (true) {
+ try {
+ IFeedMessage feedMessage = inbox.take();
+ switch (feedMessage.getMessageType()) {
+ case STOP:
+ adapter.stop();
+ break;
+ case ALTER:
+ adapter.alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
+ break;
+ }
+ } catch (InterruptedException ie) {
+ break;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
new file mode 100644
index 0000000..d0dc5ca
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Sends a control message to the registered message queue for feed specified by its feedId.
+ */
+public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final FeedId feedId;
+ private final List<IFeedMessage> feedMessages;
+ private final boolean sendToAll = true;
+
+ public FeedMessageOperatorDescriptor(JobSpecification spec, String dataverse, String dataset,
+ List<IFeedMessage> feedMessages) {
+ super(spec, 0, 1);
+ this.feedId = new FeedId(dataverse, dataset);
+ this.feedMessages = feedMessages;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages, sendToAll, partition, nPartitions);
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
new file mode 100644
index 0000000..d03eeaa
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Runtime for the @see{FeedMessageOperatorDescriptor}
+ */
+public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+ private final FeedId feedId;
+ private final List<IFeedMessage> feedMessages;
+ private IFeedManager feedManager;
+
+ public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, List<IFeedMessage> feedMessages,
+ boolean applyToAll, int partition, int nPartitions) {
+ this.feedId = feedId;
+ if (applyToAll) {
+ this.feedMessages = feedMessages;
+ } else {
+ this.feedMessages = new ArrayList<IFeedMessage>();
+ feedMessages.add(feedMessages.get(partition));
+ }
+ feedManager = (IFeedManager) FeedManager.INSTANCE;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+ writer.open();
+ for (IFeedMessage feedMessage : feedMessages) {
+ feedManager.deliverMessage(feedId, feedMessage);
+ }
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ } finally {
+ writer.close();
+ }
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
index 64c8853..440ee8c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -34,66 +34,60 @@
*/
public abstract class AbstractDatasourceAdapter implements IDatasourceAdapter {
- private static final long serialVersionUID = -3510610289692452466L;
+ private static final long serialVersionUID = 1L;
- protected Map<String, String> configuration;
- protected transient AlgebricksPartitionConstraint partitionConstraint;
- protected IAType atype;
- protected IHyracksTaskContext ctx;
- protected AdapterType adapterType;
- protected boolean typeInfoRequired = false;
-
-
- protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
- static {
- typeToValueParserFactMap.put(ATypeTag.INT32,
- IntegerParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.FLOAT,
- FloatParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.DOUBLE,
- DoubleParserFactory.INSTANCE);
- typeToValueParserFactMap
- .put(ATypeTag.INT64, LongParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.STRING,
- UTF8StringParserFactory.INSTANCE);
- }
+ protected Map<String, String> configuration;
+ protected transient AlgebricksPartitionConstraint partitionConstraint;
+ protected IAType atype;
+ protected IHyracksTaskContext ctx;
+ protected AdapterType adapterType;
- protected static final HashMap<String, String> formatToParserFactoryMap = new HashMap<String, String>();
+ protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+ static {
+ typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
+ }
- public static final String KEY_FORMAT = "format";
- public static final String KEY_PARSER_FACTORY = "parser";
+ protected static final Map<String, String> formatToParserFactoryMap = initializeFormatParserFactoryMap();
- public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
- public static final String FORMAT_ADM = "adm";
+ public static final String KEY_FORMAT = "format";
+ public static final String KEY_PARSER_FACTORY = "parser";
+ public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
+ public static final String FORMAT_ADM = "adm";
- static {
- formatToParserFactoryMap
- .put(FORMAT_DELIMITED_TEXT,
- "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
- formatToParserFactoryMap
- .put(FORMAT_ADM,
- "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+ private static Map<String, String> initializeFormatParserFactoryMap() {
+ Map<String, String> map = new HashMap<String, String>();
+ map.put(FORMAT_DELIMITED_TEXT, "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
+ map.put(FORMAT_ADM, "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+ return map;
+ }
- }
+ /**
+ * Get the partition constraint chosen by the adapter.
+ * An adapter may have preferences as to where it needs to be instantiated and used.
+ */
+ public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
- public AlgebricksPartitionConstraint getPartitionConstraint() {
- return partitionConstraint;
- }
+ /**
+ * Get the configured value from the adapter configuration parameters, corresponding to the an attribute.
+ *
+ * @param attribute
+ * The attribute whose value needs to be obtained.
+ */
+ public String getAdapterProperty(String attribute) {
+ return configuration.get(attribute);
+ }
- public String getAdapterProperty(String attribute) {
- return configuration.get(attribute);
- }
-
- public Map<String, String> getConfiguration() {
- return configuration;
- }
-
- public void setAdapterProperty(String property, String value) {
- configuration.put(property, value);
- }
-
- public boolean isTypeInfoRequired() {
- return typeInfoRequired;
+ /**
+ * Get the adapter configuration parameters.
+ *
+ * @return A Map<String,String> instance representing the adapter configuration.
+ */
+ public Map<String, String> getConfiguration() {
+ return configuration;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
deleted file mode 100644
index 8f2a896..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public abstract class AbstractFeedDatasourceAdapter extends AbstractDatasourceAdapter implements IFeedDatasourceAdapter {
-
- protected AdapterDataFlowType adapterDataFlowType;
- protected ARecordType adapterOutputType;
-
- public AdapterDataFlowType getAdapterDataFlowType() {
- return adapterDataFlowType;
- }
-
- public ARecordType getAdapterOutputType() {
- return adapterOutputType;
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
index 1a6be25..ac36733 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,8 +16,13 @@
import java.io.Serializable;
+/**
+ * A unique identifier for a datasource adapter.
+ */
public class AdapterIdentifier implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private final String namespace;
private final String adapterName;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
index 3760d56..3898f7e 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.dataset.adapter;
import java.util.ArrayList;
@@ -5,19 +19,20 @@
import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IMutableFeedAdapter {
+/**
+ * An Adapter that provides the functionality of fetching news feed from CNN service
+ * The Adapter provides news feed as ADM records.
+ */
+public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IManagedFeedAdapter {
private static final long serialVersionUID = 2523303758114582251L;
private List<String> feedURLs = new ArrayList<String>();
- private String id_prefix = "";
+ private static Map<String, String> topicFeeds = new HashMap<String, String>();
public static final String KEY_RSS_URL = "topic";
public static final String KEY_INTERVAL = "interval";
-
- private static Map<String, String> topicFeeds = new HashMap<String, String>();
-
public static final String TOP_STORIES = "topstories";
public static final String WORLD = "world";
public static final String US = "us";
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
index dde273e..9f8cedc 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
@@ -16,14 +16,20 @@
import java.io.IOException;
import java.io.InputStream;
-import java.lang.reflect.Constructor;
import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.util.DNSResolverFactory;
+import edu.uci.ics.asterix.external.util.INodeResolver;
+import edu.uci.ics.asterix.external.util.INodeResolverFactory;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -33,10 +39,18 @@
public abstract class FileSystemBasedAdapter extends AbstractDatasourceAdapter {
- protected boolean userDefinedParser = false;
- protected String parserFactoryClassname;
+ private static final long serialVersionUID = 1L;
+ public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
public static final String KEY_DELIMITER = "delimiter";
+ public static final String KEY_PATH = "path";
+
+ protected ITupleParserFactory parserFactory;
+ protected ITupleParser parser;
+ protected static INodeResolver nodeResolver;
+
+ private static final INodeResolver DEFAULT_NODE_RESOLVER = new DNSResolverFactory().createNodeResolver();
+ private static final Logger LOGGER = Logger.getLogger(FileSystemBasedAdapter.class.getName());
public abstract InputStream getInputStream(int partition) throws IOException;
@@ -44,13 +58,10 @@
this.atype = atype;
}
- public FileSystemBasedAdapter() {
- }
-
@Override
public void start(int partition, IFrameWriter writer) throws Exception {
InputStream in = getInputStream(partition);
- ITupleParser parser = getTupleParser();
+ parser = getTupleParser();
parser.parse(in, writer);
}
@@ -63,44 +74,33 @@
@Override
public abstract AdapterType getAdapterType();
+ @Override
+ public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
+
protected ITupleParser getTupleParser() throws Exception {
- ITupleParser parser = null;
- if (userDefinedParser) {
- Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
- ITupleParserFactory parserFactory = (ITupleParserFactory) tupleParserFactoryClass.newInstance();
- parser = parserFactory.createTupleParser(ctx);
- } else {
- if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parser = getDelimitedDataTupleParser((ARecordType) atype);
-
- } else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parser = getADMDataTupleParser((ARecordType) atype);
- } else {
- throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
- }
- }
- return parser;
-
+ return parserFactory.createTupleParser(ctx);
}
protected void configureFormat() throws Exception {
- parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
- userDefinedParser = (parserFactoryClassname != null);
-
+ String parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
if (parserFactoryClassname == null) {
- if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_DELIMITED_TEXT);
+ String specifiedFormat = configuration.get(KEY_FORMAT);
+ if (specifiedFormat == null) {
+ throw new IllegalArgumentException(" Unspecified data format");
+ } else if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(specifiedFormat)) {
+ parserFactory = getDelimitedDataTupleParserFactory((ARecordType) atype);
} else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_ADM);
+ parserFactory = getADMDataTupleParserFactory((ARecordType) atype);
} else {
throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
}
+ } else {
+ parserFactory = (ITupleParserFactory) Class.forName(parserFactoryClassname).newInstance();
}
}
- protected ITupleParser getDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
- ITupleParser parser;
+ protected ITupleParserFactory getDelimitedDataTupleParserFactory(ARecordType recordType) throws AsterixException {
int n = recordType.getFieldTypes().length;
IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
for (int i = 0; i < n; i++) {
@@ -117,20 +117,43 @@
}
Character delimiter = delimiterValue.charAt(0);
- parser = new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter)
- .createTupleParser(ctx);
- return parser;
+ return new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter);
}
- protected ITupleParser getADMDataTupleParser(ARecordType recordType) throws AsterixException {
+ protected ITupleParserFactory getADMDataTupleParserFactory(ARecordType recordType) throws AsterixException {
try {
- Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
- Constructor ctor = tupleParserFactoryClass.getConstructor(ARecordType.class);
- ITupleParserFactory parserFactory = (ITupleParserFactory) ctor.newInstance(atype);
- return parserFactory.createTupleParser(ctx);
+ return new AdmSchemafullRecordParserFactory(recordType);
} catch (Exception e) {
throw new AsterixException(e);
}
}
+
+ protected INodeResolver getNodeResolver() {
+ if (nodeResolver == null) {
+ nodeResolver = initNodeResolver();
+ }
+ return nodeResolver;
+ }
+
+ private static INodeResolver initNodeResolver() {
+ INodeResolver nodeResolver = null;
+ String configuredNodeResolverFactory = System.getProperty(NODE_RESOLVER_FACTORY_PROPERTY);
+ if (configuredNodeResolverFactory != null) {
+ try {
+ nodeResolver = ((INodeResolverFactory) (Class.forName(configuredNodeResolverFactory).newInstance()))
+ .createNodeResolver();
+
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "Unable to create node resolver from the configured classname "
+ + configuredNodeResolverFactory + "\n" + e.getMessage());
+ }
+ nodeResolver = DEFAULT_NODE_RESOLVER;
+ }
+ } else {
+ nodeResolver = DEFAULT_NODE_RESOLVER;
+ }
+ return nodeResolver;
+ }
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
index 744683c..1e05b2f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -18,7 +18,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -28,8 +27,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.InputSplit;
@@ -43,29 +40,36 @@
import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.hadoop.util.InputSplitsProxy;
+/**
+ * Provides functionality for fetching external data stored in an HDFS instance.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
public class HDFSAdapter extends FileSystemBasedAdapter {
+ private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(HDFSAdapter.class.getName());
+ public static final String KEY_HDFS_URL = "hdfs";
+ public static final String KEY_INPUT_FORMAT = "input-format";
+ public static final String INPUT_FORMAT_TEXT = "text-input-format";
+ public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
+
private Object[] inputSplits;
private transient JobConf conf;
private InputSplitsProxy inputSplitsProxy;
- private static final Map<String, String> formatClassNames = new HashMap<String, String>();
+ private static final Map<String, String> formatClassNames = initInputFormatMap();
- public static final String KEY_HDFS_URL = "hdfs";
- public static final String KEY_HDFS_PATH = "path";
- public static final String KEY_INPUT_FORMAT = "input-format";
-
- public static final String INPUT_FORMAT_TEXT = "text-input-format";
- public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
-
- static {
+ private static Map<String, String> initInputFormatMap() {
+ Map<String, String> formatClassNames = new HashMap<String, String>();
formatClassNames.put(INPUT_FORMAT_TEXT, "org.apache.hadoop.mapred.TextInputFormat");
formatClassNames.put(INPUT_FORMAT_SEQUENCE, "org.apache.hadoop.mapred.SequenceFileInputFormat");
+ return formatClassNames;
}
public HDFSAdapter(IAType atype) {
@@ -77,77 +81,89 @@
configuration = arguments;
configureFormat();
configureJobConf();
- configurePartitionConstraint();
+ configureSplits();
+ }
+
+ private void configureSplits() throws IOException {
+ if (inputSplitsProxy == null) {
+ inputSplits = conf.getInputFormat().getSplits(conf, 0);
+ }
+ inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
}
private void configurePartitionConstraint() throws Exception {
- AlgebricksAbsolutePartitionConstraint absPartitionConstraint;
List<String> locations = new ArrayList<String>();
Random random = new Random();
- boolean couldConfigureLocationConstraints = true;
- if (inputSplitsProxy == null) {
- InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
- try {
- for (InputSplit inputSplit : inputSplits) {
- String[] dataNodeLocations = inputSplit.getLocations();
- // loop over all replicas until a split location coincides
- // with an asterix datanode location
- for (String datanodeLocation : dataNodeLocations) {
- Set<String> nodeControllersAtLocation = AsterixRuntimeUtil
- .getNodeControllersOnHostName(datanodeLocation);
- if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "No node controller found at " + datanodeLocation
- + " will look at replica location");
- }
- couldConfigureLocationConstraints = false;
- } else {
- int locationIndex = random.nextInt(nodeControllersAtLocation.size());
- String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
- locations.add(chosenLocation);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :"
- + chosenLocation);
- }
- couldConfigureLocationConstraints = true;
- break;
+ boolean couldConfigureLocationConstraints = false;
+ try {
+ Map<String, Set<String>> nodeControllers = AsterixRuntimeUtil.getNodeControllerMap();
+ for (Object inputSplit : inputSplits) {
+ String[] dataNodeLocations = ((InputSplit) inputSplit).getLocations();
+ if (dataNodeLocations == null || dataNodeLocations.length == 0) {
+ throw new IllegalArgumentException("No datanode locations found: check hdfs path");
+ }
+
+ // loop over all replicas until a split location coincides
+ // with an asterix datanode location
+ for (String datanodeLocation : dataNodeLocations) {
+ Set<String> nodeControllersAtLocation = null;
+ try {
+ nodeControllersAtLocation = nodeControllers.get(AsterixRuntimeUtil
+ .getIPAddress(datanodeLocation));
+ } catch (UnknownHostException uhe) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "Unknown host :" + datanodeLocation);
}
+ continue;
}
-
- // none of the replica locations coincides with an Asterix
- // node controller location.
- if (!couldConfigureLocationConstraints) {
- List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
- int locationIndex = random.nextInt(allNodeControllers.size());
- String chosenLocation = allNodeControllers.get(locationIndex);
- locations.add(chosenLocation);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "No local node controller found to process split : " + inputSplit
- + " will be processed by a remote node controller:" + chosenLocation);
+ if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "No node controller found at " + datanodeLocation
+ + " will look at replica location");
}
+ couldConfigureLocationConstraints = false;
+ } else {
+ int locationIndex = random.nextInt(nodeControllersAtLocation.size());
+ String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
+ locations.add(chosenLocation);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :" + chosenLocation);
+ }
+ couldConfigureLocationConstraints = true;
break;
}
}
- if (couldConfigureLocationConstraints) {
- partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
- } else {
- partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
+
+ /* none of the replica locations coincides with an Asterix
+ node controller location.
+ */
+ if (!couldConfigureLocationConstraints) {
+ List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
+ int locationIndex = random.nextInt(allNodeControllers.size());
+ String chosenLocation = allNodeControllers.get(locationIndex);
+ locations.add(chosenLocation);
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.log(Level.SEVERE, "No local node controller found to process split : " + inputSplit
+ + " will be processed by a remote node controller:" + chosenLocation);
+ }
}
- } catch (UnknownHostException e) {
- partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
}
- inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
+ partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.log(Level.SEVERE, "Encountered exception :" + e + " using count constraints");
+ }
+ partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
}
}
private JobConf configureJobConf() throws Exception {
conf = new JobConf();
- conf.set("fs.default.name", configuration.get(KEY_HDFS_URL));
+ conf.set("fs.default.name", configuration.get(KEY_HDFS_URL).trim());
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
conf.setClassLoader(HDFSAdapter.class.getClassLoader());
- conf.set("mapred.input.dir", configuration.get(KEY_HDFS_PATH));
- conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT)));
+ conf.set("mapred.input.dir", configuration.get(KEY_PATH).trim());
+ conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT).trim()));
return conf;
}
@@ -199,11 +215,10 @@
return reporter;
}
+ @SuppressWarnings("unchecked")
@Override
public InputStream getInputStream(int partition) throws IOException {
- Path path = new Path(inputSplits[partition].toString());
try {
- FileSystem fs = FileSystem.get(conf);
InputStream inputStream;
if (conf.getInputFormat() instanceof SequenceFileInputFormat) {
SequenceFileInputFormat format = (SequenceFileInputFormat) conf.getInputFormat();
@@ -227,61 +242,71 @@
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraint();
+ }
+ return partitionConstraint;
+ }
+
}
class HDFSStream extends InputStream {
- private ByteBuffer buffer;
- private int capacity;
- private RecordReader reader;
- private boolean readNext = true;
+ private RecordReader<Object, Text> reader;
private final Object key;
private final Text value;
+ private boolean hasMore = false;
+ private static final int EOL = "\n".getBytes()[0];
+ private Text pendingValue = null;
- public HDFSStream(RecordReader reader, IHyracksTaskContext ctx) throws Exception {
- capacity = ctx.getFrameSize();
- buffer = ByteBuffer.allocate(capacity);
+ public HDFSStream(RecordReader<Object, Text> reader, IHyracksTaskContext ctx) throws Exception {
this.reader = reader;
key = reader.createKey();
try {
value = (Text) reader.createValue();
} catch (ClassCastException cce) {
- throw new Exception("context is not of type org.apache.hadoop.io.Text"
+ throw new Exception("value is not of type org.apache.hadoop.io.Text"
+ " type not supported in sequence file format", cce);
}
- initialize();
- }
-
- private void initialize() throws Exception {
- boolean hasMore = reader.next(key, value);
- if (!hasMore) {
- buffer.limit(0);
- } else {
- buffer.position(0);
- buffer.limit(capacity);
- buffer.put(value.getBytes());
- buffer.put("\n".getBytes());
- buffer.flip();
- }
}
@Override
- public int read() throws IOException {
- if (!buffer.hasRemaining()) {
- boolean hasMore = reader.next(key, value);
- if (!hasMore) {
- return -1;
- }
- buffer.position(0);
- buffer.limit(capacity);
- buffer.put(value.getBytes());
- buffer.put("\n".getBytes());
- buffer.flip();
- return buffer.get();
- } else {
- return buffer.get();
+ public int read(byte[] buffer, int offset, int len) throws IOException {
+ int numBytes = 0;
+ if (pendingValue != null) {
+ System.arraycopy(pendingValue.getBytes(), 0, buffer, offset + numBytes, pendingValue.getLength());
+ buffer[offset + numBytes + pendingValue.getLength()] = (byte) EOL;
+ numBytes += pendingValue.getLength() + 1;
+ pendingValue = null;
}
+ while (numBytes < len) {
+ hasMore = reader.next(key, value);
+ if (!hasMore) {
+ return (numBytes == 0) ? -1 : numBytes;
+ }
+ int sizeOfNextTuple = value.getLength() + 1;
+ if (numBytes + sizeOfNextTuple > len) {
+ // cannot add tuple to current buffer
+ // but the reader has moved pass the fetched tuple
+ // we need to store this for a subsequent read call.
+ // and return this then.
+ pendingValue = value;
+ break;
+ } else {
+ System.arraycopy(value.getBytes(), 0, buffer, offset + numBytes, value.getLength());
+ buffer[offset + numBytes + value.getLength()] = (byte) EOL;
+ numBytes += sizeOfNextTuple;
+ }
+ }
+ return numBytes;
+ }
+
+    @Override
+    public int read() throws IOException {
+        // Single-byte reads are unsupported; fix unbalanced parenthesis in the message.
+        throw new NotImplementedException("Use read(byte[], int, int)");
+    }
}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
index ffcc658..3731eba 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -17,11 +17,17 @@
import java.util.Map;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+/**
+ * Provides the functionality of fetching data in form of ADM records from a Hive dataset.
+ */
public class HiveAdapter extends AbstractDatasourceAdapter {
+ private static final long serialVersionUID = 1L;
+
public static final String HIVE_DATABASE = "database";
public static final String HIVE_TABLE = "table";
public static final String HIVE_HOME = "hive-home";
@@ -56,7 +62,7 @@
tablePath = configuration.get(HIVE_WAREHOUSE_DIR) + "/" + tablePath + ".db" + "/"
+ configuration.get(HIVE_TABLE);
}
- configuration.put(HDFSAdapter.KEY_HDFS_PATH, tablePath);
+ configuration.put(HDFSAdapter.KEY_PATH, tablePath);
if (!configuration.get(KEY_FORMAT).equals(FORMAT_DELIMITED_TEXT)) {
throw new IllegalArgumentException("format" + configuration.get(KEY_FORMAT) + " is not supported");
}
@@ -81,4 +87,9 @@
hdfsAdapter.start(partition, writer);
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return hdfsAdapter.getPartitionConstraint();
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
index ccfb9bd..b0dc32f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -29,7 +29,6 @@
*/
public interface IDatasourceAdapter extends Serializable {
-
/**
* An adapter can be used to read from an external data source and may also
* allow writing to the external data source. This enum type indicates the
@@ -46,7 +45,6 @@
READ_WRITE
}
-
/**
* Returns the type of adapter indicating if the adapter can be used for
* reading from an external data source or writing to an external data
@@ -75,19 +73,6 @@
public String getAdapterProperty(String propertyKey);
/**
- * Allows setting a configuration property of the adapter with a specified
- * value.
- *
- * @caller Used by the wrapper operator to modify the behavior of the
- * adapter, if required.
- * @param property
- * the property to be set
- * @param value
- * the value for the property
- */
- public void setAdapterProperty(String property, String value);
-
- /**
* Configures the IDatasourceAdapter instance.
*
* @caller Scenario 1) Called during compilation of DDL statement that
@@ -131,9 +116,8 @@
* @Caller The wrapper operator configures its partition constraints from
* the constraints obtained from the adapter.
*/
- public AlgebricksPartitionConstraint getPartitionConstraint();
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
-
/**
* Allows the adapter to establish connection with the external data source
* expressing intent for data and providing any configuration parameters
@@ -148,5 +132,18 @@
*/
public void initialize(IHyracksTaskContext ctx) throws Exception;
+ /**
+ * Triggers the adapter to begin ingestion of data from the external source.
+ *
+ * @param partition
+ * The adapter could be running with a degree of parallelism.
+ * partition corresponds to the i'th parallel instance.
+ * @param writer
+ * The instance of frame writer that is used by the adapter to
+ * write frames to. The adapter packs the fetched bytes (from the
+ * external source) into frames and forwards the frames to an upstream
+ * receiving operator using the instance of IFrameWriter.
+ * @throws Exception
+ */
public void start(int partition, IFrameWriter writer) throws Exception;
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
deleted file mode 100644
index 282be78..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public interface IFeedDatasourceAdapter extends IDatasourceAdapter {
-
- /**
- * Represents the kind of data exchange that happens between the adapter and
- * the external data source. The data exchange can be either pull based or
- * push based. In the former case (pull), the request for data transfer is
- * initiated by the adapter. In the latter case (push) the adapter is
- * required to submit an initial request to convey intent for data.
- * Subsequently all data transfer requests are initiated by the external
- * data source.
- */
- public enum AdapterDataFlowType {
- PULL,
- PUSH
- }
-
- /**
- * An adapter can be a pull or a push based adapter. This method returns the
- * kind of adapter, that is whether it is a pull based adapter or a push
- * based adapter.
- *
- * @caller Compiler or wrapper operator: Compiler uses this API to choose
- * the right wrapper (push-based) operator that wraps around the
- * adapter and provides an iterator interface. If we decide to form
- * a single operator that handles both pull and push based adapter
- * kinds, then this method will be used by the wrapper operator for
- * switching between the logic for interacting with a pull based
- * adapter versus a push based adapter.
- * @return AdapterDataFlowType
- */
- public AdapterDataFlowType getAdapterDataFlowType();
-
- public ARecordType getAdapterOutputType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
new file mode 100644
index 0000000..a1eb075
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public interface IPullBasedFeedClient {
+
+ /**
+ * Writes the next fetched tuple into the provided instance of DataOutput.
+ *
+ * @param dataOutput
+ * The receiving channel for the feed client to write ADM records to.
+ * @return true if a record was written to the DataOutput instance
+ * false if no record was written to the DataOutput instance indicating non-availability of new data.
+ * @throws AsterixException
+ */
+ public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
+
+ /**
+ * Provides logic for any corrective action that feed client needs to execute on
+ * encountering an exception.
+ *
+ * @param e
+ * The exception encountered during fetching of data from external source
+ * @throws AsterixException
+ */
+ public void resetOnFailure(Exception e) throws AsterixException;
+
+ /**
+ * Terminates a feed, that is data ingestion activity ceases.
+ *
+ * @throws Exception
+ */
+ public void stop() throws Exception;
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
new file mode 100644
index 0000000..3a4b97b
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Implemented by a datasource adapter that has a fixed output type.
+ * Example: {@link PullBasedTwitterAdapter}
+ */
+public interface ITypedDatasourceAdapter extends IDatasourceAdapter {
+
+ public ARecordType getAdapterOutputType();
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
index 324e227..ae9b412 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -21,77 +21,91 @@
import java.io.InputStream;
import java.util.Map;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+/**
+ * An NCFileSystemAdapter reads external data residing on the local file
+ * system of an NC. File splits are specified as a comma-separated list of
+ * "nodeName://localPath" entries.
+ */
public class NCFileSystemAdapter extends FileSystemBasedAdapter {
- private static final long serialVersionUID = -4154256369973615710L;
- private FileSplit[] fileSplits;
+ private static final long serialVersionUID = 1L;
+ private FileSplit[] fileSplits;
- public static final String KEY_SPLITS = "path";
-
- public NCFileSystemAdapter(IAType atype) {
- super(atype);
- }
+ public NCFileSystemAdapter(IAType atype) {
+ super(atype);
+ }
- @Override
- public void configure(Map<String, String> arguments) throws Exception {
- this.configuration = arguments;
- String[] splits = arguments.get(KEY_SPLITS).split(",");
- configureFileSplits(splits);
- configurePartitionConstraint();
- configureFormat();
- }
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ this.configuration = arguments;
+ String[] splits = arguments.get(KEY_PATH).split(",");
+ configureFileSplits(splits);
+ configureFormat();
+ }
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- }
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ this.ctx = ctx;
+ }
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.READ;
+ }
- private void configureFileSplits(String[] splits) {
- if (fileSplits == null) {
- fileSplits = new FileSplit[splits.length];
- String nodeName;
- String nodeLocalPath;
- int count = 0;
- for (String splitPath : splits) {
- nodeName = splitPath.split(":")[0];
- nodeLocalPath = splitPath.split("://")[1];
- FileSplit fileSplit = new FileSplit(nodeName,
- new FileReference(new File(nodeLocalPath)));
- fileSplits[count++] = fileSplit;
- }
- }
- }
+ private void configureFileSplits(String[] splits) {
+ if (fileSplits == null) {
+ fileSplits = new FileSplit[splits.length];
+ String nodeName;
+ String nodeLocalPath;
+ int count = 0;
+ String trimmedValue;
+ for (String splitPath : splits) {
+ trimmedValue = splitPath.trim();
+ nodeName = trimmedValue.split(":")[0];
+ nodeLocalPath = trimmedValue.split("://")[1];
+ FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
+ fileSplits[count++] = fileSplit;
+ }
+ }
+ }
- private void configurePartitionConstraint() {
- String[] locs = new String[fileSplits.length];
- for (int i = 0; i < fileSplits.length; i++) {
- locs[i] = fileSplits[i].getNodeName();
- }
- partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
- }
+ private void configurePartitionConstraint() throws AsterixException {
+ String[] locs = new String[fileSplits.length];
+ String location;
+ for (int i = 0; i < fileSplits.length; i++) {
+ location = getNodeResolver().resolveNode(fileSplits[i].getNodeName());
+ locs[i] = location;
+ }
+ partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
+ }
- @Override
- public InputStream getInputStream(int partition) throws IOException {
- FileSplit split = fileSplits[partition];
- File inputFile = split.getLocalFile().getFile();
- InputStream in;
- try {
- in = new FileInputStream(inputFile);
- return in;
- } catch (FileNotFoundException e) {
- throw new IOException(e);
- }
- }
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ FileSplit split = fileSplits[partition];
+ File inputFile = split.getLocalFile().getFile();
+ InputStream in;
+ try {
+ in = new FileInputStream(inputFile);
+ return in;
+ } catch (FileNotFoundException e) {
+ throw new IOException(e);
+ }
+ }
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraint();
+ }
+ return partitionConstraint;
+ }
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
index 326775a..38686c2 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
@@ -17,17 +17,25 @@
import java.nio.ByteBuffer;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-public abstract class PullBasedAdapter extends AbstractFeedDatasourceAdapter implements IDatasourceAdapter {
+/**
+ * Acts as an abstract class for all pull-based external data adapters.
+ * Captures the common logic for obtaining bytes from an external source
+ * and packing them into frames as tuples.
+ */
+public abstract class PullBasedAdapter extends AbstractDatasourceAdapter implements ITypedDatasourceAdapter {
+
+ private static final long serialVersionUID = 1L;
protected ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
protected IPullBasedFeedClient pullBasedFeedClient;
+ protected ARecordType adapterOutputType;
private FrameTupleAppender appender;
private ByteBuffer frame;
@@ -40,10 +48,18 @@
appender.reset(frame, true);
pullBasedFeedClient = getFeedClient(partition);
+ boolean moreData = false;
while (true) {
tupleBuilder.reset();
try {
- pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput()); // nextTuple is a blocking call.
+ moreData = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput());
+ if (moreData) {
+ tupleBuilder.addFieldEndOffset();
+ appendTupleToFrame(writer);
+ } else {
+ FrameUtils.flushFrame(frame, writer);
+ break;
+ }
} catch (Exception failureException) {
try {
pullBasedFeedClient.resetOnFailure(failureException);
@@ -51,14 +67,15 @@
} catch (Exception recoveryException) {
throw new Exception(recoveryException);
}
-
}
- tupleBuilder.addFieldEndOffset();
- appendTupleToFrame(writer);
-
}
}
+ /**
+ * Allows an adapter to handle a runtime exception.
+ * @param e exception encountered during runtime
+ * @throws AsterixException
+ */
public void resetOnFailure(Exception e) throws AsterixException {
pullBasedFeedClient.resetOnFailure(e);
tupleBuilder.reset();
@@ -75,4 +92,9 @@
}
}
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return adapterOutputType;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
new file mode 100644
index 0000000..17ecd86
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
+
+ protected ARecordSerializerDeserializer recordSerDe;
+ protected AMutableRecord mutableRecord;
+ protected boolean messageReceived;
+ protected boolean continueIngestion=true;
+
+ public abstract boolean setNextRecord() throws Exception;
+
+ @Override
+ public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
+ try {
+ boolean newData = setNextRecord();
+ if (newData && continueIngestion) {
+ IAType t = mutableRecord.getType();
+ ATypeTag tag = t.getTypeTag();
+ try {
+ dataOutput.writeByte(tag.serialize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ recordSerDe.serialize(mutableRecord, dataOutput);
+ return true;
+ }
+ return false;
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ @Override
+ public void stop() {
+ continueIngestion = false;
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
index a7fd3dc..ebfbcad 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -17,103 +17,92 @@
import java.util.HashMap;
import java.util.Map;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.PullBasedTwitterFeedClient;
import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-public class PullBasedTwitterAdapter extends PullBasedAdapter implements
- IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * An adapter that provides the functionality of receiving tweets from the
+ * Twitter service in the form of ADM formatted records.
+ */
+public class PullBasedTwitterAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
- private int interval = 10;
- private boolean stopRequested = false;
- private boolean alterRequested = false;
- private Map<String, String> alteredParams = new HashMap<String, String>();
- private ARecordType recordType;
+
+ private static final long serialVersionUID = 1L;
+
+ public static final String QUERY = "query";
+ public static final String INTERVAL = "interval";
- private String[] fieldNames = { "id", "username", "location", "text",
- "timestamp" };
- private IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING,
- BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+ private boolean alterRequested = false;
+ private Map<String, String> alteredParams = new HashMap<String, String>();
+ private ARecordType recordType;
- private PullBasedTwitterFeedClient tweetClient;
+ private PullBasedTwitterFeedClient tweetClient;
- public static final String QUERY = "query";
- public static final String INTERVAL = "interval";
+ @Override
+ public IPullBasedFeedClient getFeedClient(int partition) {
+ return tweetClient;
+ }
- @Override
- public IPullBasedFeedClient getFeedClient(int partition) {
- return tweetClient;
- }
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ configuration = arguments;
+ String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+ BuiltinType.ASTRING };
+ recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes, false);
+ }
- @Override
- public void configure(Map<String, String> arguments) throws Exception {
- configuration = arguments;
- partitionConstraint = new AlgebricksCountPartitionConstraint(1);
- interval = Integer.parseInt(arguments.get(INTERVAL));
- recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes,
- false);
- }
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ this.ctx = ctx;
+ tweetClient = new PullBasedTwitterFeedClient(ctx, this);
+ }
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- tweetClient = new PullBasedTwitterFeedClient(ctx, this);
- }
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.READ;
+ }
- @Override
- public AdapterType getAdapterType() {
- return adapterType.READ;
- }
+ @Override
+ public void stop() {
+ tweetClient.stop();
+ }
- @Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
+ @Override
+ public void alter(Map<String, String> properties) {
+ alterRequested = true;
+ this.alteredParams = properties;
+ }
- }
+ public boolean isAlterRequested() {
+ return alterRequested;
+ }
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
+ public Map<String, String> getAlteredParams() {
+ return alteredParams;
+ }
- }
+ public void postAlteration() {
+ alteredParams = null;
+ alterRequested = false;
+ }
- @Override
- public void stop() throws Exception {
- stopRequested = true;
- }
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
- public boolean isStopRequested() {
- return stopRequested;
- }
-
- @Override
- public void alter(Map<String, String> properties) throws Exception {
- alterRequested = true;
- this.alteredParams = properties;
- }
-
- public boolean isAlterRequested() {
- return alterRequested;
- }
-
- public Map<String, String> getAlteredParams() {
- return alteredParams;
- }
-
- public void postAlteration() {
- alteredParams = null;
- alterRequested = false;
- }
-
- @Override
- public ARecordType getAdapterOutputType() {
- return recordType;
- }
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ partitionConstraint = new AlgebricksCountPartitionConstraint(1);
+ }
+ return partitionConstraint;
+ }
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
similarity index 78%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
index c1ea800..2a07472 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
import java.util.LinkedList;
import java.util.Map;
@@ -12,13 +26,17 @@
import twitter4j.TwitterFactory;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+/**
+ * An implementation of {@link PullBasedFeedClient} for the Twitter service.
+ * The feed client fetches data from Twitter service by sending request at
+ * regular (configurable) interval.
+ */
public class PullBasedTwitterFeedClient extends PullBasedFeedClient {
private String keywords;
@@ -54,7 +72,7 @@
private Tweet getNextTweet() throws TwitterException, InterruptedException {
if (tweetBuffer.isEmpty()) {
QueryResult result;
- Thread.currentThread().sleep(1000 * requestInterval);
+ Thread.sleep(1000 * requestInterval);
result = twitter.search(query);
tweetBuffer.addAll(result.getTweets());
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
index 124a9ed..611183c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -19,24 +19,26 @@
import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.RSSFeedClient;
import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * RSSFeedAdapter provides the functionality of fetching an RSS based feed.
+ */
+public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
private List<String> feedURLs = new ArrayList<String>();
private boolean isStopRequested = false;
private boolean isAlterRequested = false;
private Map<String, String> alteredParams = new HashMap<String, String>();
private String id_prefix = "";
- private int interval = 10;
private ARecordType recordType;
private IPullBasedFeedClient rssFeedClient;
@@ -53,40 +55,18 @@
}
@Override
- public void alter(Map<String, String> properties) throws Exception {
+ public void alter(Map<String, String> properties) {
isAlterRequested = true;
this.alteredParams = properties;
reconfigure(properties);
}
- public void postAlteration() {
- alteredParams = null;
- isAlterRequested = false;
- }
-
@Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void stop() throws Exception {
+ public void stop() {
isStopRequested = true;
}
@Override
- public AdapterDataFlowType getAdapterDataFlowType() {
- return AdapterDataFlowType.PULL;
- }
-
- @Override
public AdapterType getAdapterType() {
return AdapterType.READ;
}
@@ -151,4 +131,12 @@
return recordType;
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraints();
+ }
+ return partitionConstraint;
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
similarity index 86%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
index 9f04500..366b4af 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
import java.net.MalformedURLException;
import java.net.URL;
@@ -15,12 +29,15 @@
import com.sun.syndication.fetcher.impl.HashMapFeedInfoCache;
import com.sun.syndication.fetcher.impl.HttpURLFeedFetcher;
-import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.ARecordType;
+/**
+ * An implementation of {@link PullBasedFeedClient} responsible for
+ * fetching from an RSS feed source at regular interval.
+ */
@SuppressWarnings("rawtypes")
public class RSSFeedClient extends PullBasedFeedClient {
@@ -93,6 +110,7 @@
}
}
+ @SuppressWarnings("unchecked")
private void fetchFeed() {
try {
System.err.println("Retrieving feed " + feedURL);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
similarity index 67%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
index d34af73..537bf07 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,31 +12,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.util.Map;
+/**
+ * A feed control message containing the altered values for
+ * adapter configuration parameters. This message is dispatched
+ * to all runtime instances of the feed's adapter.
+ */
public class AlterFeedMessage extends FeedMessage {
+ private static final long serialVersionUID = 1L;
+
private final Map<String, String> alteredConfParams;
public AlterFeedMessage(Map<String, String> alteredConfParams) {
super(MessageType.ALTER);
- messageResponseMode = MessageResponseMode.SYNCHRONOUS;
this.alteredConfParams = alteredConfParams;
}
- public AlterFeedMessage(MessageResponseMode mode, Map<String, String> alteredConfParams) {
- super(MessageType.ALTER);
- messageResponseMode = mode;
- this.alteredConfParams = alteredConfParams;
- }
-
- @Override
- public MessageResponseMode getMessageResponseMode() {
- return messageResponseMode;
- }
-
@Override
public MessageType getMessageType() {
return MessageType.ALTER;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
similarity index 75%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
index 19076c4..b1889ee 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -11,19 +11,26 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
-*/
-package edu.uci.ics.asterix.feed.mgmt;
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.io.Serializable;
+/**
+ * A unique identifier for a feed (dataset).
+ */
public class FeedId implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private final String dataverse;
private final String dataset;
+ private final int hashcode;
public FeedId(String dataverse, String dataset) {
this.dataset = dataset;
this.dataverse = dataverse;
+ this.hashcode = (dataverse + "." + dataset).hashCode();
}
public String getDataverse() {
@@ -47,7 +54,12 @@
@Override
public int hashCode() {
- return dataverse.hashCode() + dataset.hashCode();
+ return hashcode;
+ }
+
+ @Override
+ public String toString() {
+ return dataverse + "." + dataset;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
index 748455a..8314267 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,7 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.mgmt;
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.util.HashMap;
import java.util.HashSet;
@@ -20,40 +20,50 @@
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+/**
+ * Handle (de-)registration of feeds for delivery of control messages.
+ */
public class FeedManager implements IFeedManager {
+ public static FeedManager INSTANCE = new FeedManager();
+
+ private FeedManager() {
+
+ }
+
private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
- private LinkedBlockingQueue<IFeedMessage> incomingMsgQueue = new LinkedBlockingQueue<IFeedMessage>();
@Override
- public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception {
+ public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException {
Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
- if (operatorQueues != null) {
- for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
- queue.put(feedMessage);
- }
+ try {
+ if (operatorQueues != null) {
+ for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
+ queue.put(feedMessage);
+ }
+ }
+ } catch (Exception e) {
+ throw new AsterixException(e);
}
- return true;
}
@Override
- public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+ public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
if (feedQueues == null) {
feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
}
feedQueues.add(queue);
outGoingMsgQueueMap.put(feedId, feedQueues);
-
}
@Override
- public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+ public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
if (feedQueues == null || !feedQueues.contains(queue)) {
- throw new IllegalArgumentException(" unable to de-register feed message queue");
+ throw new IllegalArgumentException(" Unable to de-register feed message queue. Unknown feedId " + feedId);
}
feedQueues.remove(queue);
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
index 1f1a020..af84d4f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,37 +12,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
-public class FeedMessage implements IFeedMessage {
+/**
+ * A control message that can be sent to the runtime instance of a
+ * feed's adapter.
+ */
+public class FeedMessage implements IFeedMessage {
- protected MessageResponseMode messageResponseMode = MessageResponseMode.SYNCHRONOUS;
+ private static final long serialVersionUID = 1L;
+
protected MessageType messageType;
-
- public FeedMessage(MessageType messageType){
+ public FeedMessage(MessageType messageType) {
this.messageType = messageType;
}
-
- public MessageResponseMode getMessageResponseMode() {
- return messageResponseMode;
- }
-
-
- public void setMessageResponseMode(MessageResponseMode messageResponseMode) {
- this.messageResponseMode = messageResponseMode;
- }
-
-
public MessageType getMessageType() {
return messageType;
}
-
public void setMessageType(MessageType messageType) {
this.messageType = messageType;
}
-
-
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
new file mode 100644
index 0000000..587d5a7
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * Handle (de-)registration of feeds for delivery of control messages.
+ */
+public interface IFeedManager {
+
+ /**
+ * Register an input message queue for a feed specified by feedId.
+ * All messages sent to a feed are directed to the registered queue(s).
+ *
+ * @param feedId
+ * an identifier for the feed dataset.
+ * @param queue
+ * an input message queue for receiving control messages.
+ */
+ public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+ /**
+ * Unregister an input message queue for a feed specified by feedId.
+ * A feed prior to finishing should unregister all previously registered queue(s)
+ * as it is no longer active and thus need not process any control messages.
+ *
+ * @param feedId
+ * an identifier for the feed dataset.
+ * @param queue
+ * an input message queue for receiving control messages.
+ */
+ public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+ /**
+ * Deliver a message to a feed with a given feedId.
+ *
+ * @param feedId
+ * identifier for the feed dataset.
+ * @param feedMessage
+ * control message that needs to be delivered.
+ * @throws Exception
+ */
+ public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException;
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
similarity index 72%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
index dfb4f91..9e1e907 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,26 +12,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.io.Serializable;
public interface IFeedMessage extends Serializable {
- public enum MessageResponseMode {
- SYNCHRONOUS,
- ASYNCHRONOUS,
- }
-
public enum MessageType {
STOP,
- SUSPEND,
- RESUME,
ALTER,
}
- public MessageResponseMode getMessageResponseMode();
-
public MessageType getMessageType();
-
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java
new file mode 100644
index 0000000..ff6bbdf
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolver.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+import java.util.Random;
+import java.util.Set;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
+
+/**
+ * Resolves a value (DNS/IP Address) to the id of a Node Controller running at the location.
+ */
+public class DNSResolver implements INodeResolver {
+
+ private static Random random = new Random();
+
+ @Override
+ public String resolveNode(String value) throws AsterixException {
+ try {
+ String ipAddress = AsterixRuntimeUtil.getIPAddress(value);
+ Set<String> nodeControllers = AsterixRuntimeUtil.getNodeControllersOnIP(ipAddress);
+ if (nodeControllers == null || nodeControllers.isEmpty()) {
+ throw new AsterixException(" No node controllers found at the address: " + value);
+ }
+ String chosenNCId = nodeControllers.toArray(new String[]{})[random
+ .nextInt(nodeControllers.size())];
+ return chosenNCId;
+ } catch (AsterixException ae) {
+ throw ae;
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolverFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolverFactory.java
new file mode 100644
index 0000000..6b56601
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/DNSResolverFactory.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+/**
+ * Factory for creating instance of {@link DNSResolver}
+ */
+public class DNSResolverFactory implements INodeResolverFactory {
+
+ private static final INodeResolver INSTANCE = new DNSResolver();
+
+ @Override
+ public INodeResolver createNodeResolver() {
+ return INSTANCE;
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java
new file mode 100644
index 0000000..d0e8a64
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolver.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * A policy for resolving a name to a node controller id.
+ *
+ */
+public interface INodeResolver {
+
+ /**
+ * Resolve a passed-in value to a node controller id.
+ *
+ * @param value
+ * string to be resolved
+ * @return resolved result (a node controller id)
+ * @throws AsterixException
+ */
+ public String resolveNode(String value) throws AsterixException;
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java
new file mode 100644
index 0000000..2abde9c
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/INodeResolverFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+/**
+ * Factory for creating an instance of INodeResolver
+ *
+ * @see INodeResolver
+ */
+public interface INodeResolverFactory {
+
+ /**
+ * Create an instance of {@link INodeResolver}
+ *
+ * @return an instance of INodeResolver
+ */
+ public INodeResolver createNodeResolver();
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentitiyResolverFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentitiyResolverFactory.java
new file mode 100644
index 0000000..5161203
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentitiyResolverFactory.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+/**
+ * Factory for creating an instance of @see {IdentityResolver}.
+ * Identity resolver simply resolves a value to itself and is useful when value being resolved
+ * is a node controller id.
+ */
+public class IdentitiyResolverFactory implements INodeResolverFactory {
+
+ private static INodeResolver INSTANCE = new IdentityResolver();
+
+ @Override
+ public INodeResolver createNodeResolver() {
+ return INSTANCE;
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentityResolver.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentityResolver.java
new file mode 100644
index 0000000..7ff1f9b
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/util/IdentityResolver.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.util;
+
+/**
+ * Identity resolver simply resolves a value to itself and is useful when value being resolved
+ * is a node controller id.
+ */
+public class IdentityResolver implements INodeResolver {
+
+ @Override
+ public String resolveNode(String value) {
+ return value;
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
deleted file mode 100644
index f0e34c3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-public interface IFeedClient {
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
deleted file mode 100644
index f6b37b3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public interface IPullBasedFeedClient {
-
- public enum status {
- MORE_DATA,
- END_OF_DATA
- }
-
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
-
- public void resetOnFailure(Exception e) throws AsterixException;
-
- public void suspend() throws Exception;
-
- public void resume() throws Exception;
-
- public void stop() throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
deleted file mode 100644
index 501acd4..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.AMutableRecord;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
-
- protected ARecordSerializerDeserializer recordSerDe;
- protected AMutableRecord mutableRecord;
- protected boolean messageReceived;
-
- public abstract boolean setNextRecord() throws Exception;
-
- @Override
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
- try {
- boolean newData = setNextRecord();
- if (newData) {
- IAType t = mutableRecord.getType();
- ATypeTag tag = t.getTypeTag();
- try {
- dataOutput.writeByte(tag.serialize());
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- recordSerDe.serialize(mutableRecord, dataOutput);
- return true;
- }
- return false;
- } catch (Exception e) {
- throw new AsterixException(e);
- }
-
- }
-
- /*
- * public void displayFeedRecord() { StringBuilder builder = new
- * StringBuilder(); int numFields = recordType.getFieldNames().length; for
- * (int i = 0; i < numFields; i++) {
- * builder.append(mutableRecord.getValueByPos(i).toString());
- * builder.append("|"); } }
- */
-
- @Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void stop() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
index 7cf1fb1..6f993ae 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -14,24 +14,30 @@
*/
package edu.uci.ics.asterix.feed.managed.adapter;
+import java.util.Map;
+
+/**
+ * Interface implemented by an adapter that can be controlled or managed by external
+ * commands (stop,alter)
+ */
public interface IManagedFeedAdapter {
- public enum OperationState {
- SUSPENDED,
- // INACTIVE state signifies that the feed dataset is not
- // connected with the external world through the feed
- // adapter.
- ACTIVE,
- // ACTIVE state signifies that the feed dataset is connected to the
- // external world using an adapter that may put data into the dataset.
- STOPPED,
- INACTIVE
- }
+ /**
+ * Discontinue the ingestion of data and end the feed.
+ *
+ * @throws Exception
+ */
+ public void stop();
- public void suspend() throws Exception;
-
- public void resume() throws Exception;
-
- public void stop() throws Exception;
+ /**
+ * Modify the adapter configuration parameters. This method is called
+ * when the configuration parameters need to be modified while the adapter
+ * is ingesting data in an active feed.
+ *
+ * @param properties
+ * A HashMap containing the set of configuration parameters
+ * that need to be altered.
+ */
+ public void alter(Map<String, String> properties);
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
deleted file mode 100644
index 82fb67d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-
-/**
- * Provider for all the sub-systems (transaction/lock/log/recovery) managers.
- * Users of transaction sub-systems must obtain them from the provider.
- */
-public class FeedSystemProvider {
- private static final IFeedManager feedManager = new FeedManager();
-
- public static IFeedManager getFeedManager() {
- return feedManager;
- }
-}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
deleted file mode 100644
index a8c1303..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-
-public interface IFeedManager {
-
- public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception;
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
deleted file mode 100644
index c300b0d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
- private static final long serialVersionUID = 1L;
-
- private final String adapterFactoryClassName;
- private final Map<String, String> adapterConfiguration;
- private final IAType atype;
- private final FeedId feedId;
-
- private transient IFeedDatasetAdapterFactory datasourceAdapterFactory;
-
- public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId, String adapter,
- Map<String, String> arguments, ARecordType atype, RecordDescriptor rDesc) {
- super(spec, 1, 1);
- recordDescriptors[0] = rDesc;
- this.adapterFactoryClassName = adapter;
- this.adapterConfiguration = arguments;
- this.atype = atype;
- this.feedId = feedId;
- }
-
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
- IFeedDatasourceAdapter adapter;
- try {
- datasourceAdapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterFactoryClassName)
- .newInstance();
- if (datasourceAdapterFactory.getFeedAdapterType().equals(FeedAdapterType.GENERIC)) {
- adapter = (IFeedDatasourceAdapter) ((IGenericFeedDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration, atype);
- } else {
- adapter = (IFeedDatasourceAdapter) ((ITypedFeedDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration);
- }
- adapter.initialize(ctx);
- } catch (Exception e) {
- throw new HyracksDataException("initialization of adapter failed", e);
- }
- return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
- }
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
deleted file mode 100644
index ff6216a..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
+++ /dev/null
@@ -1,110 +0,0 @@
-package edu.uci.ics.asterix.feed.operator;
-
-import java.nio.ByteBuffer;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-public class FeedIntakeOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
- private final IDatasourceAdapter adapter;
- private final int partition;
- private final IFeedManager feedManager;
- private final FeedId feedId;
- private final LinkedBlockingQueue<IFeedMessage> inbox;
- private FeedInboxMonitor feedInboxMonitor;
-
- public FeedIntakeOperatorNodePushable(FeedId feedId, IDatasourceAdapter adapter, int partition) {
- this.adapter = adapter;
- this.partition = partition;
- this.feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
- this.feedId = feedId;
- inbox = new LinkedBlockingQueue<IFeedMessage>();
- }
-
- @Override
- public void open() throws HyracksDataException {
- if (adapter instanceof IManagedFeedAdapter) {
- feedInboxMonitor = new FeedInboxMonitor((IManagedFeedAdapter) adapter, inbox, partition);
- feedInboxMonitor.start();
- feedManager.registerFeedOperatorMsgQueue(feedId, inbox);
- }
- writer.open();
- try {
- adapter.start(partition, writer);
- } catch (Exception e) {
- e.printStackTrace();
- // we do not throw an exception, but allow the operator to close
- // gracefully
- // Throwing an exception here would result in a job abort and a
- // transaction roll back
- // that undoes all the work done so far.
-
- } finally {
- writer.close();
- }
- }
-
- @Override
- public void fail() throws HyracksDataException {
- writer.close();
- }
-
- @Override
- public void close() throws HyracksDataException {
- writer.close();
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- // TODO Auto-generated method stub
- }
-}
-
-class FeedInboxMonitor extends Thread {
-
- private LinkedBlockingQueue<IFeedMessage> inbox;
- private final IManagedFeedAdapter adapter;
-
- public FeedInboxMonitor(IManagedFeedAdapter adapter, LinkedBlockingQueue<IFeedMessage> inbox, int partition) {
- this.inbox = inbox;
- this.adapter = adapter;
- }
-
- @Override
- public void run() {
- while (true) {
- try {
- IFeedMessage feedMessage = inbox.take();
- switch (feedMessage.getMessageType()) {
- case SUSPEND:
- adapter.suspend();
- break;
- case RESUME:
- adapter.resume();
- break;
- case STOP:
- adapter.stop();
- break;
- case ALTER:
- ((IMutableFeedAdapter) adapter).alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
- break;
- }
- } catch (InterruptedException ie) {
- break;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
- }
-
-}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
deleted file mode 100644
index c66c49e..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedMessageOperatorDescriptor extends
- AbstractSingleActivityOperatorDescriptor {
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private final boolean sendToAll = true;
-
- public FeedMessageOperatorDescriptor(JobSpecification spec,
- String dataverse, String dataset, List<IFeedMessage> feedMessages) {
- super(spec, 0, 1);
- this.feedId = new FeedId(dataverse, dataset);
- this.feedMessages = feedMessages;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) throws HyracksDataException {
- return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages,
- sendToAll, partition, nPartitions);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
deleted file mode 100644
index 37e3ba3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-public class FeedMessageOperatorNodePushable extends
- AbstractUnaryOutputSourceOperatorNodePushable {
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private IFeedManager feedManager;
-
- public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx,
- FeedId feedId, List<IFeedMessage> feedMessages, boolean applyToAll,
- int partition, int nPartitions) {
- this.feedId = feedId;
- if (applyToAll) {
- this.feedMessages = feedMessages;
- } else {
- this.feedMessages = new ArrayList<IFeedMessage>();
- feedMessages.add(feedMessages.get(partition));
- }
- feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
- }
-
- @Override
- public void initialize() throws HyracksDataException {
- try {
- writer.open();
- for (IFeedMessage feedMessage : feedMessages) {
- feedManager.deliverMessage(feedId, feedMessage);
- }
- } catch (Exception e) {
- throw new HyracksDataException(e);
- } finally {
- writer.close();
- }
- }
-
-}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md b/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md
new file mode 100644
index 0000000..eeaffc9
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/Asterix_ADM_Parser.md
@@ -0,0 +1,53 @@
+The Asterix ADM Parser
+======================
+
+The ADM parser inside Asterix is composed of two different components:
+
+* **The Parser** AdmTupleParser, which converts the adm tokens in internal objects
+* **The Lexer** AdmLexer, which scans the adm file and returns a list of adm tokens
+
+These two classes belong to the package:
+
+ edu.uci.ics.asterix.runtime.operators.file
+
+The Parser is loaded through a factory (*AdmSchemafullRecordParserFactory*) by
+
+ edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter extends AbstractDatasourceAdapter
+
+
+How to add a new datatype
+-------------------------
+The ADM format allows two different kinds of datatype:
+
+* primitive
+* with constructor
+
+A primitive datatype allows writing the actual value of the field without extra markup:
+
+ { name : "Diego", age : 23 }
+
+while datatypes with a constructor require specifying first the type of the value and then a string with the serialized value
+
+ { center : point3d("P2.1,3,8.5") }
+
+In order to add a new datatype the steps are:
+
+1. Add the new token to the **Lexer**
+ * **if the datatype is primitive** it is necessary to create a TOKEN able to recognize **the format of the value**
+ * **if the datatype has a constructor** it is necessary to create **only** a TOKEN able to recognize **the name of the constructor**
+
+2. Change the **Parser** so that it correctly converts the new token into internal objects
+ * This will require you to **add new cases to the switch-case statements** and to introduce **a serializer/deserializer object** for that datatype.
+
+
+The Lexer
+----------
+To add a new datatype or change the token definitions you have to change ONLY the file adm.grammar located in
+ asterix-runtime/src/main/resources/adm.grammar
+The lexer will be generated from that definition file during each Maven build.
+
+The Maven configuration is located in asterix-runtime/pom.xml
+
+
+> Author: Diego Giorgini - diegogiorgini@gmail.com
+> 6 December 2012
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/README.md b/asterix-maven-plugins/lexer-generator-maven-plugin/README.md
new file mode 100644
index 0000000..b3632e6
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/README.md
@@ -0,0 +1,111 @@
+Lexer Generator
+===============
+
+This tool automates the creation of Hand-Coded-Like Lexers.
+It was created to address the performance issues of other (more advanced) lexer generators like JavaCC that arise when you need to scan terabytes of data. In particular it is *~20x faster* than JavaCC and can typically parse data from a normal hard disk at *more than 70 MB/s*.
+
+
+Maven Plugin (to put inside pom.xml)
+-------------------------------------
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>lexer-generator-maven-plugin</artifactId>
+ <version>0.1-SNAPSHOT</version>
+ <configuration>
+ <grammarFile>src/main/java/edu/uci/ics/asterix/runtime/operators/file/adm/adm.grammar</grammarFile>
+ <outputDir>${project.build.directory}/generated-sources</outputDir>
+ </configuration>
+ <executions>
+ <execution>
+ <id>generate-lexer</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>generate-lexer</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+
+Command line
+-------------
+ LexerGenerator\nusage: java LexerGenerator <configuration file>
+
+
+
+What Hand-Coded-Like means and why it is so fast
+------------------------------------------------
+Most lexers use a finite state machine encoded in a data structure called a [State Transition Table](http://en.wikipedia.org/wiki/State_transition_table).
+While elegant and practical, this approach requires some extra controls and operations to deal with the data structure at runtime. A different approach consists in encoding the state machine as actual code; in this way all the operations performed are limited to the minimum amount needed to parse our grammar.
+A common problem with this kind of hard-hand-coded lexer is that it is almost impossible to maintain and change; this is the reason for this Lexer Generator, which is able to produce a Hand-Coded-Like lexer starting from a grammar specification.
+
+Another big difference with most lexer generators (especially the ones for Java) is that, since it is optimized for performance, we **don't return objects** and we **use the minimum possible number of objects internally**.
+This is actually the main reason for the ~20x speedup when compared with JavaCC.
+
+
+Configuration File
+------------------
+It is a simple *key: value* configuration file plus the *specification of your grammar*.
+The configuration keys are listed below:
+
+ # LEXER GENERATOR configuration file
+ # ---------------------------------------
+ # Place *first* the generic configuration
+ # then list your grammar.
+
+ PACKAGE: edu.uci.ics.asterix.admfast.parser
+ LEXER_NAME: AdmLexer
+ OUTPUT_DIR: output/
+
+
+Specify The Grammar
+-------------------
+Your grammar has to be listed in the configuration file after the *TOKENS:* keyword.
+
+ TOKENS:
+
+ BOOLEAN_LIT = string(boolean)
+ COMMA = char(\,)
+ COLON = char(:)
+ STRING_LITERAL = char("), anythingUntil(")
+ INT_LITERAL = signOrNothing(), digitSequence()
+ INT8_LITERAL = token(INT_LITERAL), string(i8)
+ @EXPONENT = caseInsensitiveChar(e), signOrNothing(), digitSequence()
+ DOUBLE_LITERAL = signOrNothing(), digitSequence(), char(.), digitSequence(), token(@EXPONENT)
+ DOUBLE_LITERAL = signOrNothing(), digitSequence(), token(@EXPONENT)
+
+Each token is composed of a **name** and a sequence of **rules**.
+Each rule is then written in the format: **constructor(parameter)**
+The list of available rules is coded inside *NodeChainFactory.java*
+
+You can write more than one sequence of rules for a token just by adding another line and repeating the token name.
+
+You can reuse the rules of a token inside another one with the special rule: **token(** *TOKEN_NAME* **)**
+
+Lastly you can define *auxiliary* token definitions that will not be encoded in the final lexer (but that can be useful inside other token definitions) just by **starting the token name with @**.
+
+**Attention:** please pay attention not to write rules that, once merged into the state machine, would lead to a *conflict between transitions*, such as a transition for a generic digit and one for the digit 0 from the same node.
+
+The result: MyLexer
+-------------------
+The result of the execution of the LexerGenerator is the creation of the lexer inside the directory *components*.
+The lexer is extremely simple and minimal and can be used like an Iterator:
+
+ MyLexer myLexer = new MyLexer(new FileReader(file));
+ while((token = myLexer.next()) != MyLexer.TOKEN_EOF){
+ System.out.println(MyLexer.tokenKindToString(token));
+ }
+
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
new file mode 100644
index 0000000..dc5e207
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
@@ -0,0 +1,37 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>lexer-generator-maven-plugin</artifactId>
+ <version>0.1</version>
+ <packaging>maven-plugin</packaging>
+ <name>lexer-generator-maven-plugin</name>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-plugin-api</artifactId>
+ <version>2.0.2</version>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java
new file mode 100644
index 0000000..512f3d0
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGenerator.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.Reader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map.Entry;
+import java.util.Set;
+import org.apache.maven.plugin.logging.Log;
+
+public class LexerGenerator {
+ private LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+ private Log logger;
+
+ public LexerGenerator() {
+ }
+
+ public LexerGenerator(Log logger) {
+ this.logger = logger;
+ }
+
+ private void log(String info) {
+ if (logger == null) {
+ System.out.println(info);
+ } else {
+ logger.info(info);
+ }
+ }
+
+ public void addToken(String rule) throws Exception {
+ Token newToken;
+ if (rule.charAt(0) == '@') {
+ newToken = new TokenAux(rule, tokens);
+ } else {
+ newToken = new Token(rule, tokens);
+ }
+ Token existingToken = tokens.get(newToken.getName());
+ if (existingToken == null) {
+ tokens.put(newToken.getName(), newToken);
+ } else {
+ existingToken.merge(newToken);
+ }
+ }
+
+ public void generateLexer(HashMap<String, String> config) throws Exception {
+ LexerNode main = this.compile();
+ config.put("TOKENS_CONSTANTS", this.tokensConstants());
+ config.put("TOKENS_IMAGES", this.tokensImages());
+ config.put("LEXER_LOGIC", main.toJava());
+ config.put("LEXER_AUXFUNCTIONS", replaceParams(this.auxiliaryFunctions(main), config));
+ String[] files = { "/Lexer.java", "/LexerException.java" };
+ String outputDir = config.get("OUTPUT_DIR");
+ (new File(outputDir)).mkdirs();
+ for (String file : files) {
+ String input = readFile(LexerGenerator.class.getResourceAsStream(file));
+ String fileOut = file.replace("Lexer", config.get("LEXER_NAME"));
+ String output = replaceParams(input, config);
+ log("Generating: " + file + "\t>>\t" + fileOut);
+ FileWriter out = new FileWriter((new File(outputDir, fileOut)).toString());
+ out.write(output);
+ out.close();
+ log(" [done]\n");
+ }
+ }
+
+ public String printParsedGrammar() {
+ StringBuilder result = new StringBuilder();
+ for (Token token : tokens.values()) {
+ result.append(token.toString()).append("\n");
+ }
+ return result.toString();
+ }
+
+ private LexerNode compile() throws Exception {
+ LexerNode main = new LexerNode();
+ for (Token token : tokens.values()) {
+ if (token instanceof TokenAux)
+ continue;
+ main.merge(token.getNode());
+ }
+ return main;
+ }
+
+ private String tokensImages() {
+ StringBuilder result = new StringBuilder();
+ Set<String> uniqueTokens = tokens.keySet();
+ for (String token : uniqueTokens) {
+ result.append(", \"<").append(token).append(">\" ");
+ }
+ return result.toString();
+ }
+
+ private String tokensConstants() {
+ StringBuilder result = new StringBuilder();
+ Set<String> uniqueTokens = tokens.keySet();
+ int i = 2;
+ for (String token : uniqueTokens) {
+ result.append(", TOKEN_").append(token).append("=").append(i).append(" ");
+ i++;
+ }
+ return result.toString();
+ }
+
+ private String auxiliaryFunctions(LexerNode main) {
+ StringBuilder result = new StringBuilder();
+ Set<String> functions = main.neededAuxFunctions();
+ for (String token : functions) {
+ result.append("private int parse_" + token
+ + "(char currentChar) throws IOException, [LEXER_NAME]Exception{\n");
+ result.append(tokens.get(token).getNode().toJavaAuxFunction());
+ result.append("\n}\n\n");
+ }
+ return result.toString();
+ }
+
+ private static String readFile(Reader input) throws FileNotFoundException, IOException {
+ StringBuffer fileData = new StringBuffer(1000);
+ BufferedReader reader = new BufferedReader(input);
+ char[] buf = new char[1024];
+ int numRead = 0;
+ while ((numRead = reader.read(buf)) != -1) {
+ String readData = String.valueOf(buf, 0, numRead);
+ fileData.append(readData);
+ buf = new char[1024];
+ }
+ reader.close();
+ return fileData.toString();
+ }
+
+ private static String readFile(InputStream input) throws FileNotFoundException, IOException {
+ if (input == null) {
+ throw new FileNotFoundException();
+ }
+ return readFile(new InputStreamReader(input));
+ }
+
+ private static String readFile(String fileName) throws FileNotFoundException, IOException {
+ return readFile(new FileReader(fileName));
+ }
+
+ private static String replaceParams(String input, HashMap<String, String> config) {
+ for (Entry<String, String> param : config.entrySet()) {
+ String key = "\\[" + param.getKey() + "\\]";
+ String value = param.getValue();
+ input = input.replaceAll(key, value);
+ }
+ return input;
+ }
+
+ public static void main(String args[]) throws Exception {
+ if (args.length == 0 || args[0] == "--help" || args[0] == "-h") {
+ System.out.println("LexerGenerator\nusage: java LexerGenerator <configuration file>");
+ return;
+ }
+
+ LexerGenerator lexer = new LexerGenerator();
+ HashMap<String, String> config = new HashMap<String, String>();
+
+ System.out.println("Config file:\t" + args[0]);
+ String input = readFile(args[0]);
+ boolean tokens = false;
+ for (String line : input.split("\r?\n")) {
+ line = line.trim();
+ if (line.length() == 0 || line.charAt(0) == '#')
+ continue;
+ if (tokens == false && !line.equals("TOKENS:")) {
+ config.put(line.split("\\s*:\\s*")[0], line.split("\\s*:\\s*")[1]);
+ } else if (line.equals("TOKENS:")) {
+ tokens = true;
+ } else {
+ lexer.addToken(line);
+ }
+ }
+
+ String parsedGrammar = lexer.printParsedGrammar();
+ lexer.generateLexer(config);
+ System.out.println("\nGenerated grammar:");
+ System.out.println(parsedGrammar);
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java
new file mode 100644
index 0000000..11ee1d5
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerGeneratorMojo.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import edu.uci.ics.asterix.lexergenerator.LexerGenerator;
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.HashMap;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+
+import java.io.File;
+
+/**
+ * @goal generate-lexer
+ * @phase generate-sources
+ * @requiresDependencyResolution compile
+ */
+public class LexerGeneratorMojo extends AbstractMojo {
+ /**
+ * parameter injected from pom.xml
+ *
+ * @parameter
+ * @required
+ */
+ private File grammarFile;
+
+ /**
+ * parameter injected from pom.xml
+ *
+ * @parameter
+ * @required
+ */
+ private File outputDir;
+
+ public void execute() throws MojoExecutionException {
+ LexerGenerator lexer = new LexerGenerator(getLog());
+ HashMap<String, String> config = new HashMap<String, String>();
+ getLog().info("--- Lexer Generator Maven Plugin - started with grammarFile: " + grammarFile.toString());
+ try {
+ String input = readFile(grammarFile);
+ config.put("OUTPUT_DIR", outputDir.toString());
+ boolean tokens = false;
+ for (String line : input.split("\r?\n")) {
+ line = line.trim();
+ if (line.length() == 0 || line.charAt(0) == '#')
+ continue;
+ if (tokens == false && !line.equals("TOKENS:")) {
+ config.put(line.split("\\s*:\\s*")[0], line.split("\\s*:\\s*")[1]);
+ } else if (line.equals("TOKENS:")) {
+ tokens = true;
+ } else {
+ lexer.addToken(line);
+ }
+ }
+ lexer.generateLexer(config);
+ } catch (Throwable e) {
+ throw new MojoExecutionException("Error while generating lexer", e);
+ }
+ String parsedGrammar = lexer.printParsedGrammar();
+ getLog().info("--- Generated grammar:\n" + parsedGrammar);
+ }
+
+ private String readFile(File file) throws FileNotFoundException, IOException {
+ StringBuffer fileData = new StringBuffer(1000);
+ BufferedReader reader = new BufferedReader(new FileReader(file));
+ char[] buf = new char[1024];
+ int numRead = 0;
+ while ((numRead = reader.read(buf)) != -1) {
+ String readData = String.valueOf(buf, 0, numRead);
+ fileData.append(readData);
+ buf = new char[1024];
+ }
+ reader.close();
+ return fileData.toString();
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java
new file mode 100644
index 0000000..7b8d059
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/LexerNode.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uci.ics.asterix.lexergenerator.rules.*;
+
+public class LexerNode {
+ private static String TOKEN_PREFIX = "TOKEN_";
+ private LinkedHashMap<Rule, LexerNode> actions = new LinkedHashMap<Rule, LexerNode>();
+ private String finalTokenName;
+ private Set<String> ongoingParsing = new HashSet<String>();
+
+ public LexerNode clone() {
+ LexerNode node = new LexerNode();
+ node.finalTokenName = this.finalTokenName;
+ for (Map.Entry<Rule, LexerNode> entry : this.actions.entrySet()) {
+ node.actions.put(entry.getKey().clone(), entry.getValue().clone());
+ }
+ for (String ongoing : this.ongoingParsing) {
+ node.ongoingParsing.add(ongoing);
+ }
+ return node;
+ }
+
+ public void add(Rule newRule) {
+ if (actions.get(newRule) == null) {
+ actions.put(newRule, new LexerNode());
+ }
+ }
+
+ public void append(Rule newRule) {
+ if (actions.size() == 0) {
+ add(newRule);
+ } else {
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ action.getValue().append(newRule);
+ }
+ if (actions.containsKey(new RuleEpsilon())) {
+ actions.remove(new RuleEpsilon());
+ add(newRule);
+ }
+ }
+ }
+
+ public void merge(LexerNode newNode) throws Exception {
+ for (Map.Entry<Rule, LexerNode> action : newNode.actions.entrySet()) {
+ if (this.actions.get(action.getKey()) == null) {
+ this.actions.put(action.getKey(), action.getValue());
+ } else {
+ this.actions.get(action.getKey()).merge(action.getValue());
+ }
+ }
+ if (newNode.finalTokenName != null) {
+ if (this.finalTokenName == null) {
+ this.finalTokenName = newNode.finalTokenName;
+ } else {
+ throw new Exception("Rule conflict between: " + this.finalTokenName + " and " + newNode.finalTokenName);
+ }
+ }
+ for (String ongoing : newNode.ongoingParsing) {
+ this.ongoingParsing.add(ongoing);
+ }
+ }
+
+ public void append(LexerNode node) throws Exception {
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ if (action.getKey() instanceof RuleEpsilon)
+ continue;
+ action.getValue().append(node);
+ }
+ if (actions.containsKey(new RuleEpsilon())) {
+ actions.remove(new RuleEpsilon());
+ merge(node.clone());
+ }
+ if (actions.size() == 0 || finalTokenName != null) {
+ finalTokenName = null;
+ merge(node.clone());
+ }
+ }
+
+ public void appendTokenName(String name) {
+ if (actions.size() == 0) {
+ this.finalTokenName = name;
+ } else {
+ ongoingParsing.add(TOKEN_PREFIX + name);
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ action.getValue().appendTokenName(name);
+ }
+ }
+ }
+
+ public LexerNode removeTokensName() {
+ this.finalTokenName = null;
+ this.ongoingParsing.clear();
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ action.getValue().removeTokensName();
+ }
+ return this;
+ }
+
+ public String toString() {
+ StringBuilder result = new StringBuilder();
+ if (finalTokenName != null)
+ result.append("! ");
+ if (actions.size() == 1)
+ result.append(actions.keySet().toArray()[0].toString() + actions.values().toArray()[0].toString());
+ if (actions.size() > 1) {
+ result.append(" ( ");
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ if (result.length() != 3) {
+ result.append(" || ");
+ }
+ result.append(action.getKey().toString());
+ result.append(action.getValue().toString());
+ }
+ result.append(" ) ");
+ }
+ return result.toString();
+ }
+
+ public String toJava() {
+ StringBuffer result = new StringBuffer();
+ if (numberOfRuleChar() > 2) {
+ result.append(toJavaSingleCharRules());
+ result.append(toJavaComplexRules(false));
+ } else {
+ result.append(toJavaComplexRules(true));
+ }
+ if (this.finalTokenName != null) {
+ result.append("return " + TOKEN_PREFIX + finalTokenName + ";\n");
+ } else if (ongoingParsing != null) {
+ String ongoingParsingArgs = collectionJoin(ongoingParsing, ',');
+ result.append("return parseError(" + ongoingParsingArgs + ");\n");
+ }
+ return result.toString();
+ }
+
+ private int numberOfRuleChar() {
+ int singleCharRules = 0;
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ if (action.getKey() instanceof RuleChar)
+ singleCharRules++;
+ }
+ return singleCharRules;
+ }
+
+ private String toJavaSingleCharRules() {
+ StringBuffer result = new StringBuffer();
+ result.append("switch(currentChar){\n");
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ if (action.getKey() instanceof RuleChar) {
+ RuleChar rule = (RuleChar) action.getKey();
+ result.append("case '" + rule.expectedChar() + "':\n");
+ result.append(rule.javaAction()).append("\n");
+ result.append(action.getValue().toJava());
+ }
+ }
+ result.append("}\n");
+ return result.toString();
+ }
+
+ private String toJavaComplexRules(boolean all) {
+ StringBuffer result = new StringBuffer();
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ if (!all && action.getKey() instanceof RuleChar)
+ continue;
+ if (action.getKey() instanceof RuleEpsilon)
+ continue;
+ String act = action.getKey().javaAction();
+ if (act.length() > 0) {
+ act = "\n" + act;
+ }
+ result.append(action.getKey().javaMatch(act + "\n" + action.getValue().toJava()));
+ }
+ return result.toString();
+ }
+
+ public void expandFirstAction(LinkedHashMap<String, Token> tokens) throws Exception {
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ Rule first = action.getKey();
+ if (first instanceof RulePartial) {
+ if (tokens.get(((RulePartial) first).getPartial()) == null) {
+ throw new Exception("Cannot find a token used as part of another definition, missing token: "
+ + ((RulePartial) first).getPartial());
+ }
+ actions.remove(first);
+ LexerNode node = tokens.get(((RulePartial) first).getPartial()).getNode().clone();
+ merge(node);
+ }
+ }
+ }
+
+ public Set<String> neededAuxFunctions() {
+ HashSet<String> partials = new HashSet<String>();
+ for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
+ Rule rule = action.getKey();
+ if (rule instanceof RulePartial) {
+ partials.add(((RulePartial) rule).getPartial());
+ }
+ partials.addAll(action.getValue().neededAuxFunctions());
+ }
+ return partials;
+ }
+
+ public String toJavaAuxFunction() {
+ String oldFinalTokenName = finalTokenName;
+ if (oldFinalTokenName == null)
+ finalTokenName = "AUX_NOT_FOUND";
+ String result = toJava();
+ finalTokenName = oldFinalTokenName;
+ return result;
+ }
+
+ private String collectionJoin(Collection<String> collection, char c) {
+ StringBuilder ongoingParsingArgs = new StringBuilder();
+ for (String token : collection) {
+ ongoingParsingArgs.append(token);
+ ongoingParsingArgs.append(c);
+ }
+ if (ongoingParsing.size() > 0) {
+ ongoingParsingArgs.deleteCharAt(ongoingParsingArgs.length() - 1);
+ }
+ return ongoingParsingArgs.toString();
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java
new file mode 100644
index 0000000..941f822
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/NodeChainFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.HashMap;
+
+import edu.uci.ics.asterix.lexergenerator.rulegenerators.*;
+
+public class NodeChainFactory {
+ static private HashMap<String, RuleGenerator> ruleGenerators = new HashMap<String, RuleGenerator>();
+
+ static {
+ ruleGenerators.put("char", new RuleGeneratorChar());
+ ruleGenerators.put("string", new RuleGeneratorString());
+ ruleGenerators.put("anythingUntil", new RuleGeneratorAnythingUntil());
+ ruleGenerators.put("signOrNothing", new RuleGeneratorSignOrNothing());
+ ruleGenerators.put("sign", new RuleGeneratorSign());
+ ruleGenerators.put("digitSequence", new RuleGeneratorDigitSequence());
+ ruleGenerators.put("caseInsensitiveChar", new RuleGeneratorCaseInsensitiveChar());
+ ruleGenerators.put("charOrNothing", new RuleGeneratorCharOrNothing());
+ ruleGenerators.put("token", new RuleGeneratorToken());
+ ruleGenerators.put("nothing", new RuleGeneratorNothing());
+ }
+
+ public static LexerNode create(String generator, String constructor) throws Exception {
+ constructor = constructor.replace("@", "aux_");
+ if (ruleGenerators.get(generator) == null)
+ throw new Exception("Rule Generator not found for '" + generator + "'");
+ return ruleGenerators.get(generator).generate(constructor);
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java
new file mode 100644
index 0000000..bb122c2
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/Token.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator;
+
+import java.util.LinkedHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class Token {
+ private String userDescription;
+ private String name;
+ private LexerNode node;
+
+ public Token(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+ userDescription = str;
+ node = new LexerNode();
+ parse(userDescription, tokens);
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public LexerNode getNode() {
+ return node;
+ }
+
+ public String toString() {
+ return this.name + " => " + getNode().toString();
+ }
+
+ public void merge(Token newToken) throws Exception {
+ node.merge(newToken.getNode());
+ }
+
+ private void parse(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+ Pattern p = Pattern.compile("^(@?\\w+)\\s*=\\s*(.+)");
+ Matcher m = p.matcher(str);
+ if (!m.find())
+ throw new Exception("Token definition not correct: " + str);
+ this.name = m.group(1).replaceAll("@", "aux_");
+ String[] textRules = m.group(2).split("(?<!\\\\),\\s*");
+ for (String textRule : textRules) {
+ Pattern pRule = Pattern.compile("^(\\w+)(\\((.*)\\))?");
+ Matcher mRule = pRule.matcher(textRule);
+ mRule.find();
+ String generator = mRule.group(1);
+ String constructor = mRule.group(3);
+ if (constructor == null)
+ throw new Exception("Error in rule format: " + "\n " + str + " = " + generator + " : " + constructor);
+ constructor = constructor.replace("\\", "");
+ node.append(NodeChainFactory.create(generator, constructor));
+ node.expandFirstAction(tokens);
+ }
+ node.appendTokenName(name);
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java
similarity index 65%
copy from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
copy to asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java
index 290c7d6..a9c7ffc 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/TokenAux.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,12 +12,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.managed.adapter;
+package edu.uci.ics.asterix.lexergenerator;
-import java.util.Map;
+import java.util.LinkedHashMap;
-public interface IMutableFeedAdapter extends IManagedFeedAdapter {
+public class TokenAux extends Token {
- public void alter(Map<String,String> properties) throws Exception;
-
+ public TokenAux(String str, LinkedHashMap<String, Token> tokens) throws Exception {
+ super(str, tokens);
+ }
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java
similarity index 66%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
rename to asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java
index 290c7d6..3733746 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGenerator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,12 +12,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.managed.adapter;
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
-import java.util.Map;
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
-public interface IMutableFeedAdapter extends IManagedFeedAdapter {
-
- public void alter(Map<String,String> properties) throws Exception;
-
+public interface RuleGenerator {
+ public LexerNode generate(String input) throws Exception;
}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java
new file mode 100644
index 0000000..b14eb3e
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorAnythingUntil.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleAnythingUntil;
+
+public class RuleGeneratorAnythingUntil implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ if (input == null || input.length() != 1)
+ throw new Exception("Wrong rule format for generator anythingExcept: " + input);
+ result.append(new RuleAnythingUntil(input.charAt(0)));
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java
new file mode 100644
index 0000000..b789f59
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCaseInsensitiveChar.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorCaseInsensitiveChar implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ if (input == null || input.length() != 1)
+ throw new Exception("Wrong rule format for generator char: " + input);
+ char cl = Character.toLowerCase(input.charAt(0));
+ char cu = Character.toUpperCase(cl);
+ result.add(new RuleChar(cl));
+ result.add(new RuleChar(cu));
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java
new file mode 100644
index 0000000..0b830e6
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorChar.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorChar implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ if (input == null || input.length() != 1)
+ throw new Exception("Wrong rule format for generator char: " + input);
+ result.append(new RuleChar(input.charAt(0)));
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java
new file mode 100644
index 0000000..d01ff7d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorCharOrNothing.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorCharOrNothing implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ if (input == null || input.length() != 1)
+ throw new Exception("Wrong rule format for generator charOrNothing: " + input);
+ result.add(new RuleChar(input.charAt(0)));
+ result.add(new RuleEpsilon());
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java
new file mode 100644
index 0000000..d067ee7
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorDigitSequence.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleDigitSequence;
+
+public class RuleGeneratorDigitSequence implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ result.append(new RuleDigitSequence());
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java
new file mode 100644
index 0000000..fec06a1
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorNothing.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorNothing implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode node = new LexerNode();
+ node.add(new RuleEpsilon());
+ return node;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java
new file mode 100644
index 0000000..0160f09
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSign.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorSign implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ result.add(new RuleChar('+'));
+ result.add(new RuleChar('-'));
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java
new file mode 100644
index 0000000..7c4297d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorSignOrNothing.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class RuleGeneratorSignOrNothing implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ LexerNode result = new LexerNode();
+ result.add(new RuleChar('+'));
+ result.add(new RuleChar('-'));
+ result.add(new RuleEpsilon());
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java
new file mode 100644
index 0000000..eb0471b
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorString.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class RuleGeneratorString implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) {
+ LexerNode result = new LexerNode();
+ if (input == null)
+ return result;
+ for (int i = 0; i < input.length(); i++) {
+ result.append(new RuleChar(input.charAt(i)));
+ }
+ return result;
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java
new file mode 100644
index 0000000..b4c23d8
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rulegenerators/RuleGeneratorToken.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rulegenerators;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RulePartial;
+
+public class RuleGeneratorToken implements RuleGenerator {
+
+ @Override
+ public LexerNode generate(String input) throws Exception {
+ if (input == null || input.length() == 0)
+ throw new Exception("Wrong rule format for generator token : " + input);
+ LexerNode node = new LexerNode();
+ node.add(new RulePartial(input));
+ return node;
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java
similarity index 62%
copy from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
copy to asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java
index 290c7d6..01cd1d5 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/Rule.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,12 +12,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.managed.adapter;
+package edu.uci.ics.asterix.lexergenerator.rules;
-import java.util.Map;
+public interface Rule {
+ public int hashCode();
-public interface IMutableFeedAdapter extends IManagedFeedAdapter {
+ public boolean equals(Object o);
- public void alter(Map<String,String> properties) throws Exception;
-
+ public String toString();
+
+ public String javaAction();
+
+ public String javaMatch(String action);
+
+ public Rule clone();
}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java
new file mode 100644
index 0000000..8d45835
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleAnythingUntil.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleAnythingUntil implements Rule {
+
+ private char expected;
+
+ public RuleAnythingUntil clone() {
+ return new RuleAnythingUntil(expected);
+ }
+
+ public RuleAnythingUntil(char expected) {
+ this.expected = expected;
+ }
+
+ @Override
+ public String toString() {
+ return " .* " + String.valueOf(expected);
+ }
+
+ @Override
+ public int hashCode() {
+ return 10 * (int) expected;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (o instanceof RuleAnythingUntil) {
+ if (((RuleAnythingUntil) o).expected == this.expected) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public String javaAction() {
+ return "currentChar = readNextChar();";
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ StringBuilder result = new StringBuilder();
+ result.append("boolean escaped = false;");
+ result.append("while (currentChar!='").append(expected).append("' || escaped)");
+ result.append("{\nif(!escaped && currentChar=='\\\\\\\\'){escaped=true;}\nelse {escaped=false;}\ncurrentChar = readNextChar();\n}");
+ result.append("\nif (currentChar=='").append(expected).append("'){");
+ result.append(action);
+ result.append("}\n");
+ return result.toString();
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java
new file mode 100644
index 0000000..0e53374
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleChar.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleChar implements Rule {
+
+ private char expected;
+
+ public RuleChar clone() {
+ return new RuleChar(expected);
+ }
+
+ public RuleChar(char expected) {
+ this.expected = expected;
+ }
+
+ @Override
+ public String toString() {
+ return String.valueOf(expected);
+ }
+
+ public char expectedChar() {
+ return expected;
+ }
+
+ @Override
+ public int hashCode() {
+ return (int) expected;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (o instanceof RuleChar) {
+ if (((RuleChar) o).expected == this.expected) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public String javaAction() {
+ return "currentChar = readNextChar();";
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ StringBuilder result = new StringBuilder();
+ result.append("if (currentChar=='");
+ result.append(expected);
+ result.append("'){");
+ result.append(action);
+ result.append("}");
+ return result.toString();
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java
new file mode 100644
index 0000000..13381e0
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleDigitSequence.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleDigitSequence implements Rule {
+
+ public RuleDigitSequence clone() {
+ return new RuleDigitSequence();
+ }
+
+ @Override
+ public String toString() {
+ return " [0-9]+ ";
+ }
+
+ @Override
+ public int hashCode() {
+ return 1;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (o instanceof RuleDigitSequence) {
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public String javaAction() {
+ return "";
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ StringBuilder result = new StringBuilder();
+ result.append("if(currentChar >= '0' && currentChar<='9'){" + "\ncurrentChar = readNextChar();"
+ + "\nwhile(currentChar >= '0' && currentChar<='9'){" + "\ncurrentChar = readNextChar();" + "\n}\n");
+ result.append(action);
+ result.append("\n}");
+ return result.toString();
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java
new file mode 100644
index 0000000..41b7535
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RuleEpsilon.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RuleEpsilon implements Rule {
+
+ public RuleEpsilon clone() {
+ return new RuleEpsilon();
+ }
+
+ @Override
+ public String toString() {
+ return "?";
+ }
+
+ @Override
+ public int hashCode() {
+ return 0;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (o instanceof RuleEpsilon) {
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public String javaAction() {
+ return "";
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ StringBuilder result = new StringBuilder();
+ result.append("{").append(action).append("}");
+ return result.toString();
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java
new file mode 100644
index 0000000..89caf4f
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/edu/uci/ics/asterix/lexergenerator/rules/RulePartial.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.lexergenerator.rules;
+
+public class RulePartial implements Rule {
+
+ private String partialName;
+
+ public RulePartial clone() {
+ return new RulePartial(partialName);
+ }
+
+ public RulePartial(String expected) {
+ this.partialName = expected;
+ }
+
+ public String getPartial() {
+ return this.partialName;
+ }
+
+ @Override
+ public String toString() {
+ return partialName;
+ }
+
+ @Override
+ public int hashCode() {
+ return (int) partialName.charAt(1);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (o instanceof RulePartial) {
+ if (((RulePartial) o).partialName.equals(this.partialName)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public String javaAction() {
+ return "";
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ StringBuilder result = new StringBuilder();
+ result.append("if (parse_" + partialName + "(currentChar)==TOKEN_" + partialName + "){");
+ result.append(action);
+ result.append("}");
+ return result.toString();
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java
new file mode 100644
index 0000000..8cee79d
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/Lexer.java
@@ -0,0 +1,219 @@
+package [PACKAGE];
+
+import java.io.IOException;
+import [PACKAGE].[LEXER_NAME]Exception;
+
+public class [LEXER_NAME] {
+
+ public static final int
+ TOKEN_EOF = 0, TOKEN_AUX_NOT_FOUND = 1 [TOKENS_CONSTANTS];
+
+ // Human-readable representation of tokens; useful for debugging.
+ // It is possible to convert a TOKEN_CONSTANT into its image via
+ // [LEXER_NAME].tokenKindToString(TOKEN_CONSTANT);
+ private static final String[] tokenImage = {
+ "<EOF>", "<AUX_NOT_FOUND>" [TOKENS_IMAGES]
+ };
+
+ private static final char EOF_CHAR = 4;
+ protected java.io.Reader inputStream;
+ protected int column;
+ protected int line;
+ protected boolean prevCharIsCR;
+ protected boolean prevCharIsLF;
+ protected char[] buffer;
+ protected int bufsize;
+ protected int bufpos;
+ protected int tokenBegin;
+ protected int endOf_USED_Buffer;
+ protected int endOf_UNUSED_Buffer;
+ protected int maxUnusedBufferSize;
+
+// ================================================================================
+// Auxiliary functions. These can parse the tokens used in the grammar as partials/auxiliaries
+// ================================================================================
+
+ [LEXER_AUXFUNCTIONS]
+
+// ================================================================================
+// Main method. Return a TOKEN_CONSTANT
+// ================================================================================
+
+ public int next() throws [LEXER_NAME]Exception, IOException{
+ char currentChar = buffer[bufpos];
+ while (currentChar == ' ' || currentChar=='\t' || currentChar == '\n' || currentChar=='\r')
+ currentChar = readNextChar();
+ tokenBegin = bufpos;
+ if (currentChar==EOF_CHAR) return TOKEN_EOF;
+
+ [LEXER_LOGIC]
+ }
+
+// ================================================================================
+// Public interface
+// ================================================================================
+
+ public [LEXER_NAME](java.io.Reader stream) throws IOException{
+ reInit(stream);
+ }
+
+ public void reInit(java.io.Reader stream) throws IOException{
+ done();
+ inputStream = stream;
+ bufsize = 4096;
+ line = 1;
+ column = 0;
+ bufpos = -1;
+ endOf_UNUSED_Buffer = bufsize;
+ endOf_USED_Buffer = 0;
+ prevCharIsCR = false;
+ prevCharIsLF = false;
+ buffer = new char[bufsize];
+ tokenBegin = -1;
+ maxUnusedBufferSize = 4096/2;
+ readNextChar();
+ }
+
+ public String getLastTokenImage() {
+ if (bufpos >= tokenBegin)
+ return new String(buffer, tokenBegin, bufpos - tokenBegin);
+ else
+ return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+ new String(buffer, 0, bufpos);
+ }
+
+ public static String tokenKindToString(int token) {
+ return tokenImage[token];
+ }
+
+ public void done(){
+ buffer = null;
+ }
+
+// ================================================================================
+// Parse error management
+// ================================================================================
+
+ protected int parseError(String reason) throws [LEXER_NAME]Exception {
+ StringBuilder message = new StringBuilder();
+ message.append(reason).append("\n");
+ message.append("Line: ").append(line).append("\n");
+ message.append("Row: ").append(column).append("\n");
+ throw new [LEXER_NAME]Exception(message.toString());
+ }
+
+ protected int parseError(int ... tokens) throws [LEXER_NAME]Exception {
+ StringBuilder message = new StringBuilder();
+ message.append("Error while parsing. ");
+ message.append(" Line: ").append(line);
+ message.append(" Row: ").append(column);
+ message.append(" Expecting:");
+ for (int tokenId : tokens){
+ message.append(" ").append([LEXER_NAME].tokenKindToString(tokenId));
+ }
+ throw new [LEXER_NAME]Exception(message.toString());
+ }
+
+ protected void updateLineColumn(char c){
+ column++;
+
+ if (prevCharIsLF)
+ {
+ prevCharIsLF = false;
+ line += (column = 1);
+ }
+ else if (prevCharIsCR)
+ {
+ prevCharIsCR = false;
+ if (c == '\n')
+ {
+ prevCharIsLF = true;
+ }
+ else
+ {
+ line += (column = 1);
+ }
+ }
+
+ if (c=='\r') {
+ prevCharIsCR = true;
+ } else if(c == '\n') {
+ prevCharIsLF = true;
+ }
+ }
+
+// ================================================================================
+// Read data, buffer management. It uses a circular (and expandable) buffer
+// ================================================================================
+
+ protected char readNextChar() throws IOException {
+ if (++bufpos >= endOf_USED_Buffer)
+ fillBuff();
+ char c = buffer[bufpos];
+ updateLineColumn(c);
+ return c;
+ }
+
+ protected boolean fillBuff() throws IOException {
+ if (endOf_UNUSED_Buffer == endOf_USED_Buffer) // If no more unused buffer space
+ {
+ if (endOf_UNUSED_Buffer == bufsize) // -- If the previous unused space was
+ { // -- at the end of the buffer
+ if (tokenBegin > maxUnusedBufferSize) // -- -- If the first N bytes before
+ { // the current token are enough
+ bufpos = endOf_USED_Buffer = 0; // -- -- -- setup buffer to use that fragment
+ endOf_UNUSED_Buffer = tokenBegin;
+ }
+ else if (tokenBegin < 0) // -- -- If no token yet
+ bufpos = endOf_USED_Buffer = 0; // -- -- -- reuse the whole buffer
+ else
+ ExpandBuff(false); // -- -- Otherwise expand buffer after its end
+ }
+ else if (endOf_UNUSED_Buffer > tokenBegin) // If the endOf_UNUSED_Buffer is after the token
+ endOf_UNUSED_Buffer = bufsize; // -- set endOf_UNUSED_Buffer to the end of the buffer
+ else if ((tokenBegin - endOf_UNUSED_Buffer) < maxUnusedBufferSize)
+ { // If between endOf_UNUSED_Buffer and the token
+ ExpandBuff(true); // there is NOT enough space expand the buffer
+ } // reorganizing it
+ else
+ endOf_UNUSED_Buffer = tokenBegin; // Otherwise there is enough space at the start
+ } // so we set the buffer to use that fragment
+ int i;
+ if ((i = inputStream.read(buffer, endOf_USED_Buffer, endOf_UNUSED_Buffer - endOf_USED_Buffer)) == -1)
+ {
+ inputStream.close();
+ buffer[endOf_USED_Buffer]=(char)EOF_CHAR;
+ endOf_USED_Buffer++;
+ return false;
+ }
+ else
+ endOf_USED_Buffer += i;
+ return true;
+ }
+
+
+ protected void ExpandBuff(boolean wrapAround)
+ {
+ char[] newbuffer = new char[bufsize + maxUnusedBufferSize];
+
+ try {
+ if (wrapAround) {
+ System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+ System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
+ buffer = newbuffer;
+ endOf_USED_Buffer = (bufpos += (bufsize - tokenBegin));
+ }
+ else {
+ System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+ buffer = newbuffer;
+ endOf_USED_Buffer = (bufpos -= tokenBegin);
+ }
+ } catch (Throwable t) {
+ throw new Error(t.getMessage());
+ }
+
+ bufsize += maxUnusedBufferSize;
+ endOf_UNUSED_Buffer = bufsize;
+ tokenBegin = 0;
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
new file mode 100644
index 0000000..76aa8a4
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/LexerException.java
@@ -0,0 +1,13 @@
+package [PACKAGE];
+
+public class [LEXER_NAME]Exception extends Exception {
+
+ public [LEXER_NAME]Exception(String message) {
+ super(message);
+ }
+
+ private static final long serialVersionUID = 1L;
+
+}
+
+
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
new file mode 100644
index 0000000..7efbeb8
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/resources/default.config
@@ -0,0 +1,16 @@
+# LEXER GENERATOR configuration file
+# ---------------------------------------
+# Place *first* the generic configuration
+# then list your grammar.
+
+PACKAGE: com.my.lexer
+LEXER_NAME: MyLexer
+OUTPUT_DIR: output
+
+TOKENS:
+
+BOOLEAN_LIT = string(boolean)
+FALSE_LIT = string(false)
+BOMB_LIT = string(bomb)
+BONSAI_LIT = string(bonsai)
+HELLO_LIT = string(hello)
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java
new file mode 100644
index 0000000..2ed2eaa
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/Fixtures.java
@@ -0,0 +1,100 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import edu.uci.ics.asterix.lexergenerator.rules.Rule;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleChar;
+
+public class Fixtures {
+ static String token_name = "MYTOKEN";
+ static String token2_name = "MYTOKEN2";
+ static String token_return = "return TOKEN_MYTOKEN;\n";
+ static String token2_return = "return TOKEN_MYTOKEN2;\n";
+ static String token_parseerror = "return parseError(TOKEN_MYTOKEN);\n";
+ static String token_tostring = "! ";
+ static String rule_action = "myaction";
+ static String rule_name = "myrule";
+ static String rule_match = "matchCheck("+rule_name+")";
+ static String rule2_action = "myaction2";
+ static String rule2_name = "myrule2";
+ static String rule2_match = "matchCheck2("+rule_name+")";
+
+ static public Rule createRule(final String name){
+ return new Rule(){
+ String rule_name = name;
+ String rule_action = "myaction";
+ String rule_match = "matchCheck("+rule_name+")";
+
+ @Override
+ public Rule clone(){
+ return Fixtures.createRule(name+"_clone");
+ }
+
+ @Override
+ public String javaAction() {
+ return rule_action;
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ return rule_match+"{"+action+"}";
+ }
+
+ @Override
+ public String toString(){
+ return rule_name;
+ }
+
+ };
+ }
+
+ static Rule rule = new Rule(){
+
+ public Rule clone(){
+ return null;
+ }
+
+ @Override
+ public String javaAction() {
+ return rule_action;
+ }
+
+ @Override
+ public String javaMatch(String action) {
+ return rule_match+"{"+action+"}";
+ }
+
+ @Override
+ public String toString(){
+ return rule_name;
+ }
+
+ };
+
+ static Rule rule2 = new Rule(){
+
+ public Rule clone(){
+ return null;
+ }
+
+ @Override
+ public String javaAction() {
+ return rule2_action;
+ }
+
+ @Override
+ public String javaMatch(String act) {
+ return rule2_match+"{"+act+"}";
+ }
+
+ @Override
+ public String toString(){
+ return rule2_name;
+ }
+
+ };
+
+ static RuleChar ruleA = new RuleChar('a');
+ static RuleChar ruleB = new RuleChar('b');
+ static RuleChar ruleC = new RuleChar('c');
+ static String ruleABC_action = "currentChar = readNextChar();";
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java
new file mode 100644
index 0000000..7541124
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAddRuleTest.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeAddRuleTest {
+
+ @Test
+ public void NodeRuleRuleNodeNode() {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.add(rule2);
+ node.appendTokenName(token_name);
+ assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
+ assertEquals(rule_match+"{"
+ +"\n" + rule_action
+ +"\n" +token_return
+ +"}"
+ +rule2_match+"{"
+ +"\n"+rule2_action
+ +"\n"+token_return
+ +"}"
+ +token_parseerror , node.toJava());
+ }
+
+ @Test
+ public void NodeSwitchCase() {
+ LexerNode node = new LexerNode();
+ node.append(ruleA);
+ node.add(ruleB);
+ node.add(ruleC);
+ node.appendTokenName(token_name);
+ assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
+ assertEquals("switch(currentChar){\n" +
+ "case 'a':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "case 'b':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "case 'c':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "}\n"+ token_parseerror , node.toJava());
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java
new file mode 100644
index 0000000..5151e77
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendNodeTest.java
@@ -0,0 +1,81 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+
+public class LexerNodeAppendNodeTest {
+
+ @Test
+ public void AppendIsMergeIfNoActions() throws Exception {
+ LexerNode node = new LexerNode();
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("rule"));
+ node2.appendTokenName(token_name);
+ node.append(node2);
+ assertEquals("rule_clone! ", node.toString());
+ }
+
+ @Test
+ public void AppendIsAppend() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(createRule("A"));
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("rule"));
+ node2.appendTokenName(token_name);
+ node.append(node2);
+ assertEquals("Arule_clone! ", node.toString());
+ }
+
+ @Test
+ public void AppendedNodesAreCloned() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(createRule("A"));
+ node.appendTokenName(token_name);
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("B"));
+ node2.appendTokenName(token2_name);
+ node.append(node2);
+ // TODO
+ // assertEquals("A! B_clone! ", node.toString());
+
+ LexerNode node3 = new LexerNode();
+ node3.append(createRule("C"));
+ node3.append(createRule("D"));
+ node3.appendTokenName(token2_name);
+ node.append(node3);
+ // TODO
+ // assertEquals("A! B_clone! C_cloneD_clone! ", node.toString());
+ }
+
+ @Test
+ public void EpsilonRuleDoesNotPropagateAppended() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(new RuleEpsilon());
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("A"));
+ node2.appendTokenName(token2_name);
+ node.append(node2);
+ assertEquals("A_clone! ", node.toString());
+ }
+
+ @Test
+ public void EpsilonRuleIsRemovedAndIssueMerge() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(new RuleEpsilon());
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("A"));
+ node2.appendTokenName(token2_name);
+ node.append(node2);
+ node.add(new RuleEpsilon());
+ node.append(node2);
+ // TODO
+ // assertEquals(" ( A_clone! A_clone! || A_clone! ) ", node.toString());
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java
new file mode 100644
index 0000000..84fd292
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAppendRuleTest.java
@@ -0,0 +1,47 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+
+public class LexerNodeAppendRuleTest {
+ @Test
+ public void SingleNode() {
+ LexerNode node = new LexerNode();
+ node.appendTokenName(token_name);
+ assertEquals(token_tostring, node.toString());
+ assertEquals(token_return, node.toJava());
+ }
+
+ @Test
+ public void NodeRuleNode() {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.appendTokenName(token_name);
+ assertEquals(rule_name+token_tostring, node.toString());
+ assertEquals(rule_match+"{"
+ +"\n"+rule_action
+ +"\n"+token_return
+ +"}"+token_parseerror, node.toJava());
+ }
+
+ @Test
+ public void NodeRuleNodeRuleNode() {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.append(rule2);
+ node.appendTokenName(token_name);
+ assertEquals(rule_name+rule2_name+token_tostring, node.toString());
+ assertEquals(rule_match+"{"
+ +"\n"+rule_action
+ +"\n"+rule2_match+"{"
+ +"\n"+rule2_action
+ +"\n"+token_return
+ +"}"
+ +token_parseerror
+ +"}"+token_parseerror, node.toJava());
+ }
+}
\ No newline at end of file
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
new file mode 100644
index 0000000..9f12c00
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
@@ -0,0 +1,111 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Set;
+
+import org.junit.Test;
+
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+import edu.uci.ics.asterix.lexergenerator.Token;
+import edu.uci.ics.asterix.lexergenerator.rules.RuleEpsilon;
+import edu.uci.ics.asterix.lexergenerator.rules.RulePartial;
+
+public class LexerNodeAuxFunctionsTest {
+ String expectedDifferentReturn = "return TOKEN_AUX_NOT_FOUND;\n";
+
+ @Test
+ public void NodeRuleRuleNodeNode() {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.add(rule2);
+ node.appendTokenName(token_name);
+ assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
+ assertEquals(rule_match+"{"
+ +"\n" + rule_action
+ +"\n" +token_return
+ +"}"
+ +rule2_match+"{"
+ +"\n"+rule2_action
+ +"\n"+token_return
+ +"}"
+ +expectedDifferentReturn , node.toJavaAuxFunction());
+ }
+
+ @Test
+ public void NodeSwitchCase() {
+ LexerNode node = new LexerNode();
+ node.append(ruleA);
+ node.add(ruleB);
+ node.add(ruleC);
+ node.appendTokenName(token_name);
+ assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
+ assertEquals("switch(currentChar){\n" +
+ "case 'a':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "case 'b':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "case 'c':" +
+ "\n" + ruleABC_action +
+ "\n" + token_return +
+ "}\n"+ expectedDifferentReturn , node.toJavaAuxFunction());
+ }
+
+ @Test
+ public void NodeNeededAuxFunctions() {
+ LexerNode node = new LexerNode();
+ node.append(ruleA);
+ node.add(new RulePartial("token1"));
+ node.append(ruleC);
+ node.append(new RulePartial("token2"));
+ node.appendTokenName(token_name);
+ assertEquals(" ( actoken2! || token1ctoken2! ) ", node.toString());
+ Set<String> expectedNeededAuxFunctions = new HashSet<String>();
+ expectedNeededAuxFunctions.add("token1");
+ expectedNeededAuxFunctions.add("token2");
+ assertEquals(expectedNeededAuxFunctions, node.neededAuxFunctions());
+ }
+
+ @Test(expected=Exception.class)
+ public void NodeExpandFirstActionError() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(ruleA);
+ node.add(new RulePartial("token1"));
+ node.append(ruleC);
+ node.add(new RuleEpsilon());
+ node.append(new RulePartial("token2"));
+ node.appendTokenName(token_name);
+ assertEquals(" ( actoken2! || token1ctoken2! || token2! ) ", node.toString());
+ LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+ try {
+ node.expandFirstAction(tokens);
+ } catch (Exception e) {
+ assertEquals("Cannot find a token used as part of another definition, missing token: token1", e.getMessage());
+ throw e;
+ }
+ }
+
+ public void NodeExpandFirstAction() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(ruleA);
+ node.add(new RulePartial("token1"));
+ node.append(ruleC);
+ node.add(new RuleEpsilon());
+ node.append(new RulePartial("token2"));
+ node.appendTokenName(token_name);
+ assertEquals(" ( actoken2! || token1ctoken2! || token2! ) ", node.toString());
+ LinkedHashMap<String, Token> tokens = new LinkedHashMap<String, Token>();
+ Token a = new Token("token1 = string(T1-blabla)", tokens);
+ Token b = new Token("token2 = string(T2-blabla)", tokens);
+ tokens.put("token1", a);
+ tokens.put("token2", b);
+ node.expandFirstAction(tokens);
+ assertEquals(" ( actoken2! || T1-blablactoken2! || T2-blabla! ) ", node.toString());
+ }
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java
new file mode 100644
index 0000000..87e3ff4
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeCloneTest.java
@@ -0,0 +1,56 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeCloneTest {
+
+ @Test
+ public void Depth1() throws Exception {
+ LexerNode node = new LexerNode();
+ LexerNode newNode = node.clone();
+ assertFalse(node == newNode);
+ }
+
+
+ @Test
+ public void Depth2() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(createRule("my1"));
+ node.add(createRule("my2"));
+ node.add(ruleA);
+ node.appendTokenName(token_name);
+ LexerNode newNode = node.clone();
+
+ assertEquals(" ( my1! || my2! || a! ) ", node.toString());
+ assertEquals(" ( my1_clone! || my2_clone! || a! ) ", newNode.toString());
+ }
+
+ @Test
+ public void Depth3() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(createRule("my1"));
+ node.add(createRule("my2"));
+ node.add(ruleA);
+ node.appendTokenName(token_name);
+ LexerNode node2 = new LexerNode();
+ node2.append(createRule("my3"));
+ node2.add(createRule("my4"));
+ node2.add(ruleB);
+ node2.appendTokenName(token2_name);
+ node.append(node2);
+ LexerNode newNode = node.clone();
+ // TODO
+ // assertEquals(" ( my1! ( || my3_clone! || my4_clone! || b! ) " +
+ // " || my2! ( || my3_clone! || my4_clone! || b! ) " +
+ // " || a! ( || my3_clone! || my4_clone! || b! ) ) ", node.toString());
+ // assertEquals(" ( my1_clone! ( || my3_clone_clone! || my4_clone_clone! || b! ) " +
+ // " || my2_clone! ( || my3_clone_clone! || my4_clone_clone! || b! ) " +
+ // " || a! ( || my3_clone_clone! || my4_clone_clone! || b! ) ) ", newNode.toString());
+ }
+
+}
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java
new file mode 100644
index 0000000..4b22d99
--- /dev/null
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/edu/uci/ics/asterix/lexergenerator/LexerNodeMergeNodeTest.java
@@ -0,0 +1,83 @@
+package edu.uci.ics.asterix.lexergenerator;
+
+import static edu.uci.ics.asterix.lexergenerator.Fixtures.*;
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.lexergenerator.LexerNode;
+
+public class LexerNodeMergeNodeTest {
+
+ @Test
+ public void MergeIsAdd() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ LexerNode node2 = new LexerNode();
+ node2.append(rule2);
+ node2.append(rule);
+ node2.merge(node);
+ node2.appendTokenName(token_name);
+
+ LexerNode expected = new LexerNode();
+ expected.append(rule2);
+ expected.append(rule);
+ expected.add(rule);
+ expected.appendTokenName(token_name);
+
+ assertEquals(expected.toString(), node2.toString());
+ assertEquals(expected.toJava(), node2.toJava());
+ }
+
+ @Test
+ public void MergeTwoToken() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.appendTokenName(token_name);
+ LexerNode node2 = new LexerNode();
+ node2.append(rule2);
+ node2.appendTokenName(token2_name);
+ node.merge(node2);
+
+ assertEquals(" ( "+rule_name+token_tostring+" || "+rule2_name+token_tostring+" ) ", node.toString());
+ assertEquals(rule_match + "{"
+ + "\n" + rule_action
+ + "\n" + token_return
+ +"}"+rule2_match+"{"
+ + "\n" + rule2_action
+ + "\n" + token2_return
+ +"}return parseError(TOKEN_MYTOKEN,TOKEN_MYTOKEN2);\n"
+, node.toJava());
+ }
+
+ @Test(expected=Exception.class)
+ public void MergeConflict() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.appendTokenName(token_name);
+ LexerNode node2 = new LexerNode();
+ node2.append(rule);
+ node2.appendTokenName(token2_name);
+ try {
+ node.merge(node2);
+ } catch (Exception e) {
+ assertEquals("Rule conflict between: "+token_name +" and "+token2_name, e.getMessage());
+ throw e;
+ }
+ }
+
+ @Test
+ public void MergeWithoutConflictWithRemoveTokensName() throws Exception {
+ LexerNode node = new LexerNode();
+ node.append(rule);
+ node.append(rule);
+ node.appendTokenName(token_name);
+ LexerNode node2 = new LexerNode();
+ node2.append(rule);
+ node2.append(rule);
+ node2.appendTokenName(token2_name);
+ node2.removeTokensName();
+ node.merge(node2);
+ assertEquals(rule_name+rule_name+token_tostring, node.toString());
+ }
+}
diff --git a/asterix-maven-plugins/pom.xml b/asterix-maven-plugins/pom.xml
new file mode 100644
index 0000000..0677ffb
--- /dev/null
+++ b/asterix-maven-plugins/pom.xml
@@ -0,0 +1,21 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-maven-plugins</artifactId>
+ <version>0.1</version>
+ <packaging>pom</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-plugin-api</artifactId>
+ <version>2.2.1</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+
+ <modules>
+ <module>lexer-generator-maven-plugin</module>
+ </modules>
+</project>
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index d95e29a..911f55e 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -6,9 +6,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -17,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
</plugins>
@@ -55,21 +54,28 @@
<artifactId>
hyracks-storage-am-lsm-invertedindex
</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>
hyracks-storage-am-lsm-btree
</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>
hyracks-storage-am-lsm-rtree
</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>0.20.2</version>
+ <type>jar</type>
+ <scope>compile</scope>
</dependency>
</dependencies>
</project>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
index 63ed290..17cfc9a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
@@ -157,7 +157,7 @@
}
ctx.dropDataverse(dataverseName);
}
-
+
@Override
public List<Dataverse> getDataverses(MetadataTransactionContext ctx) throws MetadataException {
try {
@@ -243,6 +243,7 @@
@Override
public Dataset getDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
throws MetadataException {
+
// First look in the context to see if this transaction created the
// requested dataset itself (but the dataset is still uncommitted).
Dataset dataset = ctx.getDataset(dataverseName, datasetName);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index a6be4fa..1d04cd7 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -58,7 +58,6 @@
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexModificationOperationCallback;
import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexModificationOperationCallback;
-import edu.uci.ics.asterix.transaction.management.resource.TransactionalResourceManagerRepository;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetId;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
import edu.uci.ics.asterix.transaction.management.service.transaction.IResourceManager.ResourceType;
@@ -67,7 +66,6 @@
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -90,8 +88,6 @@
public class MetadataNode implements IMetadataNode {
private static final long serialVersionUID = 1L;
- // TODO: Temporary transactional resource id for metadata.
- private static final byte[] metadataResourceId = MetadataNode.class.toString().getBytes();
private static final DatasetId METADATA_DATASET_ID = new DatasetId(MetadataPrimaryIndexes.METADATA_DATASET_ID);
private IIndexLifecycleManager indexLifecycleManager;
@@ -168,6 +164,7 @@
InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
Index primaryIndex = new Index(dataset.getDataverseName(), dataset.getDatasetName(),
dataset.getDatasetName(), IndexType.BTREE, id.getPrimaryKey(), true, dataset.getPendingOp());
+
addIndex(jobId, primaryIndex);
ITupleReference nodeGroupTuple = createTuple(id.getNodeGroupName(), dataset.getDataverseName(),
dataset.getDatasetName());
@@ -270,8 +267,7 @@
IModificationOperationCallback modCallback = createIndexModificationCallback(jobId, resourceID, metadataIndex,
lsmIndex, IndexOperation.INSERT);
- IIndexAccessor indexAccessor = null;
- indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
// TODO: fix exceptions once new BTree exception model is in hyracks.
indexAccessor.insert(tuple);
@@ -283,40 +279,22 @@
IMetadataIndex metadataIndex, ILSMIndex lsmIndex, IndexOperation indexOp) throws Exception {
TransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId);
- int[] primaryKeyFields = metadataIndex.getPrimaryKeyIndexes();
- int numKeys = primaryKeyFields.length;
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories = null;
if (metadataIndex.isPrimaryIndex()) {
- primaryKeyHashFunctionFactories = metadataIndex.getKeyBinaryHashFunctionFactory();
- } else {
- primaryKeyHashFunctionFactories = new IBinaryHashFunctionFactory[numKeys];
- IBinaryHashFunctionFactory[] secondaryKeyHashFunctionFactories = metadataIndex
- .getKeyBinaryHashFunctionFactory();
- for (int i = 0; i < numKeys; i++) {
- primaryKeyHashFunctionFactories[i] = secondaryKeyHashFunctionFactories[primaryKeyFields[i]];
- }
- }
-
- IModificationOperationCallback callback = null;
- if (metadataIndex.isPrimaryIndex()) {
- callback = new PrimaryIndexModificationOperationCallback(metadataIndex.getDatasetId().getId(),
- primaryKeyFields, primaryKeyHashFunctionFactories, txnCtx, transactionSubsystem.getLockManager(),
+ return new PrimaryIndexModificationOperationCallback(metadataIndex.getDatasetId().getId(),
+ metadataIndex.getPrimaryKeyIndexes(), txnCtx, transactionSubsystem.getLockManager(),
transactionSubsystem, resourceId, ResourceType.LSM_BTREE, indexOp);
} else {
- callback = new SecondaryIndexModificationOperationCallback(metadataIndex.getDatasetId().getId(),
- primaryKeyFields, primaryKeyHashFunctionFactories, txnCtx, transactionSubsystem.getLockManager(),
+ return new SecondaryIndexModificationOperationCallback(metadataIndex.getDatasetId().getId(),
+ metadataIndex.getPrimaryKeyIndexes(), txnCtx, transactionSubsystem.getLockManager(),
transactionSubsystem, resourceId, ResourceType.LSM_BTREE, indexOp);
}
-
- return callback;
}
@Override
public void dropDataverse(JobId jobId, String dataverseName) throws MetadataException, RemoteException {
try {
List<Dataset> dataverseDatasets;
- // As a side effect, acquires an S lock on the 'dataset' dataset
- // on behalf of txnId.
+
dataverseDatasets = getDataverseDatasets(jobId, dataverseName);
if (dataverseDatasets != null && dataverseDatasets.size() > 0) {
// Drop all datasets in this dataverse.
@@ -361,6 +339,7 @@
// on behalf of txnId.
ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
+
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
} catch (TreeIndexException e) {
@@ -387,36 +366,49 @@
ITupleReference searchKey = createTuple(dataverseName, datasetName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'dataset' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
- deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
+ try {
+ ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET,
+ searchKey);
+ deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
+ } catch (TreeIndexException tie) {
+ //ignore this exception and continue deleting all relevant artifacts.
+ }
+
// Delete entry from secondary index 'group'.
if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
ITupleReference groupNameSearchKey = createTuple(id.getNodeGroupName(), dataverseName, datasetName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the GROUPNAME_ON_DATASET_INDEX index.
- ITupleReference groupNameTuple = getTupleToBeDeleted(jobId,
- MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameSearchKey);
- deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameTuple);
+ try {
+ ITupleReference groupNameTuple = getTupleToBeDeleted(jobId,
+ MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameSearchKey);
+ deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameTuple);
+ } catch (TreeIndexException tie) {
+ //ignore this exception and continue deleting all relevant artifacts.
+ }
}
// Delete entry from secondary index 'type'.
ITupleReference dataTypeSearchKey = createTuple(dataverseName, dataset.getItemTypeName(), datasetName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the DATATYPENAME_ON_DATASET_INDEX index.
- ITupleReference dataTypeTuple = getTupleToBeDeleted(jobId,
- MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeSearchKey);
- deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeTuple);
+ try {
+ ITupleReference dataTypeTuple = getTupleToBeDeleted(jobId,
+ MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeSearchKey);
+ deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeTuple);
+ } catch (TreeIndexException tie) {
+ //ignore this exception and continue deleting all relevant artifacts.
+ }
+
// Delete entry(s) from the 'indexes' dataset.
if (dataset.getDatasetType() == DatasetType.INTERNAL || dataset.getDatasetType() == DatasetType.FEED) {
List<Index> datasetIndexes = getDatasetIndexes(jobId, dataverseName, datasetName);
- for (Index index : datasetIndexes) {
- dropIndex(jobId, dataverseName, datasetName, index.getIndexName());
+ if (datasetIndexes != null) {
+ for (Index index : datasetIndexes) {
+ dropIndex(jobId, dataverseName, datasetName, index.getIndexName());
+ }
}
}
- // TODO: Change this to be a BTree specific exception, e.g.,
- // BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop dataset '" + datasetName + "' because it doesn't exist.", e);
} catch (Exception e) {
throw new MetadataException(e);
}
@@ -853,10 +845,6 @@
private <ResultType> void searchIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
IValueExtractor<ResultType> valueExtractor, List<ResultType> results) throws Exception {
- TransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId);
- //#. currently lock is not needed to access any metadata
- // since the non-compatible concurrent access is always protected by the latch in the MetadataManager.
- //transactionProvider.getLockManager().lock(index.getDatasetId(), -1, LockMode.S, txnCtx);
IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
long resourceID = index.getResourceID();
IIndex indexInstance = indexLifecycleManager.getIndex(resourceID);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
index 5ec1d5a..cbd37d6 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
@@ -105,7 +105,8 @@
}
public void dropDataset(String dataverseName, String datasetName) {
- Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, -1, IMetadataEntity.PENDING_NO_OP);
+ Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, -1,
+ IMetadataEntity.PENDING_NO_OP);
droppedCache.addDatasetIfNotExists(dataset);
logAndApply(new MetadataLogicalOperation(dataset, false));
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index df61cd9..9c9d02b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -50,5 +50,5 @@
* Metadata entity to be written into a tuple.
* @throws IOException
*/
- public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws IOException;
+ public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, IOException;
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 84161e2..f1650ca 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -19,6 +19,7 @@
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -236,7 +237,7 @@
primaryIndexes[i].getNodeGroupName());
MetadataManager.INSTANCE.addDataset(mdTxnCtx, new Dataset(primaryIndexes[i].getDataverseName(),
primaryIndexes[i].getIndexedDatasetName(), primaryIndexes[i].getPayloadRecordType().getTypeName(),
- id, DatasetType.INTERNAL, primaryIndexes[i].getDatasetId().getId(), IMetadataEntity.PENDING_NO_OP));
+ id, new HashMap<String,String>(), DatasetType.INTERNAL, primaryIndexes[i].getDatasetId().getId(), IMetadataEntity.PENDING_NO_OP));
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index 17924b7..50681ee 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -18,6 +18,8 @@
import java.util.ArrayList;
import java.util.List;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.AUnionType;
@@ -34,6 +36,7 @@
public static ARecordType INTERNAL_DETAILS_RECORDTYPE;
public static ARecordType EXTERNAL_DETAILS_RECORDTYPE;
public static ARecordType FEED_DETAILS_RECORDTYPE;
+ public static ARecordType DATASET_HINTS_RECORDTYPE;
public static ARecordType DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE;
public static ARecordType FIELD_RECORDTYPE;
public static ARecordType RECORD_RECORDTYPE;
@@ -48,31 +51,34 @@
/**
* Create all metadata record types.
*/
- public static void init() {
+ public static void init() throws MetadataException {
// Attention: The order of these calls is important because some types
// depend on other types being created first.
// These calls are one "dependency chain".
- DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE = createDatasourceAdapterPropertiesRecordType();
- INTERNAL_DETAILS_RECORDTYPE = createInternalDetailsRecordType();
- EXTERNAL_DETAILS_RECORDTYPE = createExternalDetailsRecordType();
- FEED_DETAILS_RECORDTYPE = createFeedDetailsRecordType();
+ try {
+ DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE = createPropertiesRecordType();
+ INTERNAL_DETAILS_RECORDTYPE = createInternalDetailsRecordType();
+ EXTERNAL_DETAILS_RECORDTYPE = createExternalDetailsRecordType();
+ FEED_DETAILS_RECORDTYPE = createFeedDetailsRecordType();
+ DATASET_HINTS_RECORDTYPE = createPropertiesRecordType();
+ DATASET_RECORDTYPE = createDatasetRecordType();
- DATASET_RECORDTYPE = createDatasetRecordType();
+ // Starting another dependency chain.
+ FIELD_RECORDTYPE = createFieldRecordType();
+ RECORD_RECORDTYPE = createRecordTypeRecordType();
+ DERIVEDTYPE_RECORDTYPE = createDerivedTypeRecordType();
+ DATATYPE_RECORDTYPE = createDatatypeRecordType();
- // Starting another dependency chain.
- FIELD_RECORDTYPE = createFieldRecordType();
- RECORD_RECORDTYPE = createRecordTypeRecordType();
- DERIVEDTYPE_RECORDTYPE = createDerivedTypeRecordType();
- DATATYPE_RECORDTYPE = createDatatypeRecordType();
-
- // Independent of any other types.
- DATAVERSE_RECORDTYPE = createDataverseRecordType();
- INDEX_RECORDTYPE = createIndexRecordType();
- NODE_RECORDTYPE = createNodeRecordType();
- NODEGROUP_RECORDTYPE = createNodeGroupRecordType();
- FUNCTION_RECORDTYPE = createFunctionRecordType();
- DATASOURCE_ADAPTER_RECORDTYPE = createDatasourceAdapterRecordType();
-
+ // Independent of any other types.
+ DATAVERSE_RECORDTYPE = createDataverseRecordType();
+ INDEX_RECORDTYPE = createIndexRecordType();
+ NODE_RECORDTYPE = createNodeRecordType();
+ NODEGROUP_RECORDTYPE = createNodeGroupRecordType();
+ FUNCTION_RECORDTYPE = createFunctionRecordType();
+ DATASOURCE_ADAPTER_RECORDTYPE = createDatasourceAdapterRecordType();
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
}
// Helper constants for accessing fields in an ARecord of type
@@ -82,17 +88,19 @@
public static final int DATAVERSE_ARECORD_TIMESTAMP_FIELD_INDEX = 2;
public static final int DATAVERSE_ARECORD_PENDINGOP_FIELD_INDEX = 3;
- private static final ARecordType createDataverseRecordType() {
- return new ARecordType("DataverseRecordType", new String[] { "DataverseName", "DataFormat", "Timestamp", "PendingOp" },
- new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32 }, true);
+ private static final ARecordType createDataverseRecordType() throws AsterixException {
+ return new ARecordType("DataverseRecordType", new String[] { "DataverseName", "DataFormat", "Timestamp",
+ "PendingOp" }, new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+ BuiltinType.AINT32 }, true);
}
// Helper constants for accessing fields in an ARecord of anonymous type
- // external properties.
- public static final int DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX = 0;
- public static final int DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX = 1;
+ // dataset properties.
+ // Used for dataset hints or dataset adapter properties.
+ public static final int DATASOURCE_PROPERTIES_NAME_FIELD_INDEX = 0;
+ public static final int DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX = 1;
- private static final ARecordType createDatasourceAdapterPropertiesRecordType() {
+ private static final ARecordType createPropertiesRecordType() throws AsterixException {
String[] fieldNames = { "Name", "Value" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
return new ARecordType(null, fieldNames, fieldTypes, true);
@@ -106,7 +114,7 @@
public static final int INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX = 3;
public static final int INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX = 4;
- private static final ARecordType createInternalDetailsRecordType() {
+ private static final ARecordType createInternalDetailsRecordType() throws AsterixException {
AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, olType, olType, BuiltinType.ASTRING };
@@ -118,7 +126,7 @@
public static final int EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX = 0;
public static final int EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX = 1;
- private static final ARecordType createExternalDetailsRecordType() {
+ private static final ARecordType createExternalDetailsRecordType() throws AsterixException {
AOrderedListType orderedPropertyListType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
String[] fieldNames = { "DatasourceAdapter", "Properties" };
@@ -136,15 +144,20 @@
public static final int FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX = 7;
public static final int FEED_DETAILS_ARECORD_STATE_FIELD_INDEX = 8;
- private static final ARecordType createFeedDetailsRecordType() {
+ private static final ARecordType createFeedDetailsRecordType() throws AsterixException {
AOrderedListType orderedListType = new AOrderedListType(BuiltinType.ASTRING, null);
AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE,
null);
String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName",
"DatasourceAdapter", "Properties", "Function", "Status" };
+ List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
+ feedFunctionUnionList.add(BuiltinType.ANULL);
+ feedFunctionUnionList.add(BuiltinType.ASTRING);
+ AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
+
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListType, orderedListType,
- BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, BuiltinType.ASTRING,
+ BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, feedFunctionUnion,
BuiltinType.ASTRING };
return new ARecordType(null, fieldNames, fieldTypes, true);
@@ -157,13 +170,14 @@
public static final int DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX = 4;
public static final int DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX = 5;
public static final int DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX = 6;
- public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 7;
- public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 8;
- public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 9;
+ public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 7;
+ public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 8;
+ public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 9;
+ public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 10;
- private static final ARecordType createDatasetRecordType() {
+ private static final ARecordType createDatasetRecordType() throws AsterixException {
String[] fieldNames = { "DataverseName", "DatasetName", "DataTypeName", "DatasetType", "InternalDetails",
- "ExternalDetails", "FeedDetails", "Timestamp", "DatasetId", "PendingOp" };
+ "ExternalDetails", "FeedDetails", "Hints", "Timestamp", "DatasetId", "PendingOp" };
List<IAType> internalRecordUnionList = new ArrayList<IAType>();
internalRecordUnionList.add(BuiltinType.ANULL);
@@ -180,9 +194,11 @@
feedRecordUnionList.add(FEED_DETAILS_RECORDTYPE);
AUnionType feedRecordUnion = new AUnionType(feedRecordUnionList, null);
+ AUnorderedListType unorderedListOfHintsType = new AUnorderedListType(DATASET_HINTS_RECORDTYPE, null);
+
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
- internalRecordUnion, externalRecordUnion, feedRecordUnion, BuiltinType.ASTRING, BuiltinType.AINT32,
- BuiltinType.AINT32 };
+ internalRecordUnion, externalRecordUnion, feedRecordUnion, unorderedListOfHintsType,
+ BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT32 };
return new ARecordType("DatasetRecordType", fieldNames, fieldTypes, true);
}
@@ -191,7 +207,7 @@
public static final int FIELD_ARECORD_FIELDNAME_FIELD_INDEX = 0;
public static final int FIELD_ARECORD_FIELDTYPE_FIELD_INDEX = 1;
- private static final ARecordType createFieldRecordType() {
+ private static final ARecordType createFieldRecordType() throws AsterixException {
String[] fieldNames = { "FieldName", "FieldType" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
return new ARecordType(null, fieldNames, fieldTypes, true);
@@ -202,7 +218,7 @@
public static final int RECORDTYPE_ARECORD_ISOPEN_FIELD_INDEX = 0;
public static final int RECORDTYPE_ARECORD_FIELDS_FIELD_INDEX = 1;
- private static final ARecordType createRecordTypeRecordType() {
+ private static final ARecordType createRecordTypeRecordType() throws AsterixException {
AOrderedListType olType = new AOrderedListType(FIELD_RECORDTYPE, null);
String[] fieldNames = { "IsOpen", "Fields" };
IAType[] fieldTypes = { BuiltinType.ABOOLEAN, olType };
@@ -219,7 +235,7 @@
public static final int DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX = 5;
public static final int DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX = 6;
- private static final ARecordType createDerivedTypeRecordType() {
+ private static final ARecordType createDerivedTypeRecordType() throws AsterixException {
String[] fieldNames = { "Tag", "IsAnonymous", "EnumValues", "Record", "Union", "UnorderedList", "OrderedList" };
List<IAType> recordUnionList = new ArrayList<IAType>();
recordUnionList.add(BuiltinType.ANULL);
@@ -248,7 +264,7 @@
public static final int DATATYPE_ARECORD_DERIVED_FIELD_INDEX = 2;
public static final int DATATYPE_ARECORD_TIMESTAMP_FIELD_INDEX = 3;
- private static final ARecordType createDatatypeRecordType() {
+ private static final ARecordType createDatatypeRecordType() throws AsterixException {
String[] fieldNames = { "DataverseName", "DatatypeName", "Derived", "Timestamp" };
List<IAType> recordUnionList = new ArrayList<IAType>();
recordUnionList.add(BuiltinType.ANULL);
@@ -269,7 +285,7 @@
public static final int INDEX_ARECORD_TIMESTAMP_FIELD_INDEX = 6;
public static final int INDEX_ARECORD_PENDINGOP_FIELD_INDEX = 7;
- private static final ARecordType createIndexRecordType() {
+ private static final ARecordType createIndexRecordType() throws AsterixException {
AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
String[] fieldNames = { "DataverseName", "DatasetName", "IndexName", "IndexStructure", "SearchKey",
"IsPrimary", "Timestamp", "PendingOp" };
@@ -284,7 +300,7 @@
public static final int NODE_ARECORD_NUMBEROFCORES_FIELD_INDEX = 1;
public static final int NODE_ARECORD_WORKINGMEMORYSIZE_FIELD_INDEX = 2;
- private static final ARecordType createNodeRecordType() {
+ private static final ARecordType createNodeRecordType() throws AsterixException {
String[] fieldNames = { "NodeName", "NumberOfCores", "WorkingMemorySize" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT32 };
return new ARecordType("NodeRecordType", fieldNames, fieldTypes, true);
@@ -296,7 +312,7 @@
public static final int NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX = 1;
public static final int NODEGROUP_ARECORD_TIMESTAMP_FIELD_INDEX = 2;
- private static final ARecordType createNodeGroupRecordType() {
+ private static final ARecordType createNodeGroupRecordType() throws AsterixException {
AUnorderedListType ulType = new AUnorderedListType(BuiltinType.ASTRING, null);
String[] fieldNames = { "GroupName", "NodeNames", "Timestamp" };
IAType[] fieldTypes = { BuiltinType.ASTRING, ulType, BuiltinType.ASTRING };
@@ -318,7 +334,7 @@
public static final int FUNCTION_ARECORD_FUNCTION_LANGUAGE_FIELD_INDEX = 6;
public static final int FUNCTION_ARECORD_FUNCTION_KIND_FIELD_INDEX = 7;
- private static final ARecordType createFunctionRecordType() {
+ private static final ARecordType createFunctionRecordType() throws AsterixException {
String[] fieldNames = { "DataverseName", "Name", "Arity", "Params", "ReturnType", "Definition", "Language",
"Kind" };
@@ -334,7 +350,7 @@
public static final int DATASOURCE_ADAPTER_ARECORD_TYPE_FIELD_INDEX = 3;
public static final int DATASOURCE_ADAPTER_ARECORD_TIMESTAMP_FIELD_INDEX = 4;
- private static ARecordType createDatasourceAdapterRecordType() {
+ private static ARecordType createDatasourceAdapterRecordType() throws AsterixException {
String[] fieldNames = { "DataverseName", "Name", "Classname", "Type", "Timestamp" };
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
BuiltinType.ASTRING };
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
index b7ba1a1..e4c7ba2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlDataSource.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -15,6 +15,7 @@
package edu.uci.ics.asterix.metadata.declared;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -63,46 +64,54 @@
this.id = id;
this.dataset = dataset;
this.datasourceType = datasourceType;
- switch (datasourceType) {
- case FEED:
- initFeedDataset(itemType, dataset);
- case INTERNAL: {
- initInternalDataset(itemType);
- break;
+ try {
+ switch (datasourceType) {
+ case FEED:
+ initFeedDataset(itemType, dataset);
+ case INTERNAL: {
+ initInternalDataset(itemType);
+ break;
+ }
+ case EXTERNAL_FEED:
+ case EXTERNAL: {
+ initExternalDataset(itemType);
+ break;
+ }
+ default: {
+ throw new IllegalArgumentException();
+ }
}
- case EXTERNAL_FEED:
- case EXTERNAL: {
- initExternalDataset(itemType);
- break;
- }
- default: {
- throw new IllegalArgumentException();
- }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
}
}
public AqlDataSource(AqlSourceId id, Dataset dataset, IAType itemType) throws AlgebricksException {
this.id = id;
this.dataset = dataset;
- switch (dataset.getDatasetType()) {
- case FEED:
- initFeedDataset(itemType, dataset);
- break;
- case INTERNAL:
- initInternalDataset(itemType);
- break;
- case EXTERNAL: {
- initExternalDataset(itemType);
- break;
+ try {
+ switch (dataset.getDatasetType()) {
+ case FEED:
+ initFeedDataset(itemType, dataset);
+ break;
+ case INTERNAL:
+ initInternalDataset(itemType);
+ break;
+ case EXTERNAL: {
+ initExternalDataset(itemType);
+ break;
+ }
+ default: {
+ throw new IllegalArgumentException();
+ }
}
- default: {
- throw new IllegalArgumentException();
- }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
}
}
// TODO: Seems like initFeedDataset() could simply call this method.
- private void initInternalDataset(IAType itemType) {
+ private void initInternalDataset(IAType itemType) throws IOException {
List<String> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
ARecordType recordType = (ARecordType) itemType;
int n = partitioningKeys.size();
@@ -114,7 +123,7 @@
domain = new AsterixNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
}
- private void initFeedDataset(IAType itemType, Dataset dataset) {
+ private void initFeedDataset(IAType itemType, Dataset dataset) throws IOException {
if (dataset.getDatasetDetails() instanceof ExternalDatasetDetails) {
initExternalDataset(itemType);
} else {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index 6ba88ee..fc39484 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,6 +16,7 @@
package edu.uci.ics.asterix.metadata.declared;
import java.io.File;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -29,18 +30,16 @@
import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
import edu.uci.ics.asterix.common.parse.IParseFileSplitsDecl;
import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
import edu.uci.ics.asterix.external.data.operator.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedMessageOperatorDescriptor;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.operator.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.feed.operator.FeedMessageOperatorDescriptor;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
@@ -97,7 +96,6 @@
import edu.uci.ics.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
@@ -288,7 +286,7 @@
throw new AlgebricksException("Can only scan datasets of records.");
}
- IExternalDatasetAdapterFactory adapterFactory;
+ IGenericDatasetAdapterFactory adapterFactory;
IDatasourceAdapter adapter;
String adapterName;
DatasourceAdapter adapterEntity;
@@ -299,22 +297,22 @@
adapterName);
if (adapterEntity != null) {
adapterFactoryClassname = adapterEntity.getClassname();
- adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
} else {
adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
if (adapterFactoryClassname == null) {
throw new AlgebricksException(" Unknown adapter :" + adapterName);
}
- adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
}
- adapter = ((IExternalDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
+ adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
itemType);
} catch (AlgebricksException ae) {
throw ae;
} catch (Exception e) {
e.printStackTrace();
- throw new AlgebricksException("unable to load the adapter factory class " + e);
+ throw new AlgebricksException("Unable to create adapter " + e);
}
if (!(adapter.getAdapterType().equals(IDatasourceAdapter.AdapterType.READ) || adapter.getAdapterType().equals(
@@ -329,7 +327,13 @@
ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec,
adapterFactoryClassname, datasetDetails.getProperties(), rt, scannerDesc);
- AlgebricksPartitionConstraint constraint = adapter.getPartitionConstraint();
+ AlgebricksPartitionConstraint constraint;
+ try {
+ constraint = adapter.getPartitionConstraint();
+ } catch (Exception e) {
+ throw new AlgebricksException(e);
+ }
+
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(dataScanner, constraint);
}
@@ -362,25 +366,45 @@
FeedDatasetDetails datasetDetails = (FeedDatasetDetails) dataset.getDatasetDetails();
DatasourceAdapter adapterEntity;
IDatasourceAdapter adapter;
- IFeedDatasetAdapterFactory adapterFactory;
+ IAdapterFactory adapterFactory;
IAType adapterOutputType;
+ String adapterName;
+ String adapterFactoryClassname;
try {
- adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, datasetDetails.getAdapterFactory());
- adapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterEntity.getClassname()).newInstance();
- if (adapterFactory.getFeedAdapterType().equals(FeedAdapterType.TYPED)) {
- adapter = ((ITypedFeedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails
- .getProperties());
- adapterOutputType = ((IFeedDatasourceAdapter) adapter).getAdapterOutputType();
+ adapterName = datasetDetails.getAdapterFactory();
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
+ adapterName);
+ if (adapterEntity != null) {
+ adapterFactoryClassname = adapterEntity.getClassname();
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
} else {
- String outputTypeName = datasetDetails.getProperties().get(
- IGenericFeedDatasetAdapterFactory.KEY_TYPE_NAME);
- adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, outputTypeName).getDatatype();
- adapter = ((IGenericFeedDatasetAdapterFactory) adapterFactory).createAdapter(
- datasetDetails.getProperties(), adapterOutputType);
+ adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
+ if (adapterFactoryClassname != null) {
+ } else {
+ // adapterName has been provided as a fully qualified classname
+ adapterFactoryClassname = adapterName;
+ }
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
}
+
+ if (adapterFactory instanceof ITypedDatasetAdapterFactory) {
+ adapter = ((ITypedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties());
+ adapterOutputType = ((ITypedDatasourceAdapter) adapter).getAdapterOutputType();
+ } else if (adapterFactory instanceof IGenericDatasetAdapterFactory) {
+ String outputTypeName = datasetDetails.getProperties().get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME);
+ adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(),
+ outputTypeName).getDatatype();
+ adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(
+ datasetDetails.getProperties(), adapterOutputType);
+ } else {
+ throw new IllegalStateException(" Unknown factory type for " + adapterFactoryClassname);
+ }
+ } catch (AlgebricksException ae) {
+ throw ae;
} catch (Exception e) {
- throw new AlgebricksException(e);
+ e.printStackTrace();
+ throw new AlgebricksException("unable to create adapter " + e);
}
ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
@@ -388,11 +412,16 @@
RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
FeedIntakeOperatorDescriptor feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, new FeedId(
- dataset.getDataverseName(), dataset.getDatasetName()), adapterEntity.getClassname(),
+ dataset.getDataverseName(), dataset.getDatasetName()), adapterFactoryClassname,
datasetDetails.getProperties(), (ARecordType) adapterOutputType, feedDesc);
- return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor,
- adapter.getPartitionConstraint());
+ AlgebricksPartitionConstraint constraint = null;
+ try {
+ constraint = adapter.getPartitionConstraint();
+ } catch (Exception e) {
+ throw new AlgebricksException(e);
+ }
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor, constraint);
}
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedMessengerRuntime(
@@ -460,19 +489,13 @@
JobId jobId = ((JobEventListenerFactory) jobSpec.getJobletEventListenerFactory()).getJobId();
int datasetId = dataset.getDatasetId();
int[] primaryKeyFields = new int[numPrimaryKeys];
- List<LogicalVariable> primaryKeyVars = new ArrayList<LogicalVariable>();
- int varIndexOffset = outputVars.size() - numPrimaryKeys - 1;
for (int i = 0; i < numPrimaryKeys; i++) {
primaryKeyFields[i] = i;
- primaryKeyVars.add(new LogicalVariable(outputVars.get(varIndexOffset + i).getId()));
}
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(primaryKeyVars, typeEnv, context);
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
searchCallbackFactory = new PrimaryIndexSearchOperationCallbackFactory(jobId, datasetId,
- primaryKeyFields, entityIdFieldHashFunctionFactories, txnSubsystemProvider,
- ResourceType.LSM_BTREE);
+ primaryKeyFields, txnSubsystemProvider, ResourceType.LSM_BTREE);
}
BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(jobSpec, outputRecDesc,
@@ -742,12 +765,9 @@
for (i = 0; i < numKeys; i++) {
primaryKeyFields[i] = i;
}
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(keys, typeEnv, context);
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
PrimaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new PrimaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, entityIdFieldHashFunctionFactories, txnSubsystemProvider,
- indexOp, ResourceType.LSM_BTREE);
+ jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
LSMTreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new LSMTreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, recordDesc, appContext.getStorageManagerInterface(),
@@ -933,12 +953,9 @@
primaryKeyFields[i] = idx;
i++;
}
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(primaryKeys, typeEnv, context);
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, entityIdFieldHashFunctionFactories, txnSubsystemProvider,
- indexOp, ResourceType.LSM_BTREE);
+ jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
LSMTreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new LSMTreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, recordDesc, appContext.getStorageManagerInterface(),
@@ -954,6 +971,8 @@
splitsAndConstraint.second);
} catch (MetadataException e) {
throw new AlgebricksException(e);
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
}
}
@@ -1027,12 +1046,9 @@
primaryKeyFields[i] = idx;
i++;
}
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(primaryKeys, typeEnv, context);
TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
- jobId, datasetId, primaryKeyFields, entityIdFieldHashFunctionFactories, txnSubsystemProvider,
- indexOp, ResourceType.LSM_RTREE);
+ jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
LSMTreeIndexInsertUpdateDeleteOperatorDescriptor rtreeUpdate = new LSMTreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, recordDesc, appContext.getStorageManagerInterface(),
@@ -1047,8 +1063,8 @@
GlobalConfig.DEFAULT_INDEX_MEM_PAGE_SIZE, GlobalConfig.DEFAULT_INDEX_MEM_NUM_PAGES),
filterFactory, modificationCallbackFactory);
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeUpdate, splitsAndConstraint.second);
- } catch (MetadataException me) {
- throw new AlgebricksException(me);
+ } catch (MetadataException | IOException e) {
+ throw new AlgebricksException(e);
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
deleted file mode 100644
index cc48d1e..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.metadata.entities;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-
-public class AsterixBuiltinArtifactMap {
-
- public enum ARTIFACT_KIND {
- DATASET,
- DATAVERSE,
- FUNCTION,
- NODEGROUP
- }
-
- public static final String ARTIFACT_TYPE_DATASET = "DATASET";
- public static final String ARTIFACT_TYPE_DATAVERSE = "DATAVERSE";
- public static final String ARTIFACT_TYPE_FUNCTION = "FUNCTION";
- public static final String ARTIFACT_TYPE_NODEGROUP = "NODEGROUP";
-
- public static final String DATASET_DATASETS = "Dataset";
- public static final String DATASET_INDEX = "Index";
- public static final String DATASET_NODEGROUP = "NodeGroup";
-
- public static final String DATAVERSE_METADATA = "Metadata";
-
- public static final String NODEGROUP_DEFAULT = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
-
- private static final Map<ARTIFACT_KIND, Set<String>> builtinArtifactMap = new HashMap<ARTIFACT_KIND, Set<String>>();
-
- static {
- Set<String> datasets = new HashSet<String>();
- datasets.add(DATASET_DATASETS);
- datasets.add(DATASET_INDEX);
- datasets.add(DATASET_NODEGROUP);
- builtinArtifactMap.put(ARTIFACT_KIND.DATASET, datasets);
-
- Set<String> dataverses = new HashSet<String>();
- dataverses.add(DATAVERSE_METADATA);
- builtinArtifactMap.put(ARTIFACT_KIND.DATAVERSE, dataverses);
-
- Set<String> nodeGroups = new HashSet<String>();
- nodeGroups.add(NODEGROUP_DEFAULT);
- builtinArtifactMap.put(ARTIFACT_KIND.NODEGROUP, nodeGroups);
-
- }
-
- public static boolean isSystemProtectedArtifact(ARTIFACT_KIND kind, Object artifactIdentifier) {
- switch (kind) {
- case NODEGROUP:
- case DATASET:
- case DATAVERSE:
- return builtinArtifactMap.get(kind).contains((String) artifactIdentifier);
-
- case FUNCTION:
- return AsterixBuiltinFunctions.isBuiltinCompilerFunction((FunctionIdentifier) artifactIdentifier);
- default:
- return false;
- }
- }
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
index 734237e..27cf542 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinTypeMap.java
@@ -40,6 +40,7 @@
_builtinTypeMap.put("time", BuiltinType.ATIME);
_builtinTypeMap.put("datetime", BuiltinType.ADATETIME);
_builtinTypeMap.put("duration", BuiltinType.ADURATION);
+ _builtinTypeMap.put("interval", BuiltinType.AINTERVAL);
_builtinTypeMap.put("point", BuiltinType.APOINT);
_builtinTypeMap.put("point3d", BuiltinType.APOINT3D);
_builtinTypeMap.put("line", BuiltinType.ALINE);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
index e710c90..91a18f3 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
@@ -15,6 +15,8 @@
package edu.uci.ics.asterix.metadata.entities;
+import java.util.Map;
+
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.MetadataCache;
@@ -33,13 +35,15 @@
// Type of items stored in this dataset.
private final String itemTypeName;
private final DatasetType datasetType;
- private IDatasetDetails datasetDetails;
+ private final IDatasetDetails datasetDetails;
    // Hints related to cardinality of dataset, avg size of tuples etc.
+ private final Map<String, String> hints;
private final int datasetId;
// Type of pending operations with respect to atomic DDL operation
private final int pendingOp;
public Dataset(String dataverseName, String datasetName, String itemTypeName, IDatasetDetails datasetDetails,
- DatasetType datasetType, int datasetId, int pendingOp) {
+ Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
this.itemTypeName = itemTypeName;
@@ -47,6 +51,7 @@
this.datasetDetails = datasetDetails;
this.datasetId = datasetId;
this.pendingOp = pendingOp;
+ this.hints = hints;
}
public String getDataverseName() {
@@ -69,14 +74,14 @@
return datasetDetails;
}
- public void setDatasetDetails(IDatasetDetails datasetDetails) {
- this.datasetDetails = datasetDetails;
+ public Map<String, String> getHints() {
+ return hints;
}
public int getDatasetId() {
return datasetId;
}
-
+
public int getPendingOp() {
return pendingOp;
}
@@ -90,7 +95,7 @@
public Object dropFromCache(MetadataCache cache) {
return cache.dropDataset(this);
}
-
+
@Override
public boolean equals(Object other) {
if (this == other) {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
index 07da617..18cef340 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/ExternalDatasetDetails.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,6 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.metadata.entities;
import java.io.DataOutput;
@@ -22,6 +23,7 @@
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -76,7 +78,8 @@
fieldValue.reset();
aString.setValue(this.getAdapter());
stringSerde.serialize(aString, fieldValue.getDataOutput());
- externalRecordBuilder.addField(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX, fieldValue);
+ externalRecordBuilder.addField(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX,
+ fieldValue);
// write field 1
listBuilder.reset((AOrderedListType) externalRecordType.getFieldTypes()[1]);
@@ -93,8 +96,8 @@
try {
externalRecordBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
}
}
@@ -121,8 +124,8 @@
try {
propertyRecordBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
}
}
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
index 895466d..22de3d3 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,6 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.metadata.entities;
import java.io.DataOutput;
@@ -23,6 +24,7 @@
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -34,6 +36,11 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+/**
+ * Provides functionality for writing parameters for a FEED dataset into the
+ * Metadata. Since FEED dataset is a special kind of INTERNAL dataset, this
+ * class extends InternalDatasetDetails.
+ */
public class FeedDatasetDetails extends InternalDatasetDetails {
private static final long serialVersionUID = 1L;
@@ -141,8 +148,8 @@
// write field 7
fieldValue.reset();
- if (getFunction() != null) {
- aString.setValue(getFunction().toString());
+ if (signature != null) {
+ aString.setValue(signature.toString());
stringSerde.serialize(aString, fieldValue.getDataOutput());
feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
}
@@ -155,8 +162,8 @@
try {
feedRecordBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
}
}
@@ -183,8 +190,8 @@
try {
propertyRecordBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
index 7c23c65..6d65730 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
@@ -133,28 +133,28 @@
}
throw new AlgebricksException("Could not find field " + expr + " in the schema.");
}
-
+
@Override
public int hashCode() {
- return indexName.hashCode() ^ datasetName.hashCode() ^ dataverseName.hashCode();
+ return indexName.hashCode() ^ datasetName.hashCode() ^ dataverseName.hashCode();
}
-
+
@Override
public boolean equals(Object other) {
- if (!(other instanceof Index)) {
- return false;
- }
- Index otherIndex = (Index) other;
- if (!indexName.equals(otherIndex.getIndexName())) {
- return false;
- }
- if (!datasetName.equals(otherIndex.getDatasetName())) {
- return false;
- }
- if (!dataverseName.equals(otherIndex.getDataverseName())) {
- return false;
- }
- return true;
+ if (!(other instanceof Index)) {
+ return false;
+ }
+ Index otherIndex = (Index) other;
+ if (!indexName.equals(otherIndex.getIndexName())) {
+ return false;
+ }
+ if (!datasetName.equals(otherIndex.getDatasetName())) {
+ return false;
+ }
+ if (!dataverseName.equals(otherIndex.getDataverseName())) {
+ return false;
+ }
+ return true;
}
@Override
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
index 51d154a..4267af2 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/InternalDatasetDetails.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,6 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.metadata.entities;
import java.io.DataOutput;
@@ -22,6 +23,7 @@
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -146,8 +148,8 @@
try {
internalRecordBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new HyracksDataException(e);
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 7af5aae..61f856a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -28,11 +28,13 @@
import edu.uci.ics.asterix.builders.IARecordBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.common.config.DatasetConfig;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.IDatasetDetails;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Dataset;
@@ -44,16 +46,18 @@
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
import edu.uci.ics.asterix.om.base.ARecord;
import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.AUnorderedList;
import edu.uci.ics.asterix.om.base.IACursor;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
/**
* Translates a Dataset metadata entity to an ITupleReference and vice versa.
@@ -67,9 +71,6 @@
// Payload field containing serialized Dataset.
public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
- private FileSplit[] splits;
- private List<String> partitioningKey;
- private List<String> primaryKey;
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
@@ -109,7 +110,69 @@
int pendingOp = ((AInt32) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
switch (datasetType) {
- case FEED:
+ case FEED: {
+ ARecord datasetDetailsRecord = (ARecord) datasetRecord
+ .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX);
+ FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX))
+ .getStringValue());
+ PartitioningStrategy partitioningStrategy = PartitioningStrategy
+ .valueOf(((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX))
+ .getStringValue());
+ IACursor cursor = ((AOrderedList) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
+ List<String> partitioningKey = new ArrayList<String>();
+ while (cursor.next())
+ partitioningKey.add(((AString) cursor.get()).getStringValue());
+ String groupName = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
+ .getStringValue();
+ String adapter = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
+ .getStringValue();
+ cursor = ((AOrderedList) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
+ Map<String, String> properties = new HashMap<String, String>();
+ String key;
+ String value;
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
+ .getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
+ .getStringValue();
+ properties.put(key, value);
+ }
+
+ Object o = datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX);
+ FunctionSignature signature = null;
+ if (!(o instanceof ANull)) {
+ String functionIdentifier = ((AString) o).getStringValue();
+ String[] qnameComponents = functionIdentifier.split("\\.");
+ String functionDataverse;
+ String functionName;
+ if (qnameComponents.length == 2) {
+ functionDataverse = qnameComponents[0];
+ functionName = qnameComponents[1];
+ } else {
+ functionDataverse = dataverseName;
+ functionName = qnameComponents[0];
+ }
+
+ String[] nameComponents = functionName.split("@");
+ signature = new FunctionSignature(functionDataverse, nameComponents[0],
+ Integer.parseInt(nameComponents[1]));
+ }
+
+ String feedState = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX)).getStringValue();
+
+ datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+ partitioningKey, groupName, adapter, properties, signature, feedState);
+ break;
+ }
case INTERNAL: {
ARecord datasetDetailsRecord = (ARecord) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
@@ -130,48 +193,9 @@
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
.getStringValue();
- if (datasetType == DatasetConfig.DatasetType.INTERNAL) {
- datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName);
- } else {
- String adapter = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
- .getStringValue();
- cursor = ((AOrderedList) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
- .getCursor();
- Map<String, String> properties = new HashMap<String, String>();
- String key;
- String value;
- while (cursor.next()) {
- ARecord field = (ARecord) cursor.get();
- key = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
- .getStringValue();
- value = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
- .getStringValue();
- properties.put(key, value);
- }
+ datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+ partitioningKey, groupName);
- String functionIdentifier = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX))
- .getStringValue();
- String[] nameComponents1 = functionIdentifier.split(".");
- String functionDataverse = nameComponents1[0];
- String[] nameComponents2 = nameComponents1[1].split("@");
- String functionName = nameComponents2[0];
- FunctionSignature signature = new FunctionSignature(functionDataverse, functionName,
- Integer.parseInt(nameComponents2[1]));
-
- String feedState = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX))
- .getStringValue();
-
- datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName, adapter, properties, signature, feedState);
-
- }
break;
}
@@ -189,21 +213,22 @@
String value;
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
.getStringValue();
- value = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
.getStringValue();
properties.put(key, value);
}
datasetDetails = new ExternalDatasetDetails(adapter, properties);
}
- return new Dataset(dataverseName, datasetName, typeName, datasetDetails, datasetType, datasetId, pendingOp);
+
+ Map<String, String> hints = getDatasetHints(datasetRecord);
+
+ return new Dataset(dataverseName, datasetName, typeName, datasetDetails, hints, datasetType, datasetId, pendingOp);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(dataset.getDataverseName());
@@ -246,25 +271,45 @@
writeDatasetDetailsRecordType(recordBuilder, dataset, fieldValue.getDataOutput());
// write field 7
+ UnorderedListBuilder listBuilder = new UnorderedListBuilder();
+ listBuilder
+ .reset((AUnorderedListType) MetadataRecordTypes.DATASET_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX]);
+ ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+ for (Map.Entry<String, String> property : dataset.getHints().entrySet()) {
+ String name = property.getKey();
+ String value = property.getValue();
+ itemValue.reset();
+ writeDatasetHintRecord(name, value, itemValue.getDataOutput());
+ listBuilder.addItem(itemValue);
+ }
+ fieldValue.reset();
+ listBuilder.write(fieldValue.getDataOutput(), true);
+ recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX, fieldValue);
+
+ // write field 8
fieldValue.reset();
aString.setValue(Calendar.getInstance().getTime().toString());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
- // write field 8
+ // write field 9
fieldValue.reset();
aInt32.setValue(dataset.getDatasetId());
aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX, fieldValue);
- // write field 9
+ // write field 10
fieldValue.reset();
aInt32.setValue(dataset.getPendingOp());
aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
// write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -289,25 +334,49 @@
}
- public void writePropertyTypeRecord(String name, String value, DataOutput out) throws IOException {
+ private Map<String, String> getDatasetHints(ARecord datasetRecord) {
+ Map<String, String> hints = new HashMap<String, String>();
+ String key;
+ String value;
+ AUnorderedList list = (AUnorderedList) datasetRecord
+ .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
+ IACursor cursor = list.getCursor();
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_NAME_FIELD_INDEX))
+ .getStringValue();
+ value = ((AString) field.getValueByPos(MetadataRecordTypes.DATASOURCE_PROPERTIES_VALUE_FIELD_INDEX))
+ .getStringValue();
+ hints.put(key, value);
+ }
+ return hints;
+ }
+
+ private void writeDatasetHintRecord(String name, String value, DataOutput out) throws HyracksDataException {
IARecordBuilder propertyRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
- propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
+ propertyRecordBuilder.reset(MetadataRecordTypes.DATASET_HINTS_RECORDTYPE);
+ AMutableString aString = new AMutableString("");
+ ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
aString.setValue(name);
stringSerde.serialize(aString, fieldValue.getDataOutput());
- propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX,
- fieldValue);
+ propertyRecordBuilder.addField(0, fieldValue);
// write field 1
fieldValue.reset();
aString.setValue(value);
stringSerde.serialize(aString, fieldValue.getDataOutput());
- propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX,
- fieldValue);
+ propertyRecordBuilder.addField(1, fieldValue);
- propertyRecordBuilder.write(out, true);
+ try {
+ propertyRecordBuilder.write(out, true);
+ } catch (IOException | AsterixException ioe) {
+ throw new HyracksDataException(ioe);
+ }
}
+
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 6353e99..4a5e4dcf 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2012 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,6 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package edu.uci.ics.asterix.metadata.entitytupletranslators;
import java.io.ByteArrayInputStream;
@@ -20,6 +21,7 @@
import java.io.IOException;
import java.util.Calendar;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.external.dataset.adapter.AdapterIdentifier;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.MetadataException;
@@ -64,7 +66,8 @@
private DatasourceAdapter createAdapterFromARecord(ARecord adapterRecord) {
String dataverseName = ((AString) adapterRecord
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
+ .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX))
+ .getStringValue();
String adapterName = ((AString) adapterRecord
.getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX)).getStringValue();
String classname = ((AString) adapterRecord
@@ -76,7 +79,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(adapter.getAdapterIdentifier().getNamespace());
@@ -121,7 +124,11 @@
recordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
// write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index a2f5f49..7cfa6d0 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -28,6 +28,7 @@
import edu.uci.ics.asterix.builders.IARecordBuilder;
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.MetadataNode;
@@ -141,8 +142,12 @@
fieldTypes[fieldId] = getTypeFromTypeName(dataverseName, fieldTypeName);
fieldId++;
}
- return new Datatype(dataverseName, datatypeName, new ARecordType(datatypeName, fieldNames,
- fieldTypes, isOpen), isAnonymous);
+ try {
+ return new Datatype(dataverseName, datatypeName, new ARecordType(datatypeName, fieldNames,
+ fieldTypes, isOpen), isAnonymous);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
}
case UNION: {
IACursor cursor = ((AOrderedList) derivedTypeRecord
@@ -190,7 +195,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException, MetadataException {
// write the key in the first two fields of the tuple
tupleBuilder.reset();
aString.setValue(dataType.getDataverseName());
@@ -219,7 +224,11 @@
ATypeTag tag = dataType.getDatatype().getTypeTag();
if (isDerivedType(tag)) {
fieldValue.reset();
- writeDerivedTypeRecord(dataType, fieldValue.getDataOutput());
+ try {
+ writeDerivedTypeRecord(dataType, fieldValue.getDataOutput());
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_DERIVED_FIELD_INDEX, fieldValue);
}
@@ -230,14 +239,18 @@
recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
// write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
- public void writeDerivedTypeRecord(Datatype type, DataOutput out) throws IOException {
+ private void writeDerivedTypeRecord(Datatype type, DataOutput out) throws IOException, AsterixException {
DerivedTypeTag tag;
IARecordBuilder derivedRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
@@ -349,7 +362,7 @@
listBuilder.write(dataOutput, true);
}
- public void writeRecordType(Datatype instance, DataOutput out) throws IOException {
+ private void writeRecordType(Datatype instance, DataOutput out) throws IOException, AsterixException {
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index d23cdd0..23e42d6 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -21,7 +21,9 @@
import java.io.IOException;
import java.util.Calendar;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
@@ -71,7 +73,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getDataverseName());
@@ -104,7 +106,11 @@
aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 8296a22..c34bc72 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -23,7 +23,9 @@
import java.util.List;
import edu.uci.ics.asterix.builders.OrderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Function;
@@ -48,7 +50,6 @@
// Third key field.
public static final int FUNCTION_FUNCTIONARITY_TUPLE_FIELD_INDEX = 2;
-
// Payload field containing serialized Function.
public static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
@@ -103,7 +104,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(function.getDataverseName());
@@ -178,7 +179,11 @@
recordBuilder.addField(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_KIND_FIELD_INDEX, fieldValue);
// write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index c75b7b2..e6f0462 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -25,7 +25,9 @@
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Index;
@@ -108,7 +110,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException, MetadataException {
// write the key in the first 3 fields of the tuple
tupleBuilder.reset();
aString.setValue(instance.getDataverseName());
@@ -188,11 +190,19 @@
aString.setValue(GRAM_LENGTH_FIELD_NAME);
stringSerde.serialize(aString, nameValue.getDataOutput());
intSerde.serialize(new AInt32(instance.getGramLength()), fieldValue.getDataOutput());
- recordBuilder.addField(nameValue, fieldValue);
+ try {
+ recordBuilder.addField(nameValue, fieldValue);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
}
// write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index da66d4b..ce72322 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -24,7 +24,9 @@
import java.util.List;
import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.NodeGroup;
@@ -79,7 +81,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeGroupName());
@@ -114,7 +116,11 @@
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.NODEGROUP_ARECORD_TIMESTAMP_FIELD_INDEX, fieldValue);
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 8d324b4..9e276cc 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -17,7 +17,9 @@
import java.io.IOException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataRecordTypes;
import edu.uci.ics.asterix.metadata.entities.Node;
@@ -80,7 +82,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeName());
@@ -121,7 +123,11 @@
// listBuilder.write(fieldValue.getDataOutput());
// recordBuilder.addField(3, fieldValue);
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ try {
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ } catch (AsterixException e) {
+ throw new MetadataException(e);
+ }
tupleBuilder.addFieldEndOffset();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
index 4101bc8..820f277 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
@@ -1,5 +1,21 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.metadata.utils;
+import java.io.IOException;
import java.util.List;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
@@ -25,7 +41,12 @@
List<String> partitioningKeys = getPartitioningKeys(dataset);
IBinaryComparatorFactory[] bcfs = new IBinaryComparatorFactory[partitioningKeys.size()];
for (int i = 0; i < partitioningKeys.size(); i++) {
- IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+ IAType keyType;
+ try {
+ keyType = itemType.getFieldType(partitioningKeys.get(i));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
bcfs[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
}
return bcfs;
@@ -51,7 +72,12 @@
List<String> partitioningKeys = getPartitioningKeys(dataset);
IBinaryHashFunctionFactory[] bhffs = new IBinaryHashFunctionFactory[partitioningKeys.size()];
for (int i = 0; i < partitioningKeys.size(); i++) {
- IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+ IAType keyType;
+ try {
+ keyType = itemType.getFieldType(partitioningKeys.get(i));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
bhffs[i] = hashFunProvider.getBinaryHashFunctionFactory(keyType);
}
return bhffs;
@@ -66,7 +92,12 @@
int numKeys = partitioningKeys.size();
ITypeTraits[] typeTraits = new ITypeTraits[numKeys + 1];
for (int i = 0; i < numKeys; i++) {
- IAType keyType = itemType.getFieldType(partitioningKeys.get(i));
+ IAType keyType;
+ try {
+ keyType = itemType.getFieldType(partitioningKeys.get(i));
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
}
typeTraits[numKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index ff9e143..730f443 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -6,9 +6,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -17,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
</plugins>
@@ -33,20 +32,18 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId> hyracks-storage-am-lsm-invertedindex </artifactId>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
- <dependency>
+ <dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>
- hyracks-storage-am-lsm-invertedindex
- </artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
index 544b532..7cdb9a9 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/IARecordBuilder.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -18,6 +18,7 @@
import java.io.DataOutput;
import java.io.IOException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.data.std.api.IValueReference;
@@ -49,8 +50,10 @@
* The field name.
* @param out
* The field value.
+ * @throws AsterixException
+ * if the field name conflicts with a closed field name
*/
- public void addField(IValueReference name, IValueReference value);
+ public void addField(IValueReference name, IValueReference value) throws AsterixException;
/**
* @param out
@@ -59,8 +62,10 @@
* Whether to write a typetag as part of the record's serialized
* representation.
* @throws IOException
+ * @throws AsterixException
+ * if any open field names conflict with each other
*/
- public void write(DataOutput out, boolean writeTypeTag) throws IOException;
+ public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException;
public int getFieldId(String fieldName);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
index 03f0e20..f5d07ae 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/builders/RecordBuilder.java
@@ -1,40 +1,46 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.builders;
+import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.data.std.api.IValueReference;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
public class RecordBuilder implements IARecordBuilder {
- private int openPartOffset;
+ private final static int DEFAULT_NUM_OPEN_FIELDS = 10;
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
- private ARecordType recType;
-
- private ByteArrayOutputStream closedPartOutputStream;
- private int[] closedPartOffsets;
- private int numberOfClosedFields;
- private byte[] nullBitMap;
- private int nullBitMapSize;
-
- private ByteArrayOutputStream openPartOutputStream;
- private long[] openPartOffsets;
- private long[] tempOpenPartOffsets;
-
- private int numberOfOpenFields;
-
- private int fieldNameHashCode;
- private final IBinaryHashFunction utf8HashFunction;
-
- // for write()
private int openPartOffsetArraySize;
private byte[] openPartOffsetArray;
private int offsetPosition;
@@ -42,25 +48,40 @@
private boolean isOpen;
private boolean isNullable;
private int numberOfSchemaFields;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
+
+ private int openPartOffset;
+ private ARecordType recType;
+
+ private final IBinaryHashFunction utf8HashFunction;
+ private final IBinaryComparator utf8Comparator;
+
+ private final ByteArrayOutputStream closedPartOutputStream;
+ private int[] closedPartOffsets;
+ private int numberOfClosedFields;
+ private byte[] nullBitMap;
+ private int nullBitMapSize;
+
+ private final ByteArrayAccessibleOutputStream openPartOutputStream;
+ private long[] openPartOffsets;
+ private int[] openFieldNameLengths;
+
+ private int numberOfOpenFields;
public RecordBuilder() {
this.closedPartOutputStream = new ByteArrayOutputStream();
this.numberOfClosedFields = 0;
- this.openPartOutputStream = new ByteArrayOutputStream();
- this.openPartOffsets = new long[20];
- this.tempOpenPartOffsets = new long[20];
-
+ this.openPartOutputStream = new ByteArrayAccessibleOutputStream();
+ this.openPartOffsets = new long[DEFAULT_NUM_OPEN_FIELDS];
+ this.openFieldNameLengths = new int[DEFAULT_NUM_OPEN_FIELDS];
this.numberOfOpenFields = 0;
- this.fieldNameHashCode = 0;
this.utf8HashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
.createBinaryHashFunction();
+ this.utf8Comparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+ .createBinaryComparator();
- // for write()
this.openPartOffsetArray = null;
this.openPartOffsetArraySize = 0;
this.offsetPosition = 0;
@@ -135,23 +156,31 @@
}
@Override
- public void addField(IValueReference name, IValueReference value) {
+ public void addField(IValueReference name, IValueReference value) throws AsterixException {
if (numberOfOpenFields == openPartOffsets.length) {
- tempOpenPartOffsets = openPartOffsets;
- openPartOffsets = new long[numberOfOpenFields + 20];
- for (int i = 0; i < tempOpenPartOffsets.length; i++)
- openPartOffsets[i] = tempOpenPartOffsets[i];
+ openPartOffsets = Arrays.copyOf(openPartOffsets, openPartOffsets.length + DEFAULT_NUM_OPEN_FIELDS);
+ openFieldNameLengths = Arrays.copyOf(openFieldNameLengths, openFieldNameLengths.length
+ + DEFAULT_NUM_OPEN_FIELDS);
}
- fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength());
+ int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength());
+ if (recType != null) {
+ int cFieldPos = recType.findFieldPosition(name.getByteArray(), name.getStartOffset() + 1,
+ name.getLength() - 1);
+ if (cFieldPos >= 0) {
+ throw new AsterixException("Open field \"" + recType.getFieldNames()[cFieldPos]
+ + "\" has the same field name as closed field at index " + cFieldPos);
+ }
+ }
openPartOffsets[this.numberOfOpenFields] = fieldNameHashCode;
openPartOffsets[this.numberOfOpenFields] = (openPartOffsets[numberOfOpenFields] << 32);
- openPartOffsets[numberOfOpenFields++] += openPartOutputStream.size();
+ openPartOffsets[numberOfOpenFields] += openPartOutputStream.size();
+ openFieldNameLengths[numberOfOpenFields++] = name.getLength() - 1;
openPartOutputStream.write(name.getByteArray(), name.getStartOffset() + 1, name.getLength() - 1);
openPartOutputStream.write(value.getByteArray(), value.getStartOffset(), value.getLength());
}
@Override
- public void write(DataOutput out, boolean writeTypeTag) throws IOException {
+ public void write(DataOutput out, boolean writeTypeTag) throws IOException, AsterixException {
int h = headerSize;
int recordLength;
// prepare the open part
@@ -163,13 +192,27 @@
openPartOffsetArray = new byte[openPartOffsetArraySize];
Arrays.sort(this.openPartOffsets, 0, numberOfOpenFields);
+ if (numberOfOpenFields > 1) {
+ byte[] openBytes = openPartOutputStream.getByteArray();
+ for (int i = 1; i < numberOfOpenFields; i++) {
+ if (utf8Comparator.compare(openBytes, (int) openPartOffsets[i - 1], openFieldNameLengths[i - 1],
+ openBytes, (int) openPartOffsets[i], openFieldNameLengths[i]) == 0) {
+ String field = UTF8StringSerializerDeserializer.INSTANCE
+ .deserialize(new DataInputStream(new ByteArrayInputStream(openBytes,
+ (int) openPartOffsets[i], openFieldNameLengths[i])));
+ throw new AsterixException("Open fields " + (i - 1) + " and " + i
+ + " have the same field name \"" + field + "\"");
+ }
+ }
+ }
openPartOffset = h + numberOfSchemaFields * 4 + closedPartOutputStream.size();
+ int fieldNameHashCode;
for (int i = 0; i < numberOfOpenFields; i++) {
fieldNameHashCode = (int) (openPartOffsets[i] >> 32);
SerializerDeserializerUtil.writeIntToByteArray(openPartOffsetArray, (int) fieldNameHashCode,
offsetPosition);
- int fieldOffset = (int) ((openPartOffsets[i] << 64) >> 64);
+ int fieldOffset = (int) openPartOffsets[i];
SerializerDeserializerUtil.writeIntToByteArray(openPartOffsetArray, fieldOffset + openPartOffset + 4
+ openPartOffsetArraySize, offsetPosition + 4);
offsetPosition += 8;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java
new file mode 100644
index 0000000..464a03c
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/ADateOrTimeAscBinaryComparatorFactory.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+
+public class ADateOrTimeAscBinaryComparatorFactory implements IBinaryComparatorFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final ADateOrTimeAscBinaryComparatorFactory INSTANCE = new ADateOrTimeAscBinaryComparatorFactory();
+
+ private ADateOrTimeAscBinaryComparatorFactory() {
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory#createBinaryComparator()
+ */
+ @Override
+ public IBinaryComparator createBinaryComparator() {
+ return new IBinaryComparator() {
+
+ @Override
+ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+ int chrononTime1 = getInt(b1, s1);
+ int chrononTime2 = getInt(b2, s2);
+
+ if (chrononTime1 > chrononTime2) {
+ return 1;
+ } else if (chrononTime1 < chrononTime2) {
+ return -1;
+ } else {
+ return 0;
+ }
+ }
+
+ private int getInt(byte[] bytes, int start) {
+ return ((bytes[start] & 0xff) << 24) + ((bytes[start + 1] & 0xff) << 16)
+ + ((bytes[start + 2] & 0xff) << 8) + ((bytes[start + 3] & 0xff) << 0);
+ }
+ };
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index 881590d..0e486e7 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -24,15 +24,21 @@
public IBinaryComparator createBinaryComparator() {
return new IBinaryComparator() {
final IBinaryComparator ascBoolComp = BooleanBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY)
+ .createBinaryComparator();
final IBinaryComparator ascLongComp = LongBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
- final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY).createBinaryComparator();
- final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+ .createBinaryComparator();
+ final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY)
+ .createBinaryComparator();
+ final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY)
+ .createBinaryComparator();
final IBinaryComparator ascRectangleComp = RectangleBinaryComparatorFactory.INSTANCE
.createBinaryComparator();
final IBinaryComparator ascDateTimeComp = ADateTimeAscBinaryComparatorFactory.INSTANCE
.createBinaryComparator();
+ final IBinaryComparator ascDateOrTimeComp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
@@ -73,6 +79,10 @@
case DATETIME: {
return ascDateTimeComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
}
+ case TIME:
+ case DATE: {
+ return ascDateOrTimeComp.compare(b1, s1 + 1, l1 - 1, b2, s2 + 1, l2 - 1);
+ }
default: {
throw new NotImplementedException("Comparison for type " + tag + " is not implemented");
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
new file mode 100644
index 0000000..83b165b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.hash;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+/**
+ * An implementation of the Murmur3 hash family. The code is implemented based
+ * on the original <a
+ * href=http://code.google.com/p/guava-libraries/source/browse
+ * /guava/src/com/google/common/hash/Murmur3_32HashFunction.java>guava
+ * implementation</a> from Google Guava library.
+ */
+public class MurmurHash3BinaryHashFunctionFamily implements
+ IBinaryHashFunctionFamily {
+
+ public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
+
+ private static final long serialVersionUID = 1L;
+
+ private MurmurHash3BinaryHashFunctionFamily() {
+ }
+
+ private static final int C1 = 0xcc9e2d51;
+ private static final int C2 = 0x1b873593;
+ private static final int C3 = 5;
+ private static final int C4 = 0xe6546b64;
+ private static final int C5 = 0x85ebca6b;
+ private static final int C6 = 0xc2b2ae35;
+
+ @Override
+ public IBinaryHashFunction createBinaryHashFunction(final int seed) {
+ return new IBinaryHashFunction() {
+ @Override
+ public int hash(byte[] bytes, int offset, int length) {
+ int h = seed;
+ int p = offset;
+ int remain = length;
+ while (remain >= 4) {
+ int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8)
+ | ((bytes[p + 2] & 0xff) << 16)
+ | ((bytes[p + 3] & 0xff) << 24);
+ k *= C1;
+ k = Integer.rotateLeft(k, 15);
+ k *= C2;
+ h ^= k;
+ h = Integer.rotateLeft(h, 13);
+ h = h * C3 + C4;
+ p += 4;
+ remain -= 4;
+ }
+ if (remain > 0) {
+ int k = 0;
+ for (int i = 0; remain > 0; i += 8) {
+ k ^= (bytes[p++] & 0xff) << i;
+ remain--;
+ }
+ k *= C1;
+ k = Integer.rotateLeft(k, 15);
+ k *= C2;
+ h ^= k;
+ }
+ h ^= length;
+ h ^= (h >>> 16);
+ h *= C5;
+ h ^= (h >>> 13);
+ h *= C6;
+ h ^= (h >>> 16);
+ return h;
+ }
+ };
+ }
+}
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java
deleted file mode 100644
index 903ac3d..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
-
-public class AInt32AscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AInt32AscNormalizedKeyComputerFactory INSTANCE = new AInt32AscNormalizedKeyComputerFactory();
-
- private IntegerNormalizedKeyComputerFactory inkcf = new IntegerNormalizedKeyComputerFactory();
-
- private AInt32AscNormalizedKeyComputerFactory() {
- }
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer intNkc = inkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- return intNkc.normalize(bytes, start + 1, length - 1);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java
deleted file mode 100644
index cec6a40..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
-
-public class AInt32DescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AInt32DescNormalizedKeyComputerFactory INSTANCE = new AInt32DescNormalizedKeyComputerFactory();
-
- private IntegerNormalizedKeyComputerFactory inkcf = new IntegerNormalizedKeyComputerFactory();
-
- private AInt32DescNormalizedKeyComputerFactory() {
- }
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer intNkc = inkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- int nk = intNkc.normalize(bytes, start + 1, length - 1);
- return (int) ((long) 0xffffffff - (long) nk);
- }
- };
- }
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java
deleted file mode 100644
index 0d1780f..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
-
-public class AStringAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AStringAscNormalizedKeyComputerFactory INSTANCE = new AStringAscNormalizedKeyComputerFactory();
-
- private AStringAscNormalizedKeyComputerFactory() {
- }
-
- private UTF8StringNormalizedKeyComputerFactory strnkcf = new UTF8StringNormalizedKeyComputerFactory();
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer strNkc = strnkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- return strNkc.normalize(bytes, start + 1, length - 1);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java
deleted file mode 100644
index 0ff6acd..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
-
-public class AStringDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AStringDescNormalizedKeyComputerFactory INSTANCE = new AStringDescNormalizedKeyComputerFactory();
-
- private AStringDescNormalizedKeyComputerFactory() {
- }
-
- private UTF8StringNormalizedKeyComputerFactory strnkcf = new UTF8StringNormalizedKeyComputerFactory();
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer strNkc = strnkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- int nk = strNkc.normalize(bytes, start + 1, length - 1);
- return (int) ((long) 0xffffffff - (long) nk);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..2efa593
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+/**
+ * This class uses a decorator pattern to wrap an ASC-ordered INormalizedKeyComputerFactory implementation,
+ * preserving the ASC order while skipping the leading ASTERIX type tag.
+ */
+public class AWrappedAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final INormalizedKeyComputerFactory nkcf;
+
+ public AWrappedAscNormalizedKeyComputerFactory(INormalizedKeyComputerFactory nkcf) {
+ this.nkcf = nkcf;
+ }
+
+ @Override
+ public INormalizedKeyComputer createNormalizedKeyComputer() {
+ final INormalizedKeyComputer nkc = nkcf.createNormalizedKeyComputer();
+ return new INormalizedKeyComputer() {
+
+ @Override
+ public int normalize(byte[] bytes, int start, int length) {
+ // start +1, length -1 is because in ASTERIX data format, there is always a type tag before the value
+ return nkc.normalize(bytes, start + 1, length - 1);
+ }
+ };
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..a5653b9
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+/**
+ * This class uses a decorator pattern to wrap an ASC-ordered INormalizedKeyComputerFactory implementation
+ * to obtain the DESC order (by inverting the normalized key).
+ */
+public class AWrappedDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final INormalizedKeyComputerFactory nkcf;
+
+ public AWrappedDescNormalizedKeyComputerFactory(INormalizedKeyComputerFactory nkcf) {
+ this.nkcf = nkcf;
+ }
+
+ @Override
+ public INormalizedKeyComputer createNormalizedKeyComputer() {
+ final INormalizedKeyComputer nkc = nkcf.createNormalizedKeyComputer();
+ return new INormalizedKeyComputer() {
+
+ @Override
+ public int normalize(byte[] bytes, int start, int length) {
+ int key = nkc.normalize(bytes, start + 1, length - 1);
+ return (int) ((long) 0xffffffff - (long) key);
+ }
+ };
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
new file mode 100644
index 0000000..99c40b2
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinter.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
+
+import java.io.PrintStream;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+
+public class AIntervalPrinter implements IPrinter {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AIntervalPrinter INSTANCE = new AIntervalPrinter();
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.hyracks.algebricks.data.IPrinter#init()
+ */
+ @Override
+ public void init() throws AlgebricksException {
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.hyracks.algebricks.data.IPrinter#print(byte[], int, int, java.io.PrintStream)
+ */
+ @Override
+ public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
+ ps.print("interval(\"");
+
+ short typetag = AInt8SerializerDeserializer.getByte(b, s + 1 + 8 * 2);
+
+ IPrinter timeInstancePrinter;
+
+ if (typetag == ATypeTag.DATE.serialize()) {
+ timeInstancePrinter = ADatePrinter.INSTANCE;
+ } else if (typetag == ATypeTag.TIME.serialize()) {
+ timeInstancePrinter = ATimePrinter.INSTANCE;
+ } else if (typetag == ATypeTag.DATETIME.serialize()) {
+ timeInstancePrinter = ADateTimePrinter.INSTANCE;
+ } else {
+ throw new AlgebricksException("Unsupport internal time types in interval: " + typetag);
+ }
+
+ if (typetag == ATypeTag.TIME.serialize() || typetag == ATypeTag.DATE.serialize()) {
+ timeInstancePrinter.print(b, s + 1 + 4 - 1, 8, ps);
+ ps.print(", ");
+ timeInstancePrinter.print(b, s + 1 + 8 + 4 - 1, 8, ps);
+ } else {
+ timeInstancePrinter.print(b, s, 8, ps);
+ ps.print(", ");
+ timeInstancePrinter.print(b, s + 1 + 8 - 1, 8, ps);
+ }
+
+ ps.print("\")");
+ }
+}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
similarity index 61%
rename from asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
index 7c19a56..5500091 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/aqlj/common/AQLJException.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AIntervalPrinterFactory.java
@@ -12,25 +12,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.api.aqlj.common;
+package edu.uci.ics.asterix.dataflow.data.nontagged.printers;
-/**
- * This is the base (and currently the only) exception class for AQLJ.
- *
- * @author zheilbron
- */
-public class AQLJException extends Exception {
+import edu.uci.ics.hyracks.algebricks.data.IPrinter;
+import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
+
+public class AIntervalPrinterFactory implements IPrinterFactory {
+
private static final long serialVersionUID = 1L;
+ public static final AIntervalPrinterFactory INSTANCE = new AIntervalPrinterFactory();
- public AQLJException(String message) {
- super(message);
+ @Override
+ public IPrinter createPrinter() {
+ return AIntervalPrinter.INSTANCE;
}
- public AQLJException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public AQLJException(Throwable cause) {
- super(cause);
- }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
index edcfc8a..478ad2c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
@@ -73,6 +73,10 @@
ADurationPrinter.INSTANCE.print(b, s, l, ps);
break;
}
+ case INTERVAL: {
+ AIntervalPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
case POINT: {
APointPrinter.INSTANCE.print(b, s, l, ps);
break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
index 9bd25f4..86a9a8d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateSerializerDeserializer.java
@@ -21,7 +21,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.ADate;
import edu.uci.ics.asterix.om.base.AMutableDate;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
import edu.uci.ics.asterix.om.types.BuiltinType;
@@ -64,8 +64,8 @@
long chrononTimeInMs = 0;
try {
StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
- charAccessor.reset(date, 0);
- chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, true);
+ charAccessor.reset(date, 0, date.length());
+ chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, true);
} catch (Exception e) {
throw new HyracksDataException(e);
}
@@ -78,4 +78,8 @@
dateSerde.serialize(aDate, out);
}
+
+ public static int getChronon(byte[] byteArray, int offset) {
+ return AInt32SerializerDeserializer.getInt(byteArray, offset);
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
index cb9e8e0..cedfc8d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADateTimeSerializerDeserializer.java
@@ -21,7 +21,8 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.ADateTime;
import edu.uci.ics.asterix.om.base.AMutableDateTime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -66,7 +67,7 @@
long chrononTimeInMs = 0;
try {
StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
- charAccessor.reset(datetime, 0);
+ charAccessor.reset(datetime, 0, datetime.length());
// +1 if it is negative (-)
short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
@@ -78,11 +79,11 @@
// if extended form 11, else 9
timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
- chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, false);
+ chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, false);
- charAccessor.reset(datetime, timeOffset);
+ charAccessor.reset(datetime, timeOffset, datetime.length() - timeOffset);
- chrononTimeInMs += ADateAndTimeParser.parseTimePart(charAccessor);
+ chrononTimeInMs += ATimeParserFactory.parseTimePart(charAccessor);
} catch (Exception e) {
throw new HyracksDataException(e);
}
@@ -90,4 +91,8 @@
datetimeSerde.serialize(aDateTime, out);
}
+
+ public static long getChronon(byte[] data, int offset) {
+ return AInt64SerializerDeserializer.getLong(data, offset);
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
index c3333f0..88e2ea5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ADurationSerializerDeserializer.java
@@ -7,7 +7,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.ADuration;
import edu.uci.ics.asterix.om.base.AMutableDuration;
-import edu.uci.ics.asterix.om.base.temporal.ADurationParser;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -48,12 +48,34 @@
try {
AMutableDuration aDuration = new AMutableDuration(0, 0);
StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
- charAccessor.reset(duration, 0);
- ADurationParser.parse(charAccessor, aDuration);
+ charAccessor.reset(duration, 0, duration.length());
+ ADurationParserFactory.parseDuration(charAccessor, aDuration);
durationSerde.serialize(aDuration, out);
} catch (Exception e) {
throw new HyracksDataException(e);
}
}
+
+ /**
+ * Get the year-month field of the duration as an integer number of months.
+ *
+ * @param data
+ * @param offset
+ * @return
+ */
+ public static int getYearMonth(byte[] data, int offset) {
+ return AInt32SerializerDeserializer.getInt(data, offset);
+ }
+
+ /**
+ * Get the day-time field of the duration as a long integer number of milliseconds.
+ *
+ * @param data
+ * @param offset
+ * @return
+ */
+ public static long getDayTime(byte[] data, int offset) {
+ return AInt64SerializerDeserializer.getLong(data, offset + 4);
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java
new file mode 100644
index 0000000..7c87dfa
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AIntervalSerializerDeserializer.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.serde;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class AIntervalSerializerDeserializer implements ISerializerDeserializer<AInterval> {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final AIntervalSerializerDeserializer INSTANCE = new AIntervalSerializerDeserializer();
+ @SuppressWarnings("unchecked")
+ private static final ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+
+ private static final String errorMessage = "This can not be an instance of interval";
+
+ private AIntervalSerializerDeserializer() {
+ }
+
+ @Override
+ public AInterval deserialize(DataInput in) throws HyracksDataException {
+ try {
+ return new AInterval(in.readLong(), in.readLong(), in.readByte());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+
+ }
+
+ @Override
+ public void serialize(AInterval instance, DataOutput out) throws HyracksDataException {
+ try {
+ out.writeLong(instance.getIntervalStart());
+ out.writeLong(instance.getIntervalEnd());
+ out.writeByte(instance.getIntervalType());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+
+ }
+
+ public static long getIntervalStart(byte[] data, int offset) {
+ return AInt64SerializerDeserializer.getLong(data, offset);
+ }
+
+ public static long getIntervalEnd(byte[] data, int offset) {
+ return AInt64SerializerDeserializer.getLong(data, offset + 8);
+ }
+
+ public static byte getIntervalTimeType(byte[] data, int offset) {
+ return data[offset + 8 * 2];
+ }
+
+ /**
+ * create an interval value from two given datetime instance.
+ *
+ * @param interval
+ * @param out
+ * @throws HyracksDataException
+ */
+ public static void parseDatetime(String interval, DataOutput out) throws HyracksDataException {
+ AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+ long chrononTimeInMsStart = 0;
+ long chrononTimeInMsEnd = 0;
+ try {
+
+ StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+ // Get the index for the comma
+ int commaIndex = interval.indexOf(',');
+ if (commaIndex < 0) {
+ throw new AlgebricksException("comma is missing for a string of interval");
+ }
+
+ int nonSpaceIndex = commaIndex - 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex--;
+ }
+
+ // Interval Start
+ charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+
+ // +1 if it is negative (-)
+ short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ if (charAccessor.getCharAt(timeOffset + 10) != 'T' && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new AlgebricksException(errorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+ chrononTimeInMsStart = ADateParserFactory.parseDatePart(charAccessor, false);
+
+ charAccessor.reset(interval, timeOffset, nonSpaceIndex - timeOffset + 1);
+
+ chrononTimeInMsStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+ // Interval End
+ nonSpaceIndex = commaIndex + 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex++;
+ }
+
+ charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+
+ // +1 if it is negative (-)
+ timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ if (charAccessor.getCharAt(timeOffset + 10) != 'T' && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new AlgebricksException(errorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+ chrononTimeInMsEnd = ADateParserFactory.parseDatePart(charAccessor, false);
+
+ charAccessor.reset(interval, nonSpaceIndex + timeOffset, interval.length() - nonSpaceIndex - timeOffset);
+
+ chrononTimeInMsEnd += ATimeParserFactory.parseTimePart(charAccessor);
+
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+
+ aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.DATETIME.serialize());
+
+ intervalSerde.serialize(aInterval, out);
+ }
+
+ public static void parseTime(String interval, DataOutput out) throws HyracksDataException {
+ AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+ long chrononTimeInMsStart = 0;
+ long chrononTimeInMsEnd = 0;
+ try {
+
+ StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+ // Get the index for the comma
+ int commaIndex = interval.indexOf(',');
+ if (commaIndex < 0) {
+ throw new AlgebricksException("comma is missing for a string of interval");
+ }
+
+ int nonSpaceIndex = commaIndex - 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex--;
+ }
+
+ // Interval Start
+ charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+ chrononTimeInMsStart = ATimeParserFactory.parseTimePart(charAccessor);
+
+ if (chrononTimeInMsStart < 0) {
+ chrononTimeInMsStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ // Interval End
+ nonSpaceIndex = commaIndex + 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex++;
+ }
+
+ charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+ chrononTimeInMsEnd = ATimeParserFactory.parseTimePart(charAccessor);
+
+ if (chrononTimeInMsEnd < 0) {
+ chrononTimeInMsEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+
+ aInterval.setValue(chrononTimeInMsStart, chrononTimeInMsEnd, ATypeTag.TIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+ }
+
+ public static void parseDate(String interval, DataOutput out) throws HyracksDataException {
+ AMutableInterval aInterval = new AMutableInterval(0l, 0l, (byte) 0);
+
+ long chrononTimeInMsStart = 0;
+ long chrononTimeInMsEnd = 0;
+ short tempStart = 0;
+ short tempEnd = 0;
+ try {
+ StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
+
+ // Get the index for the comma
+ int commaIndex = interval.indexOf(',');
+ if (commaIndex < 0) {
+ throw new AlgebricksException("comma is missing for a string of interval");
+ }
+
+ int nonSpaceIndex = commaIndex - 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex--;
+ }
+
+ // Interval Start
+ charAccessor.reset(interval, 0, nonSpaceIndex + 1);
+
+ chrononTimeInMsStart = ADateParserFactory.parseDatePart(charAccessor, true);
+
+ if (chrononTimeInMsStart < 0 && chrononTimeInMsStart % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+ tempStart = 1;
+ }
+
+ // Interval End
+ nonSpaceIndex = commaIndex + 1;
+ while (interval.charAt(nonSpaceIndex) == ' ') {
+ nonSpaceIndex++;
+ }
+
+ charAccessor.reset(interval, nonSpaceIndex, interval.length() - nonSpaceIndex);
+
+ chrononTimeInMsEnd = ADateParserFactory.parseDatePart(charAccessor, true);
+
+ if (chrononTimeInMsEnd < 0 && chrononTimeInMsEnd % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+ tempEnd = 1;
+ }
+
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+
+ aInterval.setValue((chrononTimeInMsStart / GregorianCalendarSystem.CHRONON_OF_DAY) - tempStart,
+ (chrononTimeInMsEnd / GregorianCalendarSystem.CHRONON_OF_DAY) - tempEnd, ATypeTag.DATE.serialize());
+
+ intervalSerde.serialize(aInterval, out);
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
index 311a6bc..b5b7303 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
@@ -15,6 +15,7 @@
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AInt64;
import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.ALine;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -84,6 +85,9 @@
case DURATION: {
return ADurationSerializerDeserializer.INSTANCE.deserialize(in);
}
+ case INTERVAL: {
+ return AIntervalSerializerDeserializer.INSTANCE.deserialize(in);
+ }
case POINT: {
return APointSerializerDeserializer.INSTANCE.deserialize(in);
}
@@ -108,9 +112,9 @@
case UNORDEREDLIST: {
return AUnorderedListSerializerDeserializer.SCHEMALESS_INSTANCE.deserialize(in);
}
- // case TYPE: {
- // return AUnorderedListBytesConverter.INSTANCE.deserialize(in);
- // }
+ // case TYPE: {
+ // return AUnorderedListBytesConverter.INSTANCE.deserialize(in);
+ // }
default: {
throw new NotImplementedException("No serializer/deserializer implemented for type " + typeTag + " .");
}
@@ -179,6 +183,10 @@
ADurationSerializerDeserializer.INSTANCE.serialize((ADuration) instance, out);
break;
}
+ case INTERVAL: {
+ AIntervalSerializerDeserializer.INSTANCE.serialize((AInterval) instance, out);
+ break;
+ }
case POINT: {
APointSerializerDeserializer.INSTANCE.serialize((APoint) instance, out);
break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
index 393f634..a450f27 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -21,6 +21,7 @@
import edu.uci.ics.asterix.builders.IARecordBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -149,7 +150,7 @@
} else {
return new ARecord(this.recordType, closedFields);
}
- } catch (IOException e) {
+ } catch (IOException | AsterixException e) {
throw new HyracksDataException(e);
}
}
@@ -166,7 +167,7 @@
return fields;
}
- private ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) {
+ private ARecordType mergeRecordTypes(ARecordType recType1, ARecordType recType2) throws AsterixException {
String[] fieldNames = new String[recType1.getFieldNames().length + recType2.getFieldNames().length];
IAType[] fieldTypes = new IAType[recType1.getFieldTypes().length + recType2.getFieldTypes().length];
@@ -199,7 +200,7 @@
}
try {
recordBuilder.write(out, false);
- } catch (IOException e) {
+ } catch (IOException | AsterixException e) {
throw new HyracksDataException(e);
}
} else {
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
index 8860f2a..26e8d7a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/ATimeSerializerDeserializer.java
@@ -7,7 +7,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AMutableTime;
import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
import edu.uci.ics.asterix.om.base.temporal.StringCharSequenceAccessor;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -52,8 +52,8 @@
try {
StringCharSequenceAccessor charAccessor = new StringCharSequenceAccessor();
- charAccessor.reset(time, 0);
- chrononTimeInMs = ADateAndTimeParser.parseTimePart(charAccessor);
+ charAccessor.reset(time, 0, time.length());
+ chrononTimeInMs = ATimeParserFactory.parseTimePart(charAccessor);
} catch (Exception e) {
throw new HyracksDataException(e);
}
@@ -63,4 +63,8 @@
timeSerde.serialize(aTime, out);
}
+ public static int getChronon(byte[] byteArray, int offset) {
+ return AInt32SerializerDeserializer.getInt(byteArray, offset);
+ }
+
}
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
index c1df096..813c3a0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
@@ -14,6 +14,7 @@
import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -63,4 +64,6 @@
public IExpressionEvalSizeComputer getExpressionEvalSizeComputer();
public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider();
+
+ public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
index e4db8da..02f0e47 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
@@ -2,6 +2,7 @@
import java.io.Serializable;
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateOrTimeAscBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateTimeAscBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectAscBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectDescBinaryComparatorFactory;
@@ -19,8 +20,8 @@
import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
import edu.uci.ics.hyracks.data.std.primitive.LongPointable;
+import edu.uci.ics.hyracks.data.std.primitive.RawUTF8StringPointable;
import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
public class AqlBinaryComparatorFactoryProvider implements IBinaryComparatorFactoryProvider, Serializable {
@@ -39,7 +40,7 @@
public static final PointableBinaryComparatorFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
DoublePointable.FACTORY);
public static final PointableBinaryComparatorFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
- UTF8StringPointable.FACTORY);
+ RawUTF8StringPointable.FACTORY);
// Equivalent to UTF8STRING_POINTABLE_INSTANCE but all characters are considered lower case to implement case-insensitive comparisons.
public static final PointableBinaryComparatorFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
UTF8StringLowercasePointable.FACTORY);
@@ -64,11 +65,11 @@
public IBinaryComparatorFactory getBinaryComparatorFactory(Object type, boolean ascending) {
if (type == null) {
return anyBinaryComparatorFactory(ascending);
- }
+ }
IAType aqlType = (IAType) type;
return getBinaryComparatorFactory(aqlType.getTypeTag(), ascending);
}
-
+
public IBinaryComparatorFactory getBinaryComparatorFactory(ATypeTag type, boolean ascending) {
switch (type) {
case ANY:
@@ -120,13 +121,14 @@
return addOffset(RectangleBinaryComparatorFactory.INSTANCE, ascending);
}
case DATE:
- case TIME:
+ case TIME: {
+ return addOffset(ADateOrTimeAscBinaryComparatorFactory.INSTANCE, ascending);
+ }
case DATETIME: {
return addOffset(ADateTimeAscBinaryComparatorFactory.INSTANCE, ascending);
}
default: {
- throw new NotImplementedException("No binary comparator factory implemented for type "
- + type + " .");
+ throw new NotImplementedException("No binary comparator factory implemented for type " + type + " .");
}
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
index cab8ded..1afdbf8 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
@@ -16,20 +16,25 @@
import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.RawUTF8StringPointable;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-
public class AqlBinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider, Serializable {
private static final long serialVersionUID = 1L;
public static final AqlBinaryHashFunctionFactoryProvider INSTANCE = new AqlBinaryHashFunctionFactoryProvider();
- public static final PointableBinaryHashFunctionFactory INTEGER_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(IntegerPointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory FLOAT_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(FloatPointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(DoublePointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory INTEGER_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ IntegerPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory FLOAT_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ FloatPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ DoublePointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ RawUTF8StringPointable.FACTORY);
// Equivalent to UTF8STRING_POINTABLE_INSTANCE but all characters are considered lower case to implement case-insensitive hashing.
- public static final PointableBinaryHashFunctionFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(UTF8StringLowercasePointable.FACTORY);
-
+ public static final PointableBinaryHashFunctionFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ UTF8StringLowercasePointable.FACTORY);
+
private AqlBinaryHashFunctionFactoryProvider() {
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..bc7ba26
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.formats.nontagged;
+
+import java.io.Serializable;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.hash.MurmurHash3BinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+/**
+ * We use a type-independent binary hash function family from the hyracks
+ * codebase
+ */
+public class AqlBinaryHashFunctionFamilyProvider implements
+ IBinaryHashFunctionFamilyProvider, Serializable {
+
+ private static final long serialVersionUID = 1L;
+ public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
+
+ private AqlBinaryHashFunctionFamilyProvider() {
+
+ }
+
+ @Override
+ public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
+ throws AlgebricksException {
+ return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
index fe953dc..23306ed 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
@@ -1,12 +1,15 @@
package edu.uci.ics.asterix.formats.nontagged;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AInt32AscNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AInt32DescNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AStringAscNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AStringDescNormalizedKeyComputerFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedAscNormalizedKeyComputerFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedDescNormalizedKeyComputerFactory;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.DoubleNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.FloatNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.Integer64NormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
public class AqlNormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
@@ -21,10 +24,19 @@
if (ascending) {
switch (aqlType.getTypeTag()) {
case INT32: {
- return AInt32AscNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedAscNormalizedKeyComputerFactory(new IntegerNormalizedKeyComputerFactory());
+ }
+ case INT64: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new Integer64NormalizedKeyComputerFactory());
+ }
+ case FLOAT: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new FloatNormalizedKeyComputerFactory());
+ }
+ case DOUBLE: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new DoubleNormalizedKeyComputerFactory());
}
case STRING: {
- return AStringAscNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedAscNormalizedKeyComputerFactory(new UTF8StringNormalizedKeyComputerFactory());
}
default: {
return null;
@@ -33,10 +45,19 @@
} else {
switch (aqlType.getTypeTag()) {
case INT32: {
- return AInt32DescNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedDescNormalizedKeyComputerFactory(new IntegerNormalizedKeyComputerFactory());
+ }
+ case INT64: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new Integer64NormalizedKeyComputerFactory());
+ }
+ case FLOAT: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new FloatNormalizedKeyComputerFactory());
+ }
+ case DOUBLE: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new DoubleNormalizedKeyComputerFactory());
}
case STRING: {
- return AStringDescNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedDescNormalizedKeyComputerFactory(new UTF8StringNormalizedKeyComputerFactory());
}
default: {
return null;
@@ -44,5 +65,4 @@
}
}
}
-
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
index b576ce0..fe8792f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlPrinterFactoryProvider.java
@@ -1,6 +1,5 @@
package edu.uci.ics.asterix.formats.nontagged;
-
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ABooleanPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ACirclePrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADatePrinterFactory;
@@ -12,6 +11,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt32PrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt64PrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt8PrinterFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AIntervalPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ALinePrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ANullPrinterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ANullableFieldPrinterFactory;
@@ -49,8 +49,8 @@
if (aqlType != null) {
switch (aqlType.getTypeTag()) {
- // case ANYTYPE:
- // return AAnyTypePrinterFactory.INSTANCE;
+ // case ANYTYPE:
+ // return AAnyTypePrinterFactory.INSTANCE;
case INT8:
return AInt8PrinterFactory.INSTANCE;
case INT16:
@@ -75,6 +75,8 @@
return ADateTimePrinterFactory.INSTANCE;
case DURATION:
return ADurationPrinterFactory.INSTANCE;
+ case INTERVAL:
+ return AIntervalPrinterFactory.INSTANCE;
case POINT:
return APointPrinterFactory.INSTANCE;
case POINT3D:
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
index 5492c5c..29e33fd 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
@@ -16,6 +16,7 @@
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ANullSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AObjectSerializerDeserializer;
@@ -111,6 +112,9 @@
case DURATION: {
return ADurationSerializerDeserializer.INSTANCE;
}
+ case INTERVAL: {
+ return AIntervalSerializerDeserializer.INSTANCE;
+ }
case ORDEREDLIST: {
return new AOrderedListSerializerDeserializer((AOrderedListType) aqlType);
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
index eac6602..a7e8e83 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlTypeTraitProvider.java
@@ -14,6 +14,7 @@
private static final ITypeTraits FOURBYTETYPETRAIT = new TypeTrait(4 + 1);
private static final ITypeTraits EIGHTBYTETYPETRAIT = new TypeTrait(8 + 1);
private static final ITypeTraits SIXTEENBYTETYPETRAIT = new TypeTrait(16 + 1);
+ private static final ITypeTraits SEVENTEENBYTETYPETRAIT = new TypeTrait(17 + 1);
private static final ITypeTraits THIRTYTWOBYTETYPETRAIT = new TypeTrait(32 + 1);
private static final ITypeTraits TWENTYFOURBYTETYPETRAIT = new TypeTrait(24 + 1);
@@ -42,6 +43,8 @@
return EIGHTBYTETYPETRAIT;
case POINT:
return SIXTEENBYTETYPETRAIT;
+ case INTERVAL:
+ return SEVENTEENBYTETYPETRAIT;
case POINT3D:
return TWENTYFOURBYTETYPETRAIT;
case LINE:
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
index ea4c33b..097b736 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/ADate.java
@@ -30,7 +30,7 @@
*/
protected int chrononTimeInDay;
- private static long CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
+ protected static long CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
public ADate(int chrononTimeInDay) {
this.chrononTimeInDay = chrononTimeInDay;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java
new file mode 100644
index 0000000..66a587a
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AInterval.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+
+public class AInterval implements IAObject {
+
+ protected long intervalStart;
+ protected long intervalEnd;
+ protected byte typetag;
+
+ public AInterval(long intervalStart, long intervalEnd, byte typetag) {
+ this.intervalStart = intervalStart;
+ this.intervalEnd = intervalEnd;
+ this.typetag = typetag;
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.base.IAObject#getType()
+ */
+ @Override
+ public IAType getType() {
+ return BuiltinType.AINTERVAL;
+ }
+
+ public int compare(Object o) {
+ if (!(o instanceof AInterval)) {
+ return -1;
+ }
+ AInterval d = (AInterval) o;
+ if (d.intervalStart == this.intervalStart && d.intervalEnd == this.intervalEnd && d.typetag == this.typetag) {
+ return 0;
+ } else {
+ return -1;
+ }
+ }
+
+ public boolean equals(Object o) {
+ if (!(o instanceof AInterval)) {
+ return false;
+ } else {
+ AInterval t = (AInterval) o;
+ return (t.intervalStart == this.intervalStart || t.intervalEnd == this.intervalEnd
+ && t.typetag == this.typetag);
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return (int) (((int) (this.intervalStart ^ (this.intervalStart >>> 32))) * 31 + (int) (this.intervalEnd ^ (this.intervalEnd >>> 32)))
+ * 31 + (int) this.typetag;
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.base.IAObject#accept(edu.uci.ics.asterix.om.visitors.IOMVisitor)
+ */
+ @Override
+ public void accept(IOMVisitor visitor) throws AsterixException {
+ visitor.visitAInterval(this);
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.base.IAObject#deepEqual(edu.uci.ics.asterix.om.base.IAObject)
+ */
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ return equals(obj);
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.base.IAObject#hash()
+ */
+ @Override
+ public int hash() {
+ return hashCode();
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sbder = new StringBuilder();
+ sbder.append("AInterval: { ");
+ if (typetag == ATypeTag.DATE.serialize()) {
+ sbder.append("ADate: { ");
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(
+ intervalStart * ADate.CHRONON_OF_DAY, 0, sbder, GregorianCalendarSystem.Fields.YEAR,
+ GregorianCalendarSystem.Fields.DAY);
+ sbder.append(" }, ADate: {");
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(
+ intervalEnd * ADate.CHRONON_OF_DAY, 0, sbder, GregorianCalendarSystem.Fields.YEAR,
+ GregorianCalendarSystem.Fields.DAY);
+ sbder.append(" }");
+ } else if (typetag == ATypeTag.TIME.serialize()) {
+ sbder.append("ATime: { ");
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalStart, 0, sbder,
+ GregorianCalendarSystem.Fields.HOUR, GregorianCalendarSystem.Fields.MILLISECOND);
+ sbder.append(" }, ATime: { ");
+
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalEnd, 0, sbder,
+ GregorianCalendarSystem.Fields.HOUR, GregorianCalendarSystem.Fields.MILLISECOND);
+ sbder.append(" }");
+ } else if (typetag == ATypeTag.DATETIME.serialize()) {
+ sbder.append("ADateTime: { ");
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalStart, 0, sbder,
+ GregorianCalendarSystem.Fields.YEAR, GregorianCalendarSystem.Fields.MILLISECOND);
+ sbder.append(" }, ADateTime: { ");
+ GregorianCalendarSystem.getInstance().getExtendStringRepWithTimezoneUntilField(intervalEnd, 0, sbder,
+ GregorianCalendarSystem.Fields.YEAR, GregorianCalendarSystem.Fields.MILLISECOND);
+ sbder.append(" }");
+ }
+ sbder.append(" }");
+ return sbder.toString();
+ }
+
+ public long getIntervalStart() {
+ return intervalStart;
+ }
+
+ public long getIntervalEnd() {
+ return intervalEnd;
+ }
+
+ public short getIntervalType() {
+ return typetag;
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java
similarity index 60%
copy from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
copy to asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java
index dfb4f91..055535f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/AMutableInterval.java
@@ -12,26 +12,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.om.base;
-import java.io.Serializable;
+public class AMutableInterval extends AInterval {
-public interface IFeedMessage extends Serializable {
-
- public enum MessageResponseMode {
- SYNCHRONOUS,
- ASYNCHRONOUS,
+ public AMutableInterval(long intervalStart, long intervalEnd, byte typetag) {
+ super(intervalStart, intervalEnd, typetag);
}
- public enum MessageType {
- STOP,
- SUSPEND,
- RESUME,
- ALTER,
+ public void setValue(long intervalStart, long intervalEnd, byte typetag) {
+ this.intervalStart = intervalStart;
+ this.intervalEnd = intervalEnd;
+ this.typetag = typetag;
}
- public MessageResponseMode getMessageResponseMode();
-
- public MessageType getMessageType();
-
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java
deleted file mode 100644
index 30ee525..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateAndTimeParser.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.om.base.temporal;
-
-public class ADateAndTimeParser {
-
- private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
-
- private static final String dateErrorMessage = "Wrong date format!";
- private static final String timeErrorMessage = "Wrong time format!";
-
- /**
- * Parse the given char sequence as a date string, and return the milliseconds represented by the date.
- *
- * @param charAccessor
- * accessor for the char sequence
- * @param isDateOnly
- * indicating whether it is a single date string, or it is the date part of a datetime string
- * @param errorMessage
- * @return
- * @throws Exception
- */
- public static <T> long parseDatePart(ICharSequenceAccessor<T> charAccessor, boolean isDateOnly) throws Exception {
-
- int length = charAccessor.getLength();
- int offset = 0;
-
- int year = 0, month = 0, day = 0;
- boolean positive = true;
-
- boolean isExtendedForm = false;
-
- if (charAccessor.getCharAt(offset) == '-') {
- offset++;
- positive = false;
- }
-
- if ((isDateOnly) && charAccessor.getCharAt(offset + 4) == '-' || (!isDateOnly)
- && charAccessor.getCharAt(offset + 13) == ':') {
- isExtendedForm = true;
- }
-
- if (isExtendedForm) {
- if (charAccessor.getCharAt(offset + 4) != '-' || charAccessor.getCharAt(offset + 7) != '-') {
- throw new Exception(dateErrorMessage);
- }
- }
-
- // year
- for (int i = 0; i < 4; i++) {
- if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
- year = year * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(dateErrorMessage);
- }
- }
-
- if (year < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.YEAR.ordinal()]
- || year > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.YEAR.ordinal()]) {
- throw new Exception(dateErrorMessage + ": year " + year);
- }
-
- offset += (isExtendedForm) ? 5 : 4;
-
- // month
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
- month = month * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(dateErrorMessage);
- }
- }
-
- if (month < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MONTH.ordinal()]
- || month > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MONTH.ordinal()]) {
- throw new Exception(dateErrorMessage + ": month " + month);
- }
- offset += (isExtendedForm) ? 3 : 2;
-
- // day
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
- day = day * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(dateErrorMessage);
- }
- }
-
- if (day < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.DAY.ordinal()]
- || day > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.DAY.ordinal()]) {
- throw new Exception(dateErrorMessage + ": day " + day);
- }
-
- offset += 2;
-
- if (!positive) {
- year *= -1;
- }
-
- if (isDateOnly && length > offset) {
- throw new Exception(dateErrorMessage);
- }
- return gCalInstance.getChronon(year, month, day, 0, 0, 0, 0, 0);
- }
-
- /**
- * Parse the given char sequence as a time string, and return the milliseconds represented by the time.
- *
- * @param charAccessor
- * @return
- * @throws Exception
- */
- public static <T> int parseTimePart(ICharSequenceAccessor<T> charAccessor) throws Exception {
-
- int length = charAccessor.getLength();
- int offset = 0;
-
- int hour = 0, min = 0, sec = 0, millis = 0;
- int timezone = 0;
-
- boolean isExtendedForm = false;
- if (charAccessor.getCharAt(offset + 2) == ':') {
- isExtendedForm = true;
- }
-
- if (isExtendedForm && (charAccessor.getCharAt(offset + 2) != ':' || charAccessor.getCharAt(offset + 5) != ':')) {
- throw new Exception(timeErrorMessage);
- }
- // hour
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
- hour = hour * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(timeErrorMessage);
- }
- }
-
- if (hour < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.HOUR.ordinal()]
- || hour > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.HOUR.ordinal()]) {
- throw new Exception(timeErrorMessage + ": hour " + hour);
- }
-
- offset += (isExtendedForm) ? 3 : 2;
-
- // minute
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
- min = min * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(timeErrorMessage);
- }
- }
-
- if (min < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]
- || min > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]) {
- throw new Exception(timeErrorMessage + ": min " + min);
- }
-
- offset += (isExtendedForm) ? 3 : 2;
-
- // second
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
- sec = sec * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- throw new Exception(timeErrorMessage);
- }
- }
-
- if (sec < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.SECOND.ordinal()]
- || sec > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.SECOND.ordinal()]) {
- throw new Exception(timeErrorMessage + ": sec " + sec);
- }
-
- offset += 2;
-
- if ((isExtendedForm && length > offset && charAccessor.getCharAt(offset) == '.')
- || (!isExtendedForm && length > offset)) {
-
- offset += (isExtendedForm) ? 1 : 0;
- int i = 0;
- for (; i < 3 && offset + i < length; i++) {
- if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
- millis = millis * 10 + charAccessor.getCharAt(offset + i) - '0';
- } else {
- break;
- }
- }
-
- offset += i;
-
- for (; i < 3; i++) {
- millis = millis * 10;
- }
-
- // error is thrown if more than three digits are seen for the millisecond part
- if (charAccessor.getCharAt(offset) >= '0' && charAccessor.getCharAt(offset) <= '9') {
- throw new Exception("Wrong format of time instance: too many fields for millisecond.");
- }
- }
-
- if (length > offset) {
- if (charAccessor.getCharAt(offset) != 'Z') {
- if ((charAccessor.getCharAt(offset) != '+' && charAccessor.getCharAt(offset) != '-')
- || (isExtendedForm && charAccessor.getCharAt(offset + 3) != ':')) {
- throw new Exception(timeErrorMessage);
- }
-
- short timezoneHour = 0;
- short timezoneMinute = 0;
-
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + 1 + i) >= '0' && charAccessor.getCharAt(offset + 1 + i) <= '9')) {
- timezoneHour = (short) (timezoneHour * 10 + charAccessor.getCharAt(offset + 1 + i) - '0');
- } else {
- throw new Exception(timeErrorMessage);
- }
- }
-
- if (timezoneHour < GregorianCalendarSystem.TIMEZONE_HOUR_MIN
- || timezoneHour > GregorianCalendarSystem.TIMEZONE_HOUR_MAX) {
- throw new Exception(timeErrorMessage + ": time zone hour " + timezoneHour);
- }
-
- int temp_offset = (isExtendedForm) ? 1 : 0;
-
- for (int i = 0; i < 2; i++) {
- if ((charAccessor.getCharAt(offset + temp_offset + 3 + i) >= '0' && charAccessor.getCharAt(offset
- + temp_offset + 3 + i) <= '9')) {
- timezoneMinute = (short) (timezoneMinute * 10
- + charAccessor.getCharAt(offset + temp_offset + 3 + i) - '0');
- } else {
- throw new Exception(timeErrorMessage);
- }
- }
-
- if (timezoneMinute < GregorianCalendarSystem.TIMEZONE_MIN_MIN
- || timezoneMinute > GregorianCalendarSystem.TIMEZONE_MIN_MAX) {
- throw new Exception(timeErrorMessage + ": time zone minute " + timezoneMinute);
- }
-
- if (charAccessor.getCharAt(offset) == '-') {
- timezone = (byte) -((timezoneHour * 4) + timezoneMinute / 15);
- } else {
- timezone = (byte) ((timezoneHour * 4) + timezoneMinute / 15);
- }
- }
- }
-
- return gCalInstance.getChronon(hour, min, sec, millis, timezone);
- }
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
new file mode 100644
index 0000000..8192919
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateParserFactory.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADateParserFactory implements IValueParserFactory {
+
+ public static final IValueParserFactory INSTANCE = new ADateParserFactory();
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String dateErrorMessage = "Wrong input format for a date value";
+
+ private ADateParserFactory() {
+
+ }
+
+ @Override
+ public IValueParser createValueParser() {
+
+ final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+ return new IValueParser() {
+
+ @Override
+ public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+ charArrayAccessor.reset(buffer, start, length);
+ try {
+ out.writeInt((int) (parseDatePart(charArrayAccessor, true) / GregorianCalendarSystem.CHRONON_OF_DAY));
+ } catch (IOException ex) {
+ throw new HyracksDataException(ex);
+ }
+ }
+ };
+ }
+
+ /**
+ * Parse the given char sequence as a date string, and return the milliseconds represented by the date.
+ *
+ * @param charAccessor
+ * accessor for the char sequence
+ * @param isDateOnly
+ * indicating whether it is a single date string, or it is the date part of a datetime string
+ * @param <T>
+ *            the type of the underlying char sequence wrapped by the accessor
+ * @return the chronon time (milliseconds) represented by the parsed date
+ */
+ public static <T> long parseDatePart(ICharSequenceAccessor<T> charAccessor, boolean isDateOnly)
+ throws HyracksDataException {
+
+ int length = charAccessor.getLength();
+ int offset = 0;
+
+ int year = 0, month = 0, day = 0;
+ boolean positive = true;
+
+ boolean isExtendedForm = false;
+
+ if (charAccessor.getCharAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+
+ if ((isDateOnly) && charAccessor.getCharAt(offset + 4) == '-' || (!isDateOnly)
+ && charAccessor.getCharAt(offset + 13) == ':') {
+ isExtendedForm = true;
+ }
+
+ if (isExtendedForm) {
+ if (charAccessor.getCharAt(offset + 4) != '-' || charAccessor.getCharAt(offset + 7) != '-') {
+ throw new HyracksDataException("Missing dash in the date string as an extended form");
+ }
+ }
+
+ // year
+ for (int i = 0; i < 4; i++) {
+ if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
+ year = year * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException("Non-numeric value in year field");
+ }
+ }
+
+ if (year < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.YEAR.ordinal()]
+ || year > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.YEAR.ordinal()]) {
+ throw new HyracksDataException(dateErrorMessage + ": year " + year);
+ }
+
+ offset += (isExtendedForm) ? 5 : 4;
+
+ // month
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+ month = month * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException("Non-numeric value in month field");
+ }
+ }
+
+ if (month < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MONTH.ordinal()]
+ || month > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MONTH.ordinal()]) {
+ throw new HyracksDataException(dateErrorMessage + ": month " + month);
+ }
+ offset += (isExtendedForm) ? 3 : 2;
+
+ // day
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+ day = day * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException("Non-numeric value in day field");
+ }
+ }
+
+ if (day < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.DAY.ordinal()]
+ || day > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.DAY.ordinal()]) {
+ throw new HyracksDataException(dateErrorMessage + ": day " + day);
+ }
+
+ offset += 2;
+
+ if (!positive) {
+ year *= -1;
+ }
+
+ if (isDateOnly && length > offset) {
+ throw new HyracksDataException("Too many chars for a date only value");
+ }
+ return GregorianCalendarSystem.getInstance().getChronon(year, month, day, 0, 0, 0, 0, 0);
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java
new file mode 100644
index 0000000..2df3c3b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADateTimeParserFactory.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADateTimeParserFactory implements IValueParserFactory {
+
+ public static final IValueParserFactory INSTANCE = new ADateTimeParserFactory();
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String dateTimeErrorMessage = "Wrong Input Format for a DateTime Value";
+
+ private ADateTimeParserFactory() {
+
+ }
+
+ @Override
+ public IValueParser createValueParser() {
+
+ final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+ return new IValueParser() {
+
+ @Override
+ public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+ long chrononTimeInMs = 0;
+
+ charArrayAccessor.reset(buffer, start, length);
+
+ short timeOffset = (short) ((charArrayAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ if (charArrayAccessor.getCharAt(timeOffset + 10) != 'T'
+ && charArrayAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new HyracksDataException(dateTimeErrorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charArrayAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11) : (short) (9);
+
+ chrononTimeInMs = ADateParserFactory.parseDatePart(charArrayAccessor, false);
+
+ charArrayAccessor.reset(buffer, start + timeOffset, length - timeOffset);
+
+ chrononTimeInMs += ATimeParserFactory.parseTimePart(charArrayAccessor);
+
+ try {
+ out.writeLong(chrononTimeInMs);
+ } catch (IOException ex) {
+ throw new HyracksDataException(ex);
+ }
+ }
+ };
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
similarity index 62%
rename from asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
index 5d43bba..b176061 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParser.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ADurationParserFactory.java
@@ -14,9 +14,45 @@
*/
package edu.uci.ics.asterix.om.base.temporal;
-import edu.uci.ics.asterix.om.base.AMutableDuration;
+import java.io.DataOutput;
+import java.io.IOException;
-public class ADurationParser {
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ADurationParserFactory implements IValueParserFactory {
+
+ public static final IValueParserFactory INSTANCE = new ADurationParserFactory();
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String durationErrorMessage = "Wrong Input Format for a Duration Value";
+
+ private ADurationParserFactory() {
+
+ }
+
+ @Override
+ public IValueParser createValueParser() {
+ final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+ final AMutableDuration aMutableDuration = new AMutableDuration(0, 0);
+ return new IValueParser() {
+
+ @Override
+ public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+ charArrayAccessor.reset(buffer, start, length);
+ parseDuration(charArrayAccessor, aMutableDuration);
+ try {
+ out.writeInt(aMutableDuration.getMonths());
+ out.writeLong(aMutableDuration.getMilliseconds());
+ } catch (IOException ex) {
+ throw new HyracksDataException(ex);
+ }
+ }
+ };
+ }
private enum State {
NOTHING_READ,
@@ -30,9 +66,8 @@
SEC;
};
- private static final String errorMessage = "This can not be an instance of duration";
-
- public static <T> void parse(ICharSequenceAccessor<T> charAccessor, AMutableDuration aDuration) throws Exception {
+ public static <T> void parseDuration(ICharSequenceAccessor<T> charAccessor, AMutableDuration aDuration)
+ throws HyracksDataException {
boolean positive = true;
int offset = 0;
@@ -45,7 +80,7 @@
}
if (charAccessor.getCharAt(offset++) != 'P') {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": Missing leading 'P'.");
}
for (; offset < charAccessor.getLength(); offset++) {
@@ -59,7 +94,7 @@
year = value;
state = State.YEAR;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong YEAR feild.");
}
break;
case 'M':
@@ -68,13 +103,13 @@
month = value;
state = State.MONTH;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong MONTH field.");
}
} else if (state.compareTo(State.MIN) < 0) {
minute = value;
state = State.MIN;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong MIN field.");
}
break;
case 'D':
@@ -82,14 +117,14 @@
day = value;
state = State.DAY;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong DAY field");
}
break;
case 'T':
if (state.compareTo(State.TIME) < 0) {
state = State.TIME;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong TIME field.");
}
break;
@@ -98,7 +133,7 @@
hour = value;
state = State.HOUR;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong HOUR field.");
}
break;
case '.':
@@ -110,7 +145,8 @@
if (i < 4) {
millisecond = millisecond * 10 + (charAccessor.getCharAt(offset + i) - '0');
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage
+ + ": wrong MILLISECOND field.");
}
} else {
break;
@@ -119,18 +155,18 @@
offset += i;
state = State.MILLISEC;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong MILLISECOND field.");
}
case 'S':
if (state.compareTo(State.SEC) < 0) {
second = value;
state = State.SEC;
} else {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong SECOND field.");
}
break;
default:
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": wrong format for duration.");
}
value = 0;
@@ -138,7 +174,7 @@
}
if (state.compareTo(State.TIME) == 0) {
- throw new Exception(errorMessage);
+ throw new HyracksDataException(durationErrorMessage + ": no time fields after time separator.");
}
short temp = 1;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java
new file mode 100644
index 0000000..d76f41d
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ATimeParserFactory.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class ATimeParserFactory implements IValueParserFactory {
+
+ public static final IValueParserFactory INSTANCE = new ATimeParserFactory();
+
+ private static final long serialVersionUID = 1L;
+
+ private static final String timeErrorMessage = "Wrong Input Format for a Time Value";
+
+ private ATimeParserFactory() {
+
+ }
+
+ @Override
+ public IValueParser createValueParser() {
+
+ final CharArrayCharSequenceAccessor charArrayAccessor = new CharArrayCharSequenceAccessor();
+
+ return new IValueParser() {
+
+ @Override
+ public void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+ charArrayAccessor.reset(buffer, start, length);
+ try {
+ out.writeInt(parseTimePart(charArrayAccessor));
+ } catch (IOException ex) {
+ throw new HyracksDataException(ex);
+ }
+ }
+ };
+ }
+
+ /**
+ * Parse the given char sequence as a time string, and return the milliseconds represented by the time.
+ *
+ * @param charAccessor
+ * @return the milliseconds within the day represented by the time, adjusted by the optional timezone
+ * @throws HyracksDataException if the time string is malformed
+ */
+ public static <T> int parseTimePart(ICharSequenceAccessor<T> charAccessor) throws HyracksDataException {
+
+ int length = charAccessor.getLength();
+ int offset = 0;
+
+ int hour = 0, min = 0, sec = 0, millis = 0;
+ int timezone = 0;
+
+ boolean isExtendedForm = false;
+ if (charAccessor.getCharAt(offset + 2) == ':') {
+ isExtendedForm = true;
+ }
+
+ if (isExtendedForm && (charAccessor.getCharAt(offset + 2) != ':' || charAccessor.getCharAt(offset + 5) != ':')) {
+ throw new HyracksDataException(timeErrorMessage + ": Missing colon in an extended time format.");
+ }
+ // hour
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+ hour = hour * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in hour field");
+ }
+ }
+
+ if (hour < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.HOUR.ordinal()]
+ || hour > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.HOUR.ordinal()]) {
+ throw new HyracksDataException(timeErrorMessage + ": hour " + hour);
+ }
+
+ offset += (isExtendedForm) ? 3 : 2;
+
+ // minute
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+ min = min * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in minute field");
+ }
+ }
+
+ if (min < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]
+ || min > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.MINUTE.ordinal()]) {
+ throw new HyracksDataException(timeErrorMessage + ": min " + min);
+ }
+
+ offset += (isExtendedForm) ? 3 : 2;
+
+ // second
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9')) {
+ sec = sec * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in second field");
+ }
+ }
+
+ if (sec < GregorianCalendarSystem.FIELD_MINS[GregorianCalendarSystem.Fields.SECOND.ordinal()]
+ || sec > GregorianCalendarSystem.FIELD_MAXS[GregorianCalendarSystem.Fields.SECOND.ordinal()]) {
+ throw new HyracksDataException(timeErrorMessage + ": sec " + sec);
+ }
+
+ offset += 2;
+
+ if ((isExtendedForm && length > offset && charAccessor.getCharAt(offset) == '.')
+ || (!isExtendedForm && length > offset)) {
+
+ offset += (isExtendedForm) ? 1 : 0;
+ int i = 0;
+ for (; i < 3 && offset + i < length; i++) {
+ if (charAccessor.getCharAt(offset + i) >= '0' && charAccessor.getCharAt(offset + i) <= '9') {
+ millis = millis * 10 + charAccessor.getCharAt(offset + i) - '0';
+ } else {
+ break;
+ }
+ }
+
+ offset += i;
+
+ for (; i < 3; i++) {
+ millis = millis * 10;
+ }
+
+ // error is thrown if more than three digits are seen for the millisecond part
+ if (charAccessor.getLength() > offset && charAccessor.getCharAt(offset) >= '0'
+ && charAccessor.getCharAt(offset) <= '9') {
+ throw new HyracksDataException(timeErrorMessage + ": too many fields for millisecond.");
+ }
+ }
+
+ if (length > offset) {
+ timezone = parseTimezonePart(charAccessor, offset);
+ }
+
+ return GregorianCalendarSystem.getInstance().getChronon(hour, min, sec, millis, timezone);
+ }
+
+ /**
+ * Parse the timezone designator (e.g. "Z", "+08:00" or "-0800") in the given char sequence,
+ * starting at the given offset.
+ *
+ * @param charAccessor
+ * @return the timezone offset encoded in units of 15 minutes
+ */
+ public static <T> int parseTimezonePart(ICharSequenceAccessor<T> charAccessor, int offset)
+ throws HyracksDataException {
+ int timezone = 0;
+
+ if (charAccessor.getCharAt(offset) != 'Z') {
+ if ((charAccessor.getCharAt(offset) != '+' && charAccessor.getCharAt(offset) != '-')) {
+ throw new HyracksDataException("Wrong timezone format: missing sign or missing colon for a time zone");
+ }
+
+ short timezoneHour = 0;
+ short timezoneMinute = 0;
+
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + 1 + i) >= '0' && charAccessor.getCharAt(offset + 1 + i) <= '9')) {
+ timezoneHour = (short) (timezoneHour * 10 + charAccessor.getCharAt(offset + 1 + i) - '0');
+ } else {
+ throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone hour field");
+ }
+ }
+
+ if (timezoneHour < GregorianCalendarSystem.TIMEZONE_HOUR_MIN
+ || timezoneHour > GregorianCalendarSystem.TIMEZONE_HOUR_MAX) {
+ throw new HyracksDataException(timeErrorMessage + ": time zone hour " + timezoneHour);
+ }
+
+ int temp_offset = (charAccessor.getCharAt(offset + 3) == ':') ? 1 : 0;
+
+ for (int i = 0; i < 2; i++) {
+ if ((charAccessor.getCharAt(offset + temp_offset + 3 + i) >= '0' && charAccessor.getCharAt(offset
+ + temp_offset + 3 + i) <= '9')) {
+ timezoneMinute = (short) (timezoneMinute * 10
+ + charAccessor.getCharAt(offset + temp_offset + 3 + i) - '0');
+ } else {
+ throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone minute field");
+ }
+ }
+
+ if (timezoneMinute < GregorianCalendarSystem.TIMEZONE_MIN_MIN
+ || timezoneMinute > GregorianCalendarSystem.TIMEZONE_MIN_MAX) {
+ throw new HyracksDataException(timeErrorMessage + ": time zone minute " + timezoneMinute);
+ }
+
+ if (charAccessor.getCharAt(offset) == '-') {
+ timezone = (byte) -((timezoneHour * 4) + timezoneMinute / 15);
+ } else {
+ timezone = (byte) ((timezoneHour * 4) + timezoneMinute / 15);
+ }
+ }
+ return timezone;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
index e1a2135..453c86f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ByteArrayCharSequenceAccessor.java
@@ -14,27 +14,41 @@
*/
package edu.uci.ics.asterix.om.base.temporal;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
public class ByteArrayCharSequenceAccessor implements ICharSequenceAccessor<Byte[]> {
- private byte[] string;
+ private byte[] buf;
private int offset;
- private int beginOffset;
+ private int length;
@Override
- public char getCharAt(int index) {
- return (char) (string[index + offset + beginOffset]);
+ public char getCharAt(int index) throws AsterixRuntimeException {
+ if (index < 0 || index >= length) {
+ throw new AsterixRuntimeException("Byte array char accessor is out of bound: " + index + ":" + length);
+ }
+ return (char) (buf[index + offset]);
}
- /* The offset is the position of the first letter in the byte array */
- public void reset(byte[] obj, int beginOffset, int offset) {
- string = obj;
+ /**
+ * Reset the wrapped byte array.
+ *
+ * @param obj
+ * The byte array to be wrapped
+ * @param offset
+ * The offset in the byte array at which the wrapped char sequence starts.
+ * @param length
+ * The length of the wrapped char sequence.
+ */
+ public void reset(byte[] obj, int offset, int length) {
+ this.buf = obj;
this.offset = offset;
- this.beginOffset = beginOffset;
+ this.length = length;
}
@Override
public int getLength() {
- return ((string[beginOffset - 2] & 0xff) << 8) + ((string[beginOffset - 1] & 0xff) << 0) - offset;
+ return length;
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java
new file mode 100644
index 0000000..404f0ee
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/CharArrayCharSequenceAccessor.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
+public class CharArrayCharSequenceAccessor implements ICharSequenceAccessor<char[]> {
+
+ private char[] buf;
+ private int offset;
+ private int length;
+
+ @Override
+ public char getCharAt(int index) throws AsterixRuntimeException {
+ if (index < 0 || index >= length) {
+ throw new AsterixRuntimeException("Byte array char accessor is out of bound: " + index + ":" + length);
+ }
+ return (char) (buf[index + offset]);
+ }
+
+ /**
+ * Reset the wrapped char array.
+ *
+ * @param obj
+ * The char array to be wrapped
+ * @param offset
+ * The offset in the char array at which the wrapped char sequence starts.
+ * @param length
+ * The length of the wrapped char sequence.
+ */
+ public void reset(char[] obj, int offset, int length) {
+ this.buf = obj;
+ this.offset = offset;
+ this.length = length;
+ }
+
+ @Override
+ public int getLength() {
+ return length;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java
new file mode 100644
index 0000000..9d6bc2f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/DurationArithmeticOperations.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.base.temporal;
+
+/**
+ * Algorithms for duration related arithmetic operations.
+ */
+public class DurationArithmeticOperations {
+
+ private final static GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ /**
+ * Add a duration (with yearMonth and dayTime) onto a time point. The algorithm works as described in
+ * <a
+ * href="http://www.w3.org/TR/xmlschema-2/#adding-durations-to-dateTimes">"XML: adding durations to dateTimes"</a>.
+ * <p/>
+ * The basic algorithm is: the duration is applied to the time point as two separate fields: the year-month field
+ * and the day-time field. The year-month field is applied first, clamping the day-of-month to the target month's
+ * valid range (for example, adding 1M to 03-31 returns 04-30). Then the day-time field is applied.
+ * <p/>
+ *
+ * @param pointChronon
+ * @param yearMonthDuration
+ * @param dayTimeDuration
+ * @return
+ */
+ public static long addDuration(long pointChronon, int yearMonthDuration, long dayTimeDuration) {
+
+ int year = calSystem.getYear(pointChronon);
+ int month = calSystem.getMonthOfYear(pointChronon, year);
+ int day = calSystem.getDayOfMonthYear(pointChronon, year, month);
+ int hour = calSystem.getHourOfDay(pointChronon);
+ int min = calSystem.getMinOfHour(pointChronon);
+ int sec = calSystem.getSecOfMin(pointChronon);
+ int ms = calSystem.getMillisOfSec(pointChronon);
+
+ // Apply the year-month duration
+ int carry = yearMonthDuration / 12;
+ month += (yearMonthDuration % 12);
+
+ if (month < 0) {
+ month += 12;
+ carry -= 1;
+ } else if (month > 12) {
+ month -= 12;
+ carry += 1;
+ }
+
+ year += carry;
+
+ boolean isLeapYear = calSystem.isLeapYear(year);
+
+ if (isLeapYear) {
+ if (day > GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month - 1]) {
+ day = GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month - 1];
+ }
+ } else {
+ if (day > GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month - 1]) {
+ day = GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month - 1];
+ }
+ }
+
+ return calSystem.getChronon(year, month, day, hour, min, sec, ms, 0) + dayTimeDuration;
+ }
+
+ public static int addDuration(int pointChronon, long dayTimeDuration) {
+ int rtnChronon = (int) ((pointChronon + dayTimeDuration) % GregorianCalendarSystem.CHRONON_OF_DAY);
+ if (rtnChronon < 0) {
+ rtnChronon += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ return rtnChronon;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
index 149a1d2..d43f235 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/GregorianCalendarSystem.java
@@ -57,6 +57,7 @@
public static final int CHRONON_OF_MINUTE = 60 * CHRONON_OF_SECOND;
public static final int CHRONON_OF_HOUR = 60 * CHRONON_OF_MINUTE;
public static final long CHRONON_OF_DAY = 24 * CHRONON_OF_HOUR;
+ public static final int MONTHS_IN_A_YEAR = 12;
/**
* Minimum feasible value of each field
@@ -238,8 +239,25 @@
return chrononTime;
}
+ public long adjustChrononByTimezone(long chronon, int timezone) {
+ return chronon + timezone / 4 * CHRONON_OF_HOUR + (timezone % 4) * 15 * CHRONON_OF_MINUTE;
+ }
+
+ public static int getChrononInDays(long chronon) {
+ if (chronon >= 0) {
+ return (int) (chronon / CHRONON_OF_DAY);
+ } else {
+ if (chronon % CHRONON_OF_DAY != 0) {
+ return (int) (chronon / CHRONON_OF_DAY - 1);
+ } else {
+ return (int) (chronon / CHRONON_OF_DAY);
+ }
+ }
+ }
+
/**
- * Get the extended string representation of the given UTC chronon time under the given time zone. Only fields before
+ * Get the extended string representation of the given UTC chronon time under the given time zone. Only fields
+ * before
* the given field index will be returned.
* <p/>
* The extended string representation is like:<br/>
@@ -258,6 +276,7 @@
switch (startField) {
case YEAR:
+ default:
sbder.append(String.format(year < 0 ? "%05d" : "%04d", year));
if (untilField == Fields.YEAR) {
return;
@@ -304,10 +323,13 @@
break;
}
- if (untilField.compareTo(Fields.DAY) > 0) {
+ if (timezone == 0) {
sbder.append("Z");
} else {
short tzMin = (short) ((timezone % 4) * 15);
+ if (tzMin < 0) {
+ tzMin = (short) (-1 * tzMin);
+ }
short tzHr = (short) (timezone / 4);
sbder.append((tzHr >= 0 ? "+" : "-")).append(String.format("%02d", (tzHr < 0 ? -tzHr : tzHr))).append(":")
.append(String.format("%02d", tzMin));
@@ -328,6 +350,7 @@
switch (startField) {
case YEAR:
+ default:
sbder.append(String.format(year < 0 ? "%05d" : "%04d", year));
if (untilField == Fields.YEAR) {
return;
@@ -359,10 +382,13 @@
break;
}
- if (untilField.compareTo(Fields.DAY) > 0) {
+ if (timezone == 0) {
sbder.append("Z");
} else {
short tzMin = (short) ((timezone % 4) * 15);
+ if (tzMin < 0) {
+ tzMin = (short) (-1 * tzMin);
+ }
short tzHr = (short) (timezone / 4);
sbder.append((tzHr >= 0 ? "+" : "-")).append(String.format("%02d", (tzHr < 0 ? -tzHr : tzHr)))
.append(String.format("%02d", tzMin));
@@ -422,7 +448,7 @@
* @param year
* @return
*/
- protected boolean isLeapYear(int year) {
+ public boolean isLeapYear(int year) {
return ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0);
}
@@ -454,7 +480,8 @@
* Get the year for the given chronon time.
* <p/>
* This code is directly from the Joda library BadicChronology.java.<br/>
- * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+ * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May
+ * 7th, 2012.
*
* @param chrononTime
* @return
@@ -501,7 +528,8 @@
* Get the month of the year for the given chronon time and the year.
* <p/>
* This code is directly from the Joda library BasicGJChronology.java.<br/>
- * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+ * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May
+ * 7th, 2012 and commented by Theodoros Ioannou on July 2012.
* <p/>
*
* @param millis
@@ -565,7 +593,8 @@
* Get the day of the given month and year for the input chronon time.
* <p/>
* This function is directly from Joda Library BasicChronology.java.<br/>
- * The original authers are Stephen Colebourne, Brain S O'Neill and Guy Allard, and modified by JArod Wen on May 7th, 2012.
+ * The original authors are Stephen Colebourne, Brian S O'Neill and Guy Allard, and modified by JArod Wen on May
+ * 7th, 2012.
* <p/>
*
* @param millis
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
index 6b4e898..d5a99a0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/ICharSequenceAccessor.java
@@ -14,10 +14,23 @@
*/
package edu.uci.ics.asterix.om.base.temporal;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
public interface ICharSequenceAccessor<T> {
- public char getCharAt(int index);
+ /**
+ * Return the character in the wrapped char sequence at the given index.
+ *
+ * @param index
+ * @return
+ */
+ public char getCharAt(int index) throws AsterixRuntimeException;
+ /**
+ * Get the length of the wrapped char sequence.
+ *
+ * @return
+ */
public int getLength();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
index 6c02340..17e483a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/base/temporal/StringCharSequenceAccessor.java
@@ -14,24 +14,31 @@
*/
package edu.uci.ics.asterix.om.base.temporal;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+
public class StringCharSequenceAccessor implements ICharSequenceAccessor<String> {
private String string;
private int offset;
+ private int length;
@Override
- public char getCharAt(int index) {
+ public char getCharAt(int index) throws AsterixRuntimeException {
+ if (index >= length) {
+ throw new AsterixRuntimeException("String accessor is out of bound.");
+ }
return string.charAt(index + offset);
}
- public void reset(String obj, int offset) {
- string = obj;
+ public void reset(String obj, int offset, int len) {
+ this.string = obj;
this.offset = offset;
+ this.length = len;
}
@Override
public int getLength() {
- return string.length() - offset;
+ return length;
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index 42904fc..a547737 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -12,6 +12,8 @@
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ABooleanTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ACircleTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ADateTimeTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ADateTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ADoubleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AFloatTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AInt32TypeComputer;
@@ -21,6 +23,7 @@
import edu.uci.ics.asterix.om.typecomputer.impl.APolygonTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.ARectangleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.AStringTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.ATimeTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryBooleanOrNullFunctionTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringBoolOrNullTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.BinaryStringStringOrNullTypeComputer;
@@ -38,7 +41,9 @@
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedSumTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedSwitchCaseComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedUnaryMinusTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.NotNullTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OpenRecordConstructorResultType;
+import edu.uci.ics.asterix.om.typecomputer.impl.OptionalABooleanTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalACircleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalADateTimeTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalADateTypeComputer;
@@ -49,6 +54,7 @@
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt32TypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt64TypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAInt8TypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAIntervalTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalALineTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAPoint3DTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAPointTypeComputer;
@@ -235,11 +241,14 @@
public final static FunctionIdentifier AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-avg", 1);
public final static FunctionIdentifier COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-count", 1);
public final static FunctionIdentifier SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sum", 1);
- public final static FunctionIdentifier LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sum", 1);
+ public final static FunctionIdentifier LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-sum", 1);
public final static FunctionIdentifier MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-max", 1);
- public final static FunctionIdentifier LOCAL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-max", 1);
+ public final static FunctionIdentifier LOCAL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-max", 1);
public final static FunctionIdentifier MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-min", 1);
- public final static FunctionIdentifier LOCAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-min", 1);
+ public final static FunctionIdentifier LOCAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-min", 1);
public final static FunctionIdentifier GLOBAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"agg-global-avg", 1);
public final static FunctionIdentifier LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -270,8 +279,6 @@
public final static FunctionIdentifier SERIAL_LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"local-avg-serial", 1);
- public final static FunctionIdentifier YEAR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year", 1);
-
public final static FunctionIdentifier SCAN_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"scan-collection", 1);
public final static FunctionIdentifier SUBSET_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -363,6 +370,49 @@
"datetime", 1);
public final static FunctionIdentifier DURATION_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"duration", 1);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_DATE = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-from-date", 2);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_TIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-from-time", 2);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_DATETIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-from-datetime", 2);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATE = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-start-from-date", 2);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_TIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-start-from-time", 2);
+ public final static FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATETIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-start-from-datetime", 2);
+ public final static FunctionIdentifier INTERVAL_BEFORE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-before", 2);
+ public final static FunctionIdentifier INTERVAL_AFTER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-after", 2);
+ public final static FunctionIdentifier INTERVAL_MEETS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-meets", 2);
+ public final static FunctionIdentifier INTERVAL_MET_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-met-by", 2);
+ public final static FunctionIdentifier INTERVAL_OVERLAPS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-overlaps", 2);
+ public final static FunctionIdentifier INTERVAL_OVERLAPPED_BY = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "interval-overlapped-by", 2);
+ public final static FunctionIdentifier OVERLAP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "overlap", 2);
+ public final static FunctionIdentifier INTERVAL_STARTS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-starts", 2);
+ public final static FunctionIdentifier INTERVAL_STARTED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-started-by", 2);
+ public final static FunctionIdentifier INTERVAL_COVERS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-covers", 2);
+ public final static FunctionIdentifier INTERVAL_COVERED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-covered-by", 2);
+ public final static FunctionIdentifier INTERVAL_ENDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-ends", 2);
+ public final static FunctionIdentifier INTERVAL_ENDED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-ended-by", 2);
+ public final static FunctionIdentifier CURRENT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "current-time", 0);
+ public final static FunctionIdentifier CURRENT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "current-date", 0);
+ public final static FunctionIdentifier CURRENT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "current-datetime", 0);
// spatial
public final static FunctionIdentifier CREATE_POINT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -394,6 +444,56 @@
public final static FunctionIdentifier CAST_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"cast-record", 1);
+ // Spatial and temporal type accessors
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "year", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MONTH = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "month", 2);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_DAY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "day", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_HOUR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "hour", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "minute", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_SEC = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "second", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MILLISEC = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "millisecond", 1);
+
+ // Temporal functions
+ public static final FunctionIdentifier DATE_FROM_UNIX_TIME_IN_DAYS = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "date-from-unix-time-in-days", 1);
+ public static final FunctionIdentifier DATE_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "date-from-datetime", 1);
+ public final static FunctionIdentifier ADD_DATE_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "add-date-duration", 2);
+ public final static FunctionIdentifier SUBTRACT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "subtract-date", 2);
+ public final static FunctionIdentifier TIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "time-from-unix-time-in-ms", 1);
+ public final static FunctionIdentifier TIME_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "time-from-datetime", 1);
+ public final static FunctionIdentifier SUBTRACT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "subtract-time", 2);
+ public final static FunctionIdentifier ADD_TIME_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "add-time-duration", 2);
+ public final static FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-ms", 1);
+ public final static FunctionIdentifier DATETIME_FROM_DATE_TIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "datetime-from-date-time", 2);
+ public final static FunctionIdentifier SUBTRACT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "subtract-datetime", 2);
+ public final static FunctionIdentifier ADD_DATETIME_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "add-datetime-duration", 2);
+ public final static FunctionIdentifier CALENDAR_DURATION_FROM_DATETIME = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "calendar-duration-from-datetime", 2);
+ public final static FunctionIdentifier CALENDAR_DURATION_FROM_DATE = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "calendar-duration-from-date", 2);
+ public final static FunctionIdentifier ADJUST_TIME_FOR_TIMEZONE = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "adjust-time-for-timezone", 2);
+ public final static FunctionIdentifier ADJUST_DATETIME_FOR_TIMEZONE = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "adjust-datetime-for-timezone", 2);
+
public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-x", 1);
public final static FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-y", 1);
public final static FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-radius", 1);
@@ -412,6 +512,9 @@
public static final FunctionIdentifier NUMERIC_ADD = AlgebricksBuiltinFunctions.NUMERIC_ADD;
public static final FunctionIdentifier IS_NULL = AlgebricksBuiltinFunctions.IS_NULL;
+ public static final FunctionIdentifier NOT_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "not-null",
+ 1);
+
public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
IFunctionInfo finfo = finfoRepo.get(fid);;
if (finfo == null) {
@@ -440,6 +543,7 @@
add(NUMERIC_ADD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
// and then, Asterix builtin functions
+ add(NOT_NULL, NotNullTypeComputer.INSTANCE);
add(ANY_COLLECTION_MEMBER, NonTaggedCollectionMemberResultType.INSTANCE);
addPrivateFunction(AVG, OptionalADoubleTypeComputer.INSTANCE);
add(BOOLEAN_CONSTRUCTOR, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE);
@@ -648,7 +752,6 @@
add(TIME_CONSTRUCTOR, OptionalATimeTypeComputer.INSTANCE);
add(TYPE_OF, null); // TODO
add(UNORDERED_LIST_CONSTRUCTOR, UnorderedListConstructorResultType.INSTANCE);
- add(YEAR, OptionalAInt32TypeComputer.INSTANCE);
add(WORD_TOKENS, new IResultTypeComputer() {
@Override
@@ -658,12 +761,64 @@
}
});
+ // temporal type accessors
+ add(ACCESSOR_TEMPORAL_YEAR, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_MONTH, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_DAY, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_HOUR, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_MIN, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_SEC, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_MILLISEC, OptionalAInt32TypeComputer.INSTANCE);
+
+ // temporal functions
+ add(DATE_FROM_UNIX_TIME_IN_DAYS, OptionalADateTypeComputer.INSTANCE);
+ add(DATE_FROM_DATETIME, OptionalADateTypeComputer.INSTANCE);
+ add(ADD_DATE_DURATION, OptionalADateTypeComputer.INSTANCE);
+ add(SUBTRACT_DATE, OptionalADurationTypeComputer.INSTANCE);
+ add(TIME_FROM_UNIX_TIME_IN_MS, OptionalATimeTypeComputer.INSTANCE);
+ add(TIME_FROM_DATETIME, OptionalATimeTypeComputer.INSTANCE);
+ add(SUBTRACT_TIME, OptionalADurationTypeComputer.INSTANCE);
+ add(ADD_TIME_DURATION, OptionalATimeTypeComputer.INSTANCE);
+ add(DATETIME_FROM_DATE_TIME, OptionalADateTimeTypeComputer.INSTANCE);
+ add(DATETIME_FROM_UNIX_TIME_IN_MS, OptionalADateTimeTypeComputer.INSTANCE);
+ add(SUBTRACT_DATETIME, OptionalADurationTypeComputer.INSTANCE);
+ add(ADD_DATETIME_DURATION, OptionalADateTimeTypeComputer.INSTANCE);
+ add(CALENDAR_DURATION_FROM_DATETIME, OptionalADurationTypeComputer.INSTANCE);
+ add(CALENDAR_DURATION_FROM_DATE, OptionalADurationTypeComputer.INSTANCE);
+ add(ADJUST_DATETIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
+ add(ADJUST_TIME_FOR_TIMEZONE, OptionalAStringTypeComputer.INSTANCE);
+ add(INTERVAL_BEFORE, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_AFTER, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_MEETS, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_MET_BY, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_OVERLAPS, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_OVERLAPPED_BY, OptionalABooleanTypeComputer.INSTANCE);
+ add(OVERLAP, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_STARTS, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_STARTED_BY, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_COVERS, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_COVERED_BY, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_ENDS, OptionalABooleanTypeComputer.INSTANCE);
+ add(INTERVAL_ENDED_BY, OptionalABooleanTypeComputer.INSTANCE);
+ add(CURRENT_DATE, ADateTypeComputer.INSTANCE);
+ add(CURRENT_TIME, ATimeTypeComputer.INSTANCE);
+ add(CURRENT_DATETIME, ADateTimeTypeComputer.INSTANCE);
+
+ // interval constructors
+ add(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE);
+ add(INTERVAL_CONSTRUCTOR_TIME, OptionalAIntervalTypeComputer.INSTANCE);
+ add(INTERVAL_CONSTRUCTOR_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
+ add(INTERVAL_CONSTRUCTOR_START_FROM_DATE, OptionalAIntervalTypeComputer.INSTANCE);
+ add(INTERVAL_CONSTRUCTOR_START_FROM_DATETIME, OptionalAIntervalTypeComputer.INSTANCE);
+ add(INTERVAL_CONSTRUCTOR_START_FROM_TIME, OptionalAIntervalTypeComputer.INSTANCE);
+
String metadataFunctionLoaderClassName = "edu.uci.ics.asterix.metadata.functions.MetadataBuiltinFunctions";
try {
Class.forName(metadataFunctionLoaderClassName);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
+
}
static {
@@ -840,7 +995,7 @@
funTypeComputer.put(functionInfo, typeComputer);
finfoRepo.put(fi);
}
-
+
private static IFunctionInfo addPrivateFunction(FunctionIdentifier fi, IResultTypeComputer typeComputer) {
IFunctionInfo functionInfo = getAsterixFunctionInfo(fi);
builtinFunctionsSet.put(functionInfo, functionInfo);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
index 9184616..45ae5c5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -15,6 +15,8 @@
package edu.uci.ics.asterix.om.pointables.base;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -27,13 +29,19 @@
* fields in the open part, e.g., a "record" (nested) field in the open part is
* always a fully open one, and a "list" field in the open part is always a list
* of "ANY".
- *
*/
public class DefaultOpenFieldType {
// nested open field rec type
- public static ARecordType NESTED_OPEN_RECORD_TYPE = new ARecordType("nested-open", new String[] {},
- new IAType[] {}, true);
+ public static ARecordType NESTED_OPEN_RECORD_TYPE;
+
+ static {
+ try {
+ NESTED_OPEN_RECORD_TYPE = new ARecordType("nested-open", new String[] {}, new IAType[] {}, true);
+ } catch (AsterixException e) {
+ throw new AsterixRuntimeException();
+ }
+ }
// nested open list type
public static AOrderedListType NESTED_OPEN_AORDERED_LIST_TYPE = new AOrderedListType(BuiltinType.ANY,
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
index fbad7a7..494ea6f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
@@ -15,9 +15,11 @@
package edu.uci.ics.asterix.om.pointables.cast;
+import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
@@ -28,6 +30,7 @@
import edu.uci.ics.asterix.om.pointables.PointableAllocator;
import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnionType;
@@ -35,6 +38,7 @@
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
@@ -184,7 +188,7 @@
}
private void matchClosedPart(List<IVisitablePointable> fieldNames, List<IVisitablePointable> fieldTypeTags,
- List<IVisitablePointable> fieldValues) {
+ List<IVisitablePointable> fieldValues) throws AsterixException {
// sort-merge based match
quickSort(fieldNamesSortedIndex, fieldNames, 0, numInputFields - 1);
int fnStart = 0;
@@ -213,8 +217,30 @@
// check unmatched fields in the input type
for (int i = 0; i < openFields.length; i++) {
- if (openFields[i] == true && !cachedReqType.isOpen())
- throw new IllegalStateException("type mismatch: including extra closed fields");
+ if (openFields[i] == true && !cachedReqType.isOpen()) {
+ //print the field name
+ IVisitablePointable fieldName = fieldNames.get(i);
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ PrintStream ps = new PrintStream(bos);
+ APrintVisitor printVisitor = new APrintVisitor();
+ Pair<PrintStream, ATypeTag> visitorArg = new Pair<PrintStream, ATypeTag>(ps, ATypeTag.STRING);
+ fieldName.accept(printVisitor, visitorArg);
+
+ //print the colon
+ ps.print(":");
+
+ //print the field type
+ IVisitablePointable fieldType = fieldTypeTags.get(i);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fieldType.getByteArray()[fieldType
+ .getStartOffset()]);
+ ps.print(typeTag);
+
+ //collect the output message
+ byte[] output = bos.toByteArray();
+
+ //throw the exception
+ throw new IllegalStateException("type mismatch: including an extra field " + new String(output));
+ }
}
// check unmatched fields in the required type
@@ -223,7 +249,8 @@
IAType t = cachedReqType.getFieldTypes()[i];
if (!(t.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t))) {
// no matched field in the input for a required closed field
- throw new IllegalStateException("type mismatch: miss a required closed field");
+ throw new IllegalStateException("type mismatch: miss a required closed field "
+ + cachedReqType.getFieldNames()[i] + ":" + t.getTypeName());
}
}
}
@@ -288,8 +315,7 @@
int j = right;
while (true) {
// grow from the left
- while (compare(names.get(index[++i]), names.get(index[right])) < 0)
- ;
+ while (compare(names.get(index[++i]), names.get(index[right])) < 0);
// lower from the right
while (compare(names.get(index[right]), names.get(index[--j])) < 0)
if (j == left)
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java
new file mode 100644
index 0000000..c7a51da
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ADateTimeTypeComputer.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class ADateTimeTypeComputer implements IResultTypeComputer {
+
+ public static final ADateTimeTypeComputer INSTANCE = new ADateTimeTypeComputer();
+
+ private ADateTimeTypeComputer() {
+ }
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ return BuiltinType.ADATETIME;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java
new file mode 100644
index 0000000..55e1bc4
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ATimeTypeComputer.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class ATimeTypeComputer implements IResultTypeComputer {
+
+ public static final ATimeTypeComputer INSTANCE = new ATimeTypeComputer();
+
+ private ATimeTypeComputer() {
+ }
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ return BuiltinType.ATIME;
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
index daf7164..8ed2084 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/ClosedRecordConstructorResultType.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -19,6 +19,7 @@
import org.apache.commons.lang3.mutable.Mutable;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -49,7 +50,7 @@
ARecordType type = (ARecordType) TypeComputerUtilities.getRequiredType(f);
if (type != null)
return type;
-
+
int n = f.getArguments().size() / 2;
String[] fieldNames = new String[n];
IAType[] fieldTypes = new IAType[n];
@@ -68,6 +69,10 @@
fieldTypes[i] = (IAType) env.getType(e2);
i++;
}
- return new ARecordType(null, fieldNames, fieldTypes, false);
+ try {
+ return new ARecordType(null, fieldNames, fieldTypes, false);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
index 8b54197..ee52425 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedLocalAvgTypeComputer.java
@@ -1,8 +1,24 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.om.typecomputer.impl;
import java.util.ArrayList;
import java.util.List;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.AUnionType;
@@ -23,7 +39,11 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- return new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ try {
+ return new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java
new file mode 100644
index 0000000..a9881c2
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+/**
+ * This class is the type computer for not-null function.
+ * If the input type is not a union, we just return it.
+ * If the input type is a union,
+ * case 1: we return a new union without null if the new union still has more than one type;
+ * case 2: we return the non-null item type from the original union if only null and that one item type were in the original union.
+ */
+public class NotNullTypeComputer implements IResultTypeComputer {
+
+ public static final NotNullTypeComputer INSTANCE = new NotNullTypeComputer();
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
+ IAType type = (IAType) env.getType(f.getArguments().get(0).getValue());
+ if (type.getTypeTag() != ATypeTag.UNION) {
+ // directly return the input type if it is not a union
+ return type;
+ }
+
+ AUnionType unionType = (AUnionType) type;
+ List<IAType> items = new ArrayList<IAType>();
+ // copy the item types
+ items.addAll(unionType.getUnionList());
+
+ // remove null
+ for (int i = items.size() - 1; i >= 0; i--) {
+ IAType itemType = items.get(i);
+ if (itemType.getTypeTag() == ATypeTag.NULL) {
+ items.remove(i);
+ }
+ }
+ if (items.size() == 1) {
+ //only one type is left
+ return items.get(0);
+ } else {
+ //more than one type is left
+ return new AUnionType(items, unionType.getTypeName());
+ }
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
index c46c59b..0c6fc55 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OpenRecordConstructorResultType.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -21,6 +21,7 @@
import org.apache.commons.lang3.mutable.Mutable;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -72,6 +73,10 @@
IAType[] fieldTypes = new IAType[n];
fieldNames = namesList.toArray(fieldNames);
fieldTypes = typesList.toArray(fieldTypes);
- return new ARecordType(null, fieldNames, fieldTypes, true);
+ try {
+ return new ARecordType(null, fieldNames, fieldTypes, true);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java
new file mode 100644
index 0000000..abeea2a
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalABooleanTypeComputer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class OptionalABooleanTypeComputer implements IResultTypeComputer {
+
+ public static final OptionalABooleanTypeComputer INSTANCE = new OptionalABooleanTypeComputer();
+
+ private OptionalABooleanTypeComputer() {
+ }
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ List<IAType> unionList = new ArrayList<IAType>();
+ unionList.add(BuiltinType.ANULL);
+ unionList.add(BuiltinType.ABOOLEAN);
+ return new AUnionType(unionList, "OptionalBoolean");
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java
new file mode 100644
index 0000000..bb9f993
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalAIntervalTypeComputer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class OptionalAIntervalTypeComputer implements IResultTypeComputer {
+
+ public static final OptionalAIntervalTypeComputer INSTANCE = new OptionalAIntervalTypeComputer();
+
+ private OptionalAIntervalTypeComputer() {
+
+ }
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ List<IAType> unionList = new ArrayList<IAType>();
+ unionList.add(BuiltinType.ANULL);
+ unionList.add(BuiltinType.AINTERVAL);
+ return new AUnionType(unionList, "OptionalInterval");
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
index d20f43b..1f072f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/RecordConstructorResultType.java
@@ -1,9 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.om.typecomputer.impl;
import java.util.Iterator;
import org.apache.commons.lang3.mutable.Mutable;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -58,6 +74,10 @@
}
i++;
}
- return new ARecordType(null, fieldNames, fieldTypes, isOpen);
+ try {
+ return new ARecordType(null, fieldNames, fieldTypes, isOpen);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index 1cf6ba7..82320e4 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -1,15 +1,38 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.om.types;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import edu.uci.ics.asterix.common.annotations.IRecordTypeAnnotation;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
public class ARecordType extends AbstractComplexType {
@@ -18,16 +41,110 @@
private IAType[] fieldTypes;
private boolean isOpen;
private final List<IRecordTypeAnnotation> annotations = new ArrayList<IRecordTypeAnnotation>();
- private final Map<String, Integer> typeMap = new HashMap<String, Integer>();
- public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen) {
+ private transient IBinaryHashFunction fieldNameHashFunction;
+ private transient IBinaryComparator fieldNameComparator;
+ private final byte serializedFieldNames[];
+ private final int serializedFieldNameOffsets[];
+ private final long hashCodeIndexPairs[];
+
+ /**
+ * @param typeName
+ * the name of the type
+ * @param fieldNames
+ * the names of the closed fields
+ * @param fieldTypes
+ * the types of the closed fields
+ * @param isOpen
+ * whether the record is open
+ * @throws AsterixException
+ * if there are duplicate field names or if there is an error serializing the field names
+ */
+ public ARecordType(String typeName, String[] fieldNames, IAType[] fieldTypes, boolean isOpen)
+ throws AsterixException {
super(typeName);
this.fieldNames = fieldNames;
this.fieldTypes = fieldTypes;
this.isOpen = isOpen;
+
+ fieldNameComparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+ .createBinaryComparator();
+ fieldNameHashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
+ .createBinaryHashFunction();
+ ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dos = new DataOutputStream(baaos);
+ serializedFieldNameOffsets = new int[fieldNames.length];
+ hashCodeIndexPairs = new long[fieldNames.length];
+
+ int length = 0;
for (int i = 0; i < fieldNames.length; i++) {
- typeMap.put(fieldNames[i], i);
+ serializedFieldNameOffsets[i] = baaos.size();
+ try {
+ dos.writeUTF(fieldNames[i]);
+ } catch (IOException e) {
+ throw new AsterixException(e);
+ }
+ length = baaos.size() - serializedFieldNameOffsets[i];
+ hashCodeIndexPairs[i] = fieldNameHashFunction.hash(baaos.getByteArray(), serializedFieldNameOffsets[i],
+ length);
+ hashCodeIndexPairs[i] = hashCodeIndexPairs[i] << 32;
+ hashCodeIndexPairs[i] = hashCodeIndexPairs[i] | i;
}
+ serializedFieldNames = baaos.getByteArray();
+
+ Arrays.sort(hashCodeIndexPairs);
+ int j;
+ for (int i = 0; i < fieldNames.length; i++) {
+ j = findFieldPosition(serializedFieldNames, serializedFieldNameOffsets[i],
+ UTF8StringPointable.getStringLength(serializedFieldNames, serializedFieldNameOffsets[i]));
+ if (j != i) {
+ throw new AsterixException("Closed fields " + j + " and " + i + " have the same field name \""
+ + fieldNames[i] + "\"");
+ }
+ }
+ }
+
+ private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
+ ois.defaultReadObject();
+ fieldNameComparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
+ .createBinaryComparator();
+ fieldNameHashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
+ .createBinaryHashFunction();
+ }
+
+ /**
+ * Returns the position of the field in the closed schema or -1 if the field does not exist.
+ *
+ * @param bytes
+ * the serialized bytes of the field name
+ * @param start
+ * the starting offset of the field name in bytes
+ * @param length
+ * the length of the field name in bytes
+ * @return the position of the field in the closed schema or -1 if the field does not exist.
+ */
+ public int findFieldPosition(byte[] bytes, int start, int length) {
+ if (hashCodeIndexPairs.length == 0) {
+ return -1;
+ }
+
+ int fIndex;
+ int probeFieldHash = fieldNameHashFunction.hash(bytes, start, length);
+ int i = Arrays.binarySearch(hashCodeIndexPairs, ((long) probeFieldHash) << 32);
+ i = (i < 0) ? -(i + 1) : i;
+
+ while (i < hashCodeIndexPairs.length && (int) (hashCodeIndexPairs[i] >>> 32) == probeFieldHash) {
+ fIndex = (int) hashCodeIndexPairs[i];
+ int cFieldLength = UTF8StringPointable.getStringLength(serializedFieldNames,
+ serializedFieldNameOffsets[fIndex]);
+ if (fieldNameComparator.compare(serializedFieldNames, serializedFieldNameOffsets[fIndex], cFieldLength,
+ bytes, start, length) == 0) {
+ return fIndex;
+ }
+ i++;
+ }
+
+ return -1;
}
public final String[] getFieldNames() {
@@ -73,17 +190,48 @@
return isOpen;
}
- public int findFieldPosition(String fldName) {
- for (int i = 0; i < fieldNames.length; i++) {
- if (fieldNames[i].equals(fldName)) {
- return i;
- }
- }
- return -1;
+ /**
+ * Returns the position of the field in the closed schema or -1 if the field does not exist.
+ *
+ * @param fieldName
+ * the name of the field whose position is sought
+ * @return the position of the field in the closed schema or -1 if the field does not exist.
+ */
+ public int findFieldPosition(String fieldName) throws IOException {
+ ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dos = new DataOutputStream(baaos);
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(fieldName, dos);
+ return findFieldPosition(baaos.getByteArray(), 0, baaos.size());
}
- public IAType getFieldType(String fieldName) {
- return fieldTypes[typeMap.get(fieldName)];
+ /**
+ * Returns the field type of the field name if it exists, otherwise null.
+ *
+ * @param fieldName
+ * the fieldName whose type is sought
+ * @return the field type of the field name if it exists, otherwise null
+ * @throws IOException
+ * if an error occurs while serializing the field name
+ */
+ public IAType getFieldType(String fieldName) throws IOException {
+ int fieldPos = findFieldPosition(fieldName);
+ if (fieldPos < 0 || fieldPos >= fieldTypes.length) {
+ return null;
+ }
+ return fieldTypes[fieldPos];
+ }
+
+ /**
+ * Returns true or false indicating whether or not a field is closed.
+ *
+ * @param fieldName
+ * the name of the field to check
+ * @return true if fieldName is a closed field, otherwise false
+ * @throws IOException
+ * if an error occurs while serializing fieldName
+ */
+ public boolean isClosedField(String fieldName) throws IOException {
+ return findFieldPosition(fieldName) != -1;
}
@Override
@@ -115,12 +263,11 @@
public int hash() {
int h = 0;
for (int i = 0; i < fieldNames.length; i++) {
- h += 31 * h + fieldNames[i].hashCode();
+ h += 31 * h + (int) (hashCodeIndexPairs[i] >> 32);
}
for (int i = 0; i < fieldTypes.length; i++) {
h += 31 * h + fieldTypes[i].hashCode();
}
return h;
}
-
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
index b75c074..e69fbcd 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ATypeTag.java
@@ -42,6 +42,7 @@
LINE(30),
POLYGON(31),
CIRCLE(32),
+ INTERVAL(34), // FIXME(review): value 34 collides with SYSTEM_NULL(34) below, and the entry is out of numeric order (before RECTANGLE(33)); serialized type-tag values must be unique
RECTANGLE(33),
SYSTEM_NULL(34);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
index 0ec3b21..4d9cd7f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/BuiltinType.java
@@ -329,6 +329,27 @@
};
+ public final static BuiltinType AINTERVAL = new LowerCaseConstructorType() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public String getDisplayName() {
+ return "AInterval";
+ }
+
+ @Override
+ public ATypeTag getTypeTag() {
+ return ATypeTag.INTERVAL;
+ }
+
+ @Override
+ public String getTypeName() {
+ return "interval";
+ }
+
+ };
+
public final static BuiltinType APOINT = new LowerCaseConstructorType() {
private static final long serialVersionUID = 1L;
@@ -515,25 +536,25 @@
return getTypeTag().toString();
}
- @Override
- public boolean deepEqual(IAObject obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof BuiltinType)) {
- return false;
- }
- return ((BuiltinType) obj).getTypeTag().equals(getTypeTag());
- }
+ @Override
+ public boolean deepEqual(IAObject obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof BuiltinType)) {
+ return false;
+ }
+ return ((BuiltinType) obj).getTypeTag().equals(getTypeTag());
+ }
@Override
public boolean equals(Object object) {
return this.deepEqual((IAObject) object);
}
-
+
@Override
public int hashCode() {
- return getTypeTag().hashCode();
+ return getTypeTag().hashCode();
}
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
index 624d7eb..76f3301 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
@@ -18,39 +18,51 @@
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
+/**
+ * Utility class for obtaining information on the set of Hyracks NodeController
+ * processes that are running on a given host.
+ */
public class AsterixRuntimeUtil {
- public static Set<String> getNodeControllersOnIP(String ipAddress) throws AsterixException {
- Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
- Set<String> nodeControllersAtLocation = nodeControllerInfo.get(ipAddress);
- return nodeControllersAtLocation;
- }
+ public static Set<String> getNodeControllersOnIP(String ipAddress)
+ throws Exception {
+ Map<String, Set<String>> nodeControllerInfo = getNodeControllerMap();
+ Set<String> nodeControllersAtLocation = nodeControllerInfo
+ .get(ipAddress);
+ return nodeControllersAtLocation;
+ }
- public static Set<String> getNodeControllersOnHostName(String hostName) throws UnknownHostException {
- Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
- String address;
- address = InetAddress.getByName(hostName).getHostAddress();
- if (address.equals("127.0.1.1")) {
- address = "127.0.0.1";
- }
- Set<String> nodeControllersAtLocation = nodeControllerInfo.get(address);
- return nodeControllersAtLocation;
- }
+ public static List<String> getAllNodeControllers() throws Exception {
+ Collection<Set<String>> nodeControllersCollection = getNodeControllerMap()
+ .values();
+ List<String> nodeControllers = new ArrayList<String>();
+ for (Set<String> ncCollection : nodeControllersCollection) {
+ nodeControllers.addAll(ncCollection);
+ }
+ return nodeControllers;
+ }
- public static List<String> getAllNodeControllers() {
+ public static Map<String, Set<String>> getNodeControllerMap()
+ throws Exception {
+ Map<String, Set<String>> map = new HashMap<String, Set<String>>();
+ AsterixAppContextInfoImpl.getInstance().getCCApplicationContext()
+ .getCCContext().getIPAddressNodeMap(map);
+ return map;
+ }
- Collection<Set<String>> nodeControllersCollection = AsterixAppContextInfoImpl.getNodeControllerMap().values();
- List<String> nodeControllers = new ArrayList<String>();
- for (Set<String> ncCollection : nodeControllersCollection) {
- nodeControllers.addAll(ncCollection);
- }
- return nodeControllers;
- }
+ public static String getIPAddress(String hostname)
+ throws UnknownHostException {
+ String address = InetAddress.getByName(hostname).getHostAddress();
+ if (address.equals("127.0.1.1")) {
+ address = "127.0.0.1";
+ }
+ return address;
+ }
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
index 8832164..a8c0485 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/NonTaggedFormatUtil.java
@@ -95,6 +95,8 @@
return 12;
case POINT:
return 16;
+ case INTERVAL:
+ return 17;
case POINT3D:
case CIRCLE:
return 24;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
index 2c82174..703b792 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/IOMVisitor.java
@@ -14,6 +14,7 @@
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AInt64;
import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.ALine;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -46,6 +47,8 @@
public void visitADuration(ADuration obj) throws AsterixException;
+ public void visitAInterval(AInterval obj) throws AsterixException;
+
public void visitADate(ADate obj) throws AsterixException;
public void visitATime(ATime obj) throws AsterixException;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
index 1697c5c..e7856f0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/visitors/OMPrintToStringVisitor.java
@@ -15,6 +15,7 @@
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.AInt64;
import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.ALine;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
@@ -90,6 +91,12 @@
}
@Override
+ public void visitAInterval(AInterval obj) throws AsterixException {
+ // TODO Auto-generated method stub
+ throw new NotImplementedException();
+ }
+
+ @Override
public void visitAFloat(AFloat obj) throws AsterixException {
buffer.append(obj.getFloatValue() + "f");
}
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index eae4e62..4223787 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
@@ -17,27 +16,85 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>javacc-maven-plugin</artifactId>
- <version>2.6</version>
- <executions>
- <execution>
- <id>javacc</id>
- <goals>
- <goal>javacc</goal>
- </goals>
- <configuration>
- <isStatic>false</isStatic>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
+ <plugin>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>lexer-generator-maven-plugin</artifactId>
+ <version>0.1</version>
+ <configuration>
+ <grammarFile>src/main/resources/adm.grammar</grammarFile>
+ <outputDir>${project.build.directory}/generated-sources/edu/uci/ics/asterix/runtime/operators/file/adm</outputDir>
+ </configuration>
+ <executions>
+ <execution>
+ <id>generate-lexer</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>generate-lexer</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.build.directory}/generated-sources/</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ <pluginManagement>
+ <plugins>
+ <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>
+ edu.uci.ics.asterix
+ </groupId>
+ <artifactId>
+ lexer-generator-maven-plugin
+ </artifactId>
+ <versionRange>
+ [0.1,)
+ </versionRange>
+ <goals>
+ <goal>generate-lexer</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <execute>
+ <runOnIncremental>false</runOnIncremental>
+ </execute>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
</build>
<dependencies>
@@ -55,31 +112,30 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>hyracks-storage-am-btree</artifactId>
</dependency>
<dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-transactions</artifactId>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-transactions</artifactId>
<version>0.0.4-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
- <groupId>org.twitter4j</groupId>
- <artifactId>twitter4j-core</artifactId>
- <version>2.2.3</version>
+ <groupId>org.twitter4j</groupId>
+ <artifactId>twitter4j-core</artifactId>
+ <version>2.2.3</version>
</dependency>
<dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-core</artifactId>
- <version>0.20.2</version>
- <type>jar</type>
- <scope>compile</scope>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <version>0.20.2</version>
+ <type>jar</type>
+ <scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <version>0.2.3-SNAPSHOT</version>
</dependency>
</dependencies>
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
index 89081ab..f720434 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.aggregates.serializable.std;
import java.io.DataOutput;
@@ -6,6 +21,7 @@
import java.util.ArrayList;
import java.util.List;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
@@ -59,8 +75,15 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+ ARecordType _recType;
+ try {
+ _recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ final ARecordType recType = _recType;
return new ICopySerializableAggregateFunctionFactory() {
private static final long serialVersionUID = 1L;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
index 41047f7..219204b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.aggregates.serializable.std;
import java.io.DataOutput;
@@ -6,6 +21,7 @@
import java.util.ArrayList;
import java.util.List;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -63,8 +79,15 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+ ARecordType tmpRecType;
+ try {
+ tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT64 }, true);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ final ARecordType recType = tmpRecType;
return new ICopySerializableAggregateFunctionFactory() {
private static final long serialVersionUID = 1L;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
index 4fe5e35..93c7026 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/AvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.aggregates.std;
import java.io.DataOutput;
@@ -7,6 +22,7 @@
import java.util.List;
import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -64,8 +80,15 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, true);
+ ARecordType tmpRecType;
+ try {
+ tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, true);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ final ARecordType recType = tmpRecType;
return new ICopyAggregateFunctionFactory() {
private static final long serialVersionUID = 1L;
@@ -78,7 +101,7 @@
private DataOutput out = provider.getDataOutput();
private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
- private ICopyEvaluator eval = args[0].createEvaluator(inputVal);
+ private ICopyEvaluator eval = args[0].createEvaluator(inputVal);
private double sum;
private int count;
private ATypeTag aggType;
@@ -115,13 +138,13 @@
@Override
public void step(IFrameTupleReference tuple) throws AlgebricksException {
inputVal.reset();
- eval.evaluate(tuple);
+ eval.evaluate(tuple);
ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
.deserialize(inputVal.getByteArray()[0]);
if (typeTag == ATypeTag.NULL || aggType == ATypeTag.NULL) {
aggType = ATypeTag.NULL;
return;
- } else if (aggType == ATypeTag.SYSTEM_NULL) {
+ } else if (aggType == ATypeTag.SYSTEM_NULL) {
aggType = typeTag;
} else if (typeTag != ATypeTag.SYSTEM_NULL && typeTag != aggType) {
throw new AlgebricksException("Unexpected type " + typeTag
@@ -129,7 +152,7 @@
}
if (typeTag != ATypeTag.SYSTEM_NULL) {
++count;
- }
+ }
switch (typeTag) {
case INT8: {
byte val = AInt8SerializerDeserializer.getByte(inputVal.getByteArray(), 1);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
index 347f5e7..d262ef4 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/GlobalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.aggregates.std;
import java.io.DataOutput;
@@ -6,7 +21,7 @@
import java.util.ArrayList;
import java.util.List;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
@@ -61,8 +76,15 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ ARecordType tmpRecType;
+ try {
+ tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ final ARecordType recType = tmpRecType;
return new ICopyAggregateFunctionFactory() {
private static final long serialVersionUID = 1L;
@@ -114,7 +136,7 @@
inputVal.reset();
eval.evaluate(tuple);
byte[] serBytes = inputVal.getByteArray();
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serBytes[0]);
switch (typeTag) {
case NULL: {
metNull = true;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
index de02246..5400d78 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/aggregates/std/LocalAvgAggregateDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.aggregates.std;
import java.io.DataOutput;
@@ -6,7 +21,7 @@
import java.util.ArrayList;
import java.util.List;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
@@ -72,8 +87,15 @@
List<IAType> unionList = new ArrayList<IAType>();
unionList.add(BuiltinType.ANULL);
unionList.add(BuiltinType.ADOUBLE);
- final ARecordType recType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
- new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ ARecordType tmpRecType;
+ try {
+ tmpRecType = new ARecordType(null, new String[] { "sum", "count" }, new IAType[] {
+ new AUnionType(unionList, "OptionalDouble"), BuiltinType.AINT32 }, false);
+ } catch (AsterixException e) {
+ throw new AlgebricksException(e);
+ }
+
+ final ARecordType recType = tmpRecType;
return new ICopyAggregateFunction() {
@@ -204,5 +226,4 @@
}
};
}
-
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
new file mode 100644
index 0000000..05391de
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalDayAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day", 1);
+
+ // allowed input types
+ private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalDayAccessor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationDay(ADurationSerializerDeserializer
+ .getDayTime(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_DATE_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
+ * GregorianCalendarSystem.CHRONON_OF_DAY;
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int year = calSystem.getYear(chrononTimeInMs);
+ int month = calSystem.getMonthOfYear(chrononTimeInMs, year);
+ int day = calSystem.getDayOfMonthYear(chrononTimeInMs, year, month);
+
+ aMutableInt32.setValue(day);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
new file mode 100644
index 0000000..eba012f
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalHourAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hour", 1);
+
+ // allowed input types
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalHourAccessor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationHour(ADurationSerializerDeserializer
+ .getDayTime(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_TIME_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int hour = calSystem.getHourOfDay(chrononTimeInMs);
+
+ aMutableInt32.setValue(hour);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
new file mode 100644
index 0000000..ecc1a35
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMillisecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "millisecond", 1);
+
+ // allowed input types
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalMillisecondAccessor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationMillisecond(ADurationSerializerDeserializer
+ .getDayTime(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_TIME_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int ms = calSystem.getMillisOfSec(chrononTimeInMs);
+
+ aMutableInt32.setValue(ms);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
new file mode 100644
index 0000000..f436016
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMinuteAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "minute", 1);
+
+ // allowed input types
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalMinuteAccessor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationMinute(ADurationSerializerDeserializer
+ .getDayTime(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_TIME_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int min = calSystem.getMinOfHour(chrononTimeInMs);
+
+ aMutableInt32.setValue(min);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
new file mode 100644
index 0000000..fb68f7d
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalMonthAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "month", 1);
+
+ // allowed input types
+ private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalMonthAccessor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationMonth(ADurationSerializerDeserializer
+ .getYearMonth(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_DATE_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
+ * GregorianCalendarSystem.CHRONON_OF_DAY;
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int year = calSystem.getYear(chrononTimeInMs);
+ int month = calSystem.getMonthOfYear(chrononTimeInMs, year);
+
+ aMutableInt32.setValue(month);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
new file mode 100644
index 0000000..3b9ee95
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalSecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "second", 1);
+
+ // allowed input types
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalSecondAccessor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationSecond(ADurationSerializerDeserializer
+ .getDayTime(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_TIME_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int sec = calSystem.getSecOfMin(chrononTimeInMs);
+
+ aMutableInt32.setValue(sec);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
new file mode 100644
index 0000000..41dacfd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalYearAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year", 1);
+
+ // allowed input types
+ private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+ private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalYearAccessor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ private GregorianCalendarSystem calSystem = GregorianCalendarSystem.getInstance();
+
+ // for output: type integer
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInt32> intSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ private AMutableInt32 aMutableInt32 = new AMutableInt32(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+
+ if (bytes[0] == SER_DURATION_TYPE_TAG) {
+ aMutableInt32.setValue(calSystem.getDurationYear(ADurationSerializerDeserializer
+ .getYearMonth(bytes, 1)));
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ }
+
+ long chrononTimeInMs = 0;
+ if (bytes[0] == SER_DATE_TYPE_TAG) {
+ chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
+ * GregorianCalendarSystem.CHRONON_OF_DAY;
+ } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
+ } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else if (bytes[0] == SER_STRING_TYPE_TAG) {
+ int year;
+ if (UTF8StringPointable.charAt(bytes, 3) == '-') {
+ // in case of a negative year
+ year = -1
+ * ((UTF8StringPointable.charAt(bytes, 4) - '0') * 1000
+ + (UTF8StringPointable.charAt(bytes, 5) - '0') * 100
+ + (UTF8StringPointable.charAt(bytes, 6) - '0') * 10 + (UTF8StringPointable
+ .charAt(bytes, 7) - '0'));
+ } else {
+ year = (UTF8StringPointable.charAt(bytes, 3) - '0') * 1000
+ + (UTF8StringPointable.charAt(bytes, 4) - '0') * 100
+ + (UTF8StringPointable.charAt(bytes, 5) - '0') * 10
+ + (UTF8StringPointable.charAt(bytes, 6) - '0');
+ }
+ aMutableInt32.setValue(year);
+ intSerde.serialize(aMutableInt32, out);
+ return;
+ } else {
+ throw new AlgebricksException("Inapplicable input type: " + bytes[0]);
+ }
+
+ int year = calSystem.getYear(chrononTimeInMs);
+
+ aMutableInt32.setValue(year);
+ intSerde.serialize(aMutableInt32, out);
+
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
index 38983e2..db3bc40 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.evaluators.common;
import java.io.DataOutput;
@@ -5,6 +20,7 @@
import edu.uci.ics.asterix.builders.IARecordBuilder;
import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -72,8 +88,8 @@
}
}
recBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new AlgebricksException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new AlgebricksException(e);
}
}
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
index 18fc2d7..2b4ea3a 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
@@ -2,6 +2,7 @@
import java.io.DataOutput;
+import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateOrTimeAscBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.ADateTimeAscBinaryComparatorFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
@@ -50,6 +51,8 @@
.createBinaryComparator();
protected IBinaryComparator dateTimeBinaryComp = ADateTimeAscBinaryComparatorFactory.INSTANCE
.createBinaryComparator();
+ protected IBinaryComparator dateOrTimeBinaryComp = ADateOrTimeAscBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
public AbstractComparisonEvaluator(DataOutput out, ICopyEvaluatorFactory evalLeftFactory,
ICopyEvaluatorFactory evalRightFactory) throws AlgebricksException {
@@ -119,6 +122,10 @@
case DATETIME: {
return compareDateTimeWithArg(typeTag2);
}
+ case DATE:
+ case TIME: {
+ return compareDateOrTimeWithArg(typeTag2);
+ }
default: {
throw new AlgebricksException("Comparison is undefined between types " + typeTag1 + " and " + typeTag2
+ " .");
@@ -126,6 +133,22 @@
}
}
+ private ComparisonResult compareDateOrTimeWithArg(ATypeTag typeTag2) throws AlgebricksException {
+ if (typeTag2 == ATypeTag.NULL) {
+ return ComparisonResult.GREATER_THAN;
+ } else if (typeTag2 == ATypeTag.DATE || typeTag2 == ATypeTag.TIME) {
+ int result = dateOrTimeBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1,
+ outRight.getByteArray(), 1, outRight.getLength() - 1);
+ if (result == 0)
+ return ComparisonResult.EQUAL;
+ else if (result < 0)
+ return ComparisonResult.LESS_THAN;
+ else
+ return ComparisonResult.GREATER_THAN;
+ }
+ throw new AlgebricksException("Comparison is undefined between types ADate/ATime and " + typeTag2 + " .");
+ }
+
private ComparisonResult compareDateTimeWithArg(ATypeTag typeTag2) throws AlgebricksException {
if (typeTag2 == ATypeTag.NULL) {
return ComparisonResult.GREATER_THAN;
@@ -153,8 +176,8 @@
private ComparisonResult compareStringWithArg(ATypeTag typeTag2) throws AlgebricksException {
if (typeTag2 == ATypeTag.STRING) {
- int result = strBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1, outRight.getByteArray(), 1,
- outRight.getLength() - 1);
+ int result = strBinaryComp.compare(outLeft.getByteArray(), 1, outLeft.getLength() - 1,
+ outRight.getByteArray(), 1, outRight.getLength() - 1);
if (result == 0)
return ComparisonResult.EQUAL;
else if (result < 0)
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
index cfebf48..44fcf7c 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
@@ -21,7 +21,7 @@
import edu.uci.ics.asterix.om.base.ADate;
import edu.uci.ics.asterix.om.base.AMutableDate;
import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
@@ -84,8 +84,10 @@
byte[] serString = outInput.getByteArray();
if (serString[0] == SER_STRING_TYPE_TAG) {
- charAccessor.reset(serString, 3, 0);
- long chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, true);
+ int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
+
+ charAccessor.reset(serString, 3, stringLength);
+ long chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, true);
short temp = 0;
if (chrononTimeInMs < 0
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
index 19849aa..6a5783b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
@@ -21,7 +21,8 @@
import edu.uci.ics.asterix.om.base.ADateTime;
import edu.uci.ics.asterix.om.base.AMutableDateTime;
import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
@@ -71,7 +72,6 @@
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
-
private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
@Override
@@ -82,7 +82,10 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
if (serString[0] == SER_STRING_TYPE_TAG) {
- charAccessor.reset(serString, 3, 0);
+
+ int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
+
+ charAccessor.reset(serString, 3, stringLength);
// +1 if it is negative (-)
short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
@@ -96,11 +99,11 @@
timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
: (short) (9);
- long chrononTimeInMs = ADateAndTimeParser.parseDatePart(charAccessor, false);
+ long chrononTimeInMs = ADateParserFactory.parseDatePart(charAccessor, false);
- charAccessor.reset(serString, 3, timeOffset);
+ charAccessor.reset(serString, 3 + timeOffset, stringLength - timeOffset);
- chrononTimeInMs += ADateAndTimeParser.parseTimePart(charAccessor);
+ chrononTimeInMs += ATimeParserFactory.parseTimePart(charAccessor);
aDateTime.setValue(chrononTimeInMs);
datetimeSerde.serialize(aDateTime, out);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
index f0660f5..b2b3f4e 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
@@ -20,7 +20,7 @@
import edu.uci.ics.asterix.om.base.ADuration;
import edu.uci.ics.asterix.om.base.AMutableDuration;
import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.temporal.ADurationParser;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
@@ -83,9 +83,11 @@
if (serString[0] == SER_STRING_TYPE_TAG) {
- charAccessor.reset(serString, 3, 0);
+ int stringLength = ((serString[1] & 0xff) << 8) + (serString[2] & 0xff);
- ADurationParser.parse(charAccessor, aDuration);
+ charAccessor.reset(serString, 3, stringLength);
+
+ ADurationParserFactory.parseDuration(charAccessor, aDuration);
durationSerde.serialize(aDuration, out);
} else if (serString[0] == SER_NULL_TYPE_TAG) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
index 15e3e72..6fa4925 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
@@ -98,8 +98,9 @@
else
throw new AlgebricksException(errorMessage);
}
- if (value < 0)
+ if (value < 0 && value != -9223372036854775808L) {
throw new AlgebricksException(errorMessage);
+ }
if (value > 0 && !positive)
value *= -1;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
new file mode 100644
index 0000000..e1a12f8
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromDateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-from-date", 2);
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalFromDateConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+ //TODO: Where to move and fix these?
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+
+ int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+ + (argOut0.getByteArray()[2] & 0xff);
+
+ // start date
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ long intervalStart = ADateParserFactory.parseDatePart(charAccessor, true)
+ / GregorianCalendarSystem.CHRONON_OF_DAY;
+ // end date
+
+ stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+ + (argOut1.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ long intervalEnd = ADateParserFactory.parseDatePart(charAccessor, true)
+ / GregorianCalendarSystem.CHRONON_OF_DAY;
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
new file mode 100644
index 0000000..72a8e37
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromDateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-from-datetime", 2);
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalFromDateTimeConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+ //TODO: Where to move and fix these?
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // start date
+
+ int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+ + (argOut0.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ // get offset for time part: +1 if it is negative (-)
+ short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+ && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new AlgebricksException(errorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+ : (short) (9);
+ long intervalStart = ADateParserFactory.parseDatePart(charAccessor, false);
+ charAccessor.reset(argOut0.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+ intervalStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+ // end date
+
+ stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+ + (argOut1.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ // get offset for time part: +1 if it is negative (-)
+ timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+ && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new AlgebricksException(errorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+ : (short) (9);
+ long intervalEnd = ADateParserFactory.parseDatePart(charAccessor, false);
+ charAccessor.reset(argOut1.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+ intervalEnd += ATimeParserFactory.parseTimePart(charAccessor);
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
new file mode 100644
index 0000000..3ce722a
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-from-time", 2);
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalFromTimeConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+ //TODO: Where to move and fix these?
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // start date
+
+ int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+ + (argOut0.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ long intervalStart = ATimeParserFactory.parseTimePart(charAccessor);
+ if (intervalStart < 0) {
+ intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ // end date
+
+ stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+ + (argOut1.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ long intervalEnd = ATimeParserFactory.parseTimePart(charAccessor);
+ if (intervalEnd < 0) {
+ intervalEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
new file mode 100644
index 0000000..23fdb07
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class AIntervalStartFromDateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-start-from-date", 2);
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalStartFromDateConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // start date
+
+ int stringLength = ((argOut0.getByteArray()[1] & 0xff) << 8)
+ + (argOut0.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ long intervalStart = ADateParserFactory.parseDatePart(charAccessor, true);
+ // duration
+
+ stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+ + (argOut1.getByteArray()[2] & 0xff);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+ long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ aDuration.getMonths(), aDuration.getMilliseconds());
+
+ intervalStart = GregorianCalendarSystem.getChrononInDays(intervalStart);
+ intervalEnd = GregorianCalendarSystem.getChrononInDays(intervalEnd);
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
new file mode 100644
index 0000000..91c45df
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADateParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Constructor function {@code interval-start-from-datetime(datetime-string, duration-string)}:
+ * parses the first string as an ISO-8601-style datetime, the second as a duration, and emits an
+ * AInterval of DATETIME type spanning [start, start + duration]. NULL in either argument yields
+ * NULL; any non-NULL, non-STRING input is rejected with an AlgebricksException.
+ */
+public class AIntervalStartFromDateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-start-from-datetime", 2);
+ // Serialized type tags used to dispatch on the first byte of each tagged argument.
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalStartFromDateTimeConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ // Scratch buffers for the two evaluated arguments; reset per tuple.
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ // NOTE(review): message says "from Date" but this descriptor handles DATETIME input —
+ // looks copy-pasted from the date variant.
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+
+ // Mutable holders reused across tuples to avoid per-call allocation.
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // start date
+
+ // NOTE(review): operator-precedence bug — '<<' binds tighter than '&', so this
+ // computes (b[1] & 0xff00) + (b[2] & 0xff), not ((b[1] & 0xff) << 8) + (b[2] & 0xff).
+ // It only works while the high length byte is zero; strings of length >= 256
+ // (or a negative high byte after sign extension) produce a wrong length.
+ int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ + (argOut0.getByteArray()[2] & 0xff << 0);
+
+ // Offset 3 skips the type tag (1 byte) plus the 2-byte UTF length prefix.
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ // get offset for time part: +1 if it is negative (-)
+ short timeOffset = (short) ((charAccessor.getCharAt(0) == '-') ? 1 : 0);
+
+ // 'T' separates date and time; position 10 for extended (yyyy-mm-dd),
+ // 8 for basic (yyyymmdd) form.
+ if (charAccessor.getCharAt(timeOffset + 10) != 'T'
+ && charAccessor.getCharAt(timeOffset + 8) != 'T') {
+ throw new AlgebricksException(errorMessage + ": missing T");
+ }
+
+ // if extended form 11, else 9
+ timeOffset += (charAccessor.getCharAt(timeOffset + 13) == ':') ? (short) (11)
+ : (short) (9);
+ long intervalStart = ADateParserFactory.parseDatePart(charAccessor, false);
+ charAccessor.reset(argOut0.getByteArray(), 3 + timeOffset, stringLength - timeOffset);
+ intervalStart += ATimeParserFactory.parseTimePart(charAccessor);
+
+ // duration
+
+ // NOTE(review): same '& 0xff << 8' precedence bug as above.
+ stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ + (argOut1.getByteArray()[2] & 0xff << 0);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+ long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ aDuration.getMonths(), aDuration.getMilliseconds());
+
+ // Negative durations are allowed by the parser, so guard against an
+ // inverted interval.
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ // NOTE(review): the cause e1 is dropped here; wrapping it would aid debugging.
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /** @return the function identifier ({@link #FID}) registered for this constructor. */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
new file mode 100644
index 0000000..e576fef
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.constructors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInterval;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.AMutableInterval;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ADurationParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Constructor function {@code interval-start-from-time(time-string, duration-string)}:
+ * parses the first string as a time of day and the second as a duration (day-time only), and
+ * emits an AInterval of TIME type spanning [start, start + duration], wrapping past midnight.
+ * NULL in either argument yields NULL; non-NULL, non-STRING inputs are rejected.
+ */
+public class AIntervalStartFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-start-from-time", 2);
+ // Serialized type tags used to dispatch on the first byte of each tagged argument.
+ private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new AIntervalStartFromTimeConstructorDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ // Scratch buffers for the two evaluated arguments; reset per tuple.
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+ // NOTE(review): message says "from Date" but this descriptor handles TIME input —
+ // looks copy-pasted from the date variant.
+ private String errorMessage = "This can not be an instance of interval (from Date)";
+
+ // Mutable holders reused across tuples to avoid per-call allocation.
+ private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
+ private AMutableDuration aDuration = new AMutableDuration(0, 0L);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<AInterval> intervalSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ argOut0.reset();
+ argOut1.reset();
+ eval0.evaluate(tuple);
+ eval1.evaluate(tuple);
+
+ try {
+
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
+ && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // start time
+
+ // NOTE(review): operator-precedence bug — '<<' binds tighter than '&', so this
+ // computes (b[1] & 0xff00) + (b[2] & 0xff), not ((b[1] & 0xff) << 8) + (b[2] & 0xff).
+ // Correct only while the high length byte is zero.
+ int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ + (argOut0.getByteArray()[2] & 0xff << 0);
+
+ // Offset 3 skips the type tag (1 byte) plus the 2-byte UTF length prefix.
+ charAccessor.reset(argOut0.getByteArray(), 3, stringLength);
+ int intervalStart = ATimeParserFactory.parseTimePart(charAccessor);
+
+ // A negative chronon (e.g. from a timezone offset) is normalized into [0, day).
+ if (intervalStart < 0) {
+ intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ // duration
+
+ // NOTE(review): same '& 0xff << 8' precedence bug as above.
+ stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ + (argOut1.getByteArray()[2] & 0xff << 0);
+
+ charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+ ADurationParserFactory.parseDuration(charAccessor, aDuration);
+
+ // Year-month components have no fixed length in milliseconds, so they
+ // cannot be applied to a time-of-day value.
+ if (aDuration.getMonths() != 0) {
+ throw new AlgebricksException("Cannot add a year-month duration to a time value.");
+ }
+
+ int intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ aDuration.getMilliseconds());
+
+ // Wrap an end time that passes midnight back into [0, day).
+ if (intervalEnd > GregorianCalendarSystem.CHRONON_OF_DAY) {
+
+ intervalEnd = intervalEnd - (int) (GregorianCalendarSystem.CHRONON_OF_DAY);
+ }
+
+ // NOTE(review): after the wrap above, any interval that crosses midnight has
+ // end < start and is rejected here — this contradicts the wrap logic just
+ // above; confirm whether midnight-crossing intervals should be allowed.
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(
+ "Interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+ } else {
+ // NOTE(review): message says "from dates" but this is the time constructor.
+ throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ }
+
+ } catch (IOException e1) {
+ // NOTE(review): the cause e1 is dropped here; wrapping it would aid debugging.
+ throw new AlgebricksException(errorMessage);
+ } catch (Exception e2) {
+ throw new AlgebricksException(e2);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /** @return the function identifier ({@link #FID}) registered for this constructor. */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
index 1a7eb12..caff78b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
@@ -21,8 +21,9 @@
import edu.uci.ics.asterix.om.base.AMutableTime;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.ATime;
-import edu.uci.ics.asterix.om.base.temporal.ADateAndTimeParser;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
@@ -82,8 +83,15 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
if (serString[0] == SER_STRING_TYPE_TAG) {
- charAccessor.reset(serString, 3, 0);
- int chrononTimeInMs = ADateAndTimeParser.parseTimePart(charAccessor);
+
+ int stringLength = (serString[1] & 0xff << 8) + (serString[2] & 0xff << 0);
+
+ charAccessor.reset(serString, 3, stringLength);
+ int chrononTimeInMs = ATimeParserFactory.parseTimePart(charAccessor);
+
+ if (chrononTimeInMs < 0) {
+ chrononTimeInMs += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
aTime.setValue(chrononTimeInMs);
timeSerde.serialize(aTime, out);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java
new file mode 100644
index 0000000..12e73f6
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * This runtime function checks if the input is null.
+ * If the input is not null, just return it directly;
+ * Otherwise, throw a runtime exception.
+ */
+public class NotNullDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new NotNullDescriptor();
+ }
+ };
+ // Serialized NULL type tag, compared against the first byte of the tagged input.
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+ private DataOutput out = output.getDataOutput();
+ // Scratch buffer holding the single evaluated argument; reset per tuple.
+ private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(outInput);
+ private String errorMessage = "The input value cannot be null!";
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ try {
+ outInput.reset();
+ eval.evaluate(tuple);
+ byte[] data = outInput.getByteArray();
+ // The type tag is the first byte at the value's start offset.
+ if (data[outInput.getStartOffset()] == SER_NULL_TYPE_TAG) {
+ throw new AlgebricksException(errorMessage);
+ }
+ // Non-null: pass the tagged value through unchanged.
+ out.write(data, outInput.getStartOffset(), outInput.getLength());
+ } catch (IOException e1) {
+ // NOTE(review): the cause e1 is dropped here; wrapping it would aid debugging.
+ throw new AlgebricksException(errorMessage);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /** @return the built-in NOT_NULL function identifier. */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return AsterixBuiltinFunctions.NOT_NULL;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
index 77869ba..81ae62b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/OpenRecordConstructorDescriptor.java
@@ -1,9 +1,25 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.asterix.runtime.evaluators.functions;
import java.io.DataOutput;
import java.io.IOException;
import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
@@ -86,8 +102,8 @@
}
}
recBuilder.write(out, true);
- } catch (IOException ioe) {
- throw new AlgebricksException(ioe);
+ } catch (IOException | AsterixException e) {
+ throw new AlgebricksException(e);
}
}
};
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java
deleted file mode 100644
index 107f3cd..0000000
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/YearDescriptor.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package edu.uci.ics.asterix.runtime.evaluators.functions;
-
-import java.io.DataOutput;
-
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
-import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
-import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
-
-public class YearDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
- private static final long serialVersionUID = 1L;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
- public IFunctionDescriptor createFunctionDescriptor() {
- return new YearDescriptor();
- }
- };
-
- @Override
- public FunctionIdentifier getIdentifier() {
- return AsterixBuiltinFunctions.YEAR;
- }
-
- /**
- * Returns the 4-digit representation of a year from a string, as an int32.
- * e.g. year('2010-10-24') = 2010
- */
-
- @Override
- public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
-
- return new ICopyEvaluatorFactory() {
- private static final long serialVersionUID = 1L;
-
- @Override
- public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
- final DataOutput out = output.getDataOutput();
-
- return new ICopyEvaluator() {
- private ArrayBackedValueStorage out1 = new ArrayBackedValueStorage();
- private ICopyEvaluator eval1 = args[0].createEvaluator(out1);
- private AMutableInt32 m = new AMutableInt32(0);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
-
- @Override
- public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
- try {
- out1.reset();
- eval1.evaluate(tuple);
- byte[] dateArray = out1.getByteArray();
-
- if (dateArray[0] == SER_NULL_TYPE_TAG) {
- nullSerde.serialize(ANull.NULL, out);
- return;
- }
-
- if (dateArray[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException("year function can not be called on values of type"
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(dateArray[0]));
- }
-
- int year = (UTF8StringPointable.charAt(dateArray, 3) - '0') * 1000
- + (UTF8StringPointable.charAt(dateArray, 4) - '0') * 100
- + (UTF8StringPointable.charAt(dateArray, 5) - '0') * 10
- + (UTF8StringPointable.charAt(dateArray, 6) - '0');
- m.setValue(year);
-
- int32Serde.serialize(m, out);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- }
- };
- }
- };
- }
-
-}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
new file mode 100644
index 0000000..241e2e0
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Base class for Allen-style interval comparison functions (before, meets, overlaps, ...).
+ * Subclasses implement {@link #compareIntervals(long, long, long, long)}; this class handles
+ * argument evaluation, NULL propagation, type checking, and boolean serialization.
+ */
+public abstract class AbstractIntervalLogicFuncDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+
+ // allowed input types
+ private final static byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ // Scratch buffers for the two evaluated interval arguments; reset per tuple.
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+ // possible output types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut0.reset();
+ eval0.evaluate(tuple);
+ argOut1.reset();
+ eval1.evaluate(tuple);
+
+ // NOTE(review): tag and payload are read at fixed offsets 0 and 1; this
+ // assumes the storage's start offset is 0 after reset() — confirm against
+ // ArrayBackedValueStorage semantics (cf. NotNullDescriptor, which uses
+ // getStartOffset()).
+ try {
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ }
+
+ if (argOut0.getByteArray()[0] != SER_INTERVAL_TYPE_TAG
+ || argOut1.getByteArray()[0] != SER_INTERVAL_TYPE_TAG) {
+ throw new AlgebricksException("Inapplicable input type for parameters: ("
+ + argOut0.getByteArray()[0] + ", " + argOut1.getByteArray()[0] + ")");
+ }
+
+ // Intervals over different temporal types (date/time/datetime) are not comparable.
+ // NOTE(review): typo in the runtime message below: "compare to intervals"
+ // should read "compare two intervals".
+ if (AIntervalSerializerDeserializer.getIntervalTimeType(argOut0.getByteArray(), 1) != AIntervalSerializerDeserializer
+ .getIntervalTimeType(argOut1.getByteArray(), 1)) {
+ throw new AlgebricksException(
+ "Failed to compare to intervals with different internal time type.");
+ }
+
+ // Delegate the actual relation test to the concrete subclass.
+ ABoolean res = (compareIntervals(
+ AIntervalSerializerDeserializer.getIntervalStart(argOut0.getByteArray(), 1),
+ AIntervalSerializerDeserializer.getIntervalEnd(argOut0.getByteArray(), 1),
+ AIntervalSerializerDeserializer.getIntervalStart(argOut1.getByteArray(), 1),
+ AIntervalSerializerDeserializer.getIntervalEnd(argOut1.getByteArray(), 1))) ? ABoolean.TRUE
+ : ABoolean.FALSE;
+
+ booleanSerde.serialize(res, out);
+
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /**
+ * Tests the interval relation implemented by the concrete subclass.
+ * @param s1 start chronon of the first interval
+ * @param e1 end chronon of the first interval
+ * @param s2 start chronon of the second interval
+ * @param e2 end chronon of the second interval
+ * @return true iff the relation holds
+ */
+ protected abstract boolean compareIntervals(long s1, long e1, long s2, long e2);
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java
new file mode 100644
index 0000000..7417f2b
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDateDurationDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Descriptor for the {@code add-date-duration(date, duration)} builtin: adds a
+ * duration (its year-month and day-time components) to a date and returns the
+ * resulting date. Returns null if either argument is null; any other type
+ * combination raises an {@link AlgebricksException}.
+ */
+public class AddDateDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-date-duration", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddDateDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+
+                    // reused across tuples to avoid per-call allocation
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-in, null-out
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            // get duration fields: yearMonth field and dayTime field
+                            int yearMonth = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+                            long dayTime = ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1);
+
+                            // promote the date (days since epoch) to a millisecond chronon
+                            long datetimeChronon = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+                                    * GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                            datetimeChronon = DurationArithmeticOperations.addDuration(datetimeChronon, yearMonth,
+                                    dayTime);
+
+                            // Convert back to whole days, flooring (not truncating toward zero)
+                            // for negative chronons: e.g. -1ms must land on day -1, not day 0.
+                            // BUGFIX: the floor test must look at the chronon sign, because for
+                            // chronons in (-CHRONON_OF_DAY, 0) the integer division already
+                            // yields 0 and the old "dateChrononInDays < 0" test never fired.
+                            int dateChrononInDays = (int) (datetimeChronon / GregorianCalendarSystem.CHRONON_OF_DAY);
+                            if (datetimeChronon < 0 && datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+                                dateChrononInDays -= 1;
+                            }
+
+                            aDate.setValue(dateChrononInDays);
+
+                            dateSerde.serialize(aDate, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java
new file mode 100644
index 0000000..4f04da3
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddDatetimeDurationDescriptor.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Descriptor for the {@code add-datetime-duration(datetime, duration)} builtin:
+ * shifts a datetime by a duration's year-month and day-time components and
+ * returns the resulting datetime. Null in either argument yields null; any
+ * other type mismatch raises an {@link AlgebricksException}.
+ */
+public class AddDatetimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-datetime-duration", 2);
+
+    // serialized type tags this function accepts
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddDatetimeDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private final DataOutput resultOut = output.getDataOutput();
+                    private final ArrayBackedValueStorage datetimeStorage = new ArrayBackedValueStorage();
+                    private final ArrayBackedValueStorage durationStorage = new ArrayBackedValueStorage();
+                    private final ICopyEvaluator datetimeEval = args[0].createEvaluator(datetimeStorage);
+                    private final ICopyEvaluator durationEval = args[1].createEvaluator(durationStorage);
+
+                    // serializers for the two possible results (datetime or null)
+                    @SuppressWarnings("unchecked")
+                    private final ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private final ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+
+                    // reused result holder; avoids allocating per tuple
+                    private final AMutableDateTime aDatetime = new AMutableDateTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        datetimeStorage.reset();
+                        datetimeEval.evaluate(tuple);
+                        durationStorage.reset();
+                        durationEval.evaluate(tuple);
+
+                        final byte tag0 = datetimeStorage.getByteArray()[0];
+                        final byte tag1 = durationStorage.getByteArray()[0];
+
+                        try {
+                            // null-in, null-out
+                            if (tag0 == SER_NULL_TYPE_TAG || tag1 == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, resultOut);
+                                return;
+                            }
+
+                            if (tag0 != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a DateTime ("
+                                                + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + tag0);
+                            }
+
+                            if (tag1 != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + tag1);
+                            }
+
+                            // split the duration into its year-month and day-time parts
+                            final byte[] durationBytes = durationStorage.getByteArray();
+                            final int yearMonth = ADurationSerializerDeserializer.getYearMonth(durationBytes, 1);
+                            final long dayTime = ADurationSerializerDeserializer.getDayTime(durationBytes, 1);
+
+                            // shift the datetime chronon by the duration
+                            final long shiftedChronon = DurationArithmeticOperations.addDuration(
+                                    ADateTimeSerializerDeserializer.getChronon(datetimeStorage.getByteArray(), 1),
+                                    yearMonth, dayTime);
+
+                            aDatetime.setValue(shiftedChronon);
+                            datetimeSerde.serialize(aDatetime, resultOut);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java
new file mode 100644
index 0000000..e6b6ebd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AddTimeDurationDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Descriptor for the {@code add-time-duration(time, duration)} builtin: adds
+ * the day-time component of a duration to a time value. The duration must not
+ * carry a year-month component (a month has no fixed length in milliseconds,
+ * so it cannot be applied to a time-of-day). Null in either argument yields
+ * null; other type mismatches raise an {@link AlgebricksException}.
+ */
+public class AddTimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "add-time-duration", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AddTimeDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+
+                    // reused across tuples to avoid per-call allocation
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-in, null-out
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Time ("
+                                                + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            // BUGFIX: the "or null (...)" part of this message used to print
+                            // SER_DURATION_TYPE_TAG instead of the null type tag.
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration ("
+                                                + SER_DURATION_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            // get duration fields: yearMonth field and dayTime field
+                            int yearMonth = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+
+                            // cannot add a year-month duration to a time value
+                            if (yearMonth != 0) {
+                                throw new AlgebricksException("ATime cannot be added by a year-month duration.");
+                            }
+
+                            long dayTime = ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1);
+
+                            // get time fields
+                            int timeChronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            timeChronon = DurationArithmeticOperations.addDuration(timeChronon, dayTime);
+
+                            aTime.setValue(timeChronon);
+
+                            timeSerde.serialize(aTime, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
new file mode 100644
index 0000000..fb01e97
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem.Fields;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Descriptor for the {@code adjust-datetime-for-timezone(datetime, string)}
+ * builtin: shifts a datetime by the timezone offset parsed from the string
+ * argument and returns the extended string representation carrying that
+ * timezone. Null in either argument yields null.
+ */
+public class AdjustDateTimeForTimeZoneDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "adjust-datetime-for-timezone", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AdjustDateTimeForTimeZoneDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    private GregorianCalendarSystem calInstance = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-in, null-out
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Datetime, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            // BUGFIX: this message used to say "parameter 0".
+                            if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a String, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            // Decode the 2-byte big-endian UTF length of the string argument.
+                            // BUGFIX: "b & 0xff << 8" parses as "b & 0xff00" because the shift
+                            // binds tighter than '&' in Java, corrupting lengths >= 256; the
+                            // mask must be applied before the shift.
+                            int stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                    + (argOut1.getByteArray()[2] & 0xff);
+
+                            // string payload starts after the type tag (1 byte) + length (2 bytes)
+                            charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+
+                            int timezone = ATimeParserFactory.parseTimezonePart(charAccessor, 0);
+
+                            if (!calInstance.validateTimeZone(timezone)) {
+                                throw new AlgebricksException("Wrong format for a time zone string!");
+                            }
+
+                            long chronon = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            chronon = calInstance.adjustChrononByTimezone(chronon, timezone);
+
+                            StringBuilder sbder = new StringBuilder();
+
+                            // render the full datetime (year through millisecond) with the timezone
+                            calInstance.getExtendStringRepWithTimezoneUntilField(chronon, timezone, sbder, Fields.YEAR,
+                                    Fields.MILLISECOND);
+
+                            out.writeByte(SER_STRING_TYPE_TAG);
+                            out.writeUTF(sbder.toString());
+
+                        } catch (Exception e1) {
+                            throw new AlgebricksException(e1);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
new file mode 100644
index 0000000..1323664
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.ATimeParserFactory;
+import edu.uci.ics.asterix.om.base.temporal.ByteArrayCharSequenceAccessor;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem.Fields;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * Descriptor for the {@code adjust-time-for-timezone(time, string)} builtin:
+ * shifts a time value by the timezone offset parsed from the string argument
+ * and returns the extended string representation carrying that timezone.
+ * Null in either argument yields null.
+ */
+public class AdjustTimeForTimeZoneDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "adjust-time-for-timezone", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+    private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new AdjustTimeForTimeZoneDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+
+                    private ByteArrayCharSequenceAccessor charAccessor = new ByteArrayCharSequenceAccessor();
+
+                    private GregorianCalendarSystem calInstance = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-in, null-out
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Time, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            // BUGFIX: this message used to say "parameter 0".
+                            if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a String, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            // Decode the 2-byte big-endian UTF length of the STRING argument.
+                            // BUGFIX (two defects): the length was read from argOut0 (the time
+                            // argument) instead of argOut1, and "b & 0xff << 8" parses as
+                            // "b & 0xff00" because the shift binds tighter than '&' in Java;
+                            // the mask must be applied before the shift.
+                            int stringLength = ((argOut1.getByteArray()[1] & 0xff) << 8)
+                                    + (argOut1.getByteArray()[2] & 0xff);
+
+                            // string payload starts after the type tag (1 byte) + length (2 bytes)
+                            charAccessor.reset(argOut1.getByteArray(), 3, stringLength);
+
+                            int timezone = ATimeParserFactory.parseTimezonePart(charAccessor, 0);
+
+                            if (!calInstance.validateTimeZone(timezone)) {
+                                throw new AlgebricksException("Wrong format for a time zone string!");
+                            }
+
+                            int chronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+
+                            chronon = (int) calInstance.adjustChrononByTimezone(chronon, timezone);
+
+                            StringBuilder sbder = new StringBuilder();
+
+                            // render the time-of-day (hour through millisecond) with the timezone
+                            calInstance.getExtendStringRepWithTimezoneUntilField(chronon, timezone, sbder, Fields.HOUR,
+                                    Fields.MILLISECOND);
+
+                            out.writeByte(SER_STRING_TYPE_TAG);
+                            out.writeUTF(sbder.toString());
+
+                        } catch (Exception e1) {
+                            throw new AlgebricksException(e1);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
new file mode 100644
index 0000000..c6be030
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * calendar-duration-from-date: converts a duration into a "human-readable" duration
+ * (every field within its calendar range, e.g. month in [0, 12), hour in [0, 24))
+ * anchored at the given ADate reference point. Null in either argument yields null.
+ * NOTE(review): the class name keeps the historical "Duartion" spelling; renaming it
+ * would break the file/class registration, so only the docs acknowledge the typo.
+ */
+public class CalendarDuartionFromDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "calendar-duration-from-date", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CalendarDuartionFromDateDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    private GregorianCalendarSystem calInstanct = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-propagation: a null in either argument produces a null result
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                // BUGFIX(review): the message previously claimed "expecting a Date"
+                                // although this branch checks the Duration argument.
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Duration, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
+                                    argOut1.getByteArray(), 1);
+                            long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
+                                    argOut1.getByteArray(), 1);
+
+                            // a date chronon counts days; scale to milliseconds for the arithmetic
+                            long startingTimePoint = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+                                    * GregorianCalendarSystem.CHRONON_OF_DAY;
+
+                            long endingTimePoint = DurationArithmeticOperations.addDuration(startingTimePoint,
+                                    yearMonthDurationInMonths, dayTimeDurationInMs);
+
+                            if (startingTimePoint == endingTimePoint) {
+                                aDuration.setValue(0, 0);
+                            } else {
+
+                                boolean negative = false;
+
+                                if (endingTimePoint < startingTimePoint) {
+                                    negative = true;
+                                    // swap the starting and ending time, so that ending time is always larger than the starting time.
+                                    long tmpTime = endingTimePoint;
+                                    endingTimePoint = startingTimePoint;
+                                    startingTimePoint = tmpTime;
+                                }
+
+                                int year0 = calInstanct.getYear(startingTimePoint);
+                                int month0 = calInstanct.getMonthOfYear(startingTimePoint, year0);
+
+                                int year1 = calInstanct.getYear(endingTimePoint);
+                                int month1 = calInstanct.getMonthOfYear(endingTimePoint, year1);
+
+                                // raw field-by-field differences; negative fields are normalized
+                                // by the borrow chain below (ms -> sec -> min -> hour -> day -> month -> year)
+                                int year = year1 - year0;
+                                int month = month1 - month0;
+                                int day = calInstanct.getDayOfMonthYear(endingTimePoint, year1, month1)
+                                        - calInstanct.getDayOfMonthYear(startingTimePoint, year0, month0);
+                                int hour = calInstanct.getHourOfDay(endingTimePoint)
+                                        - calInstanct.getHourOfDay(startingTimePoint);
+                                int min = calInstanct.getMinOfHour(endingTimePoint)
+                                        - calInstanct.getMinOfHour(startingTimePoint);
+                                int sec = calInstanct.getSecOfMin(endingTimePoint)
+                                        - calInstanct.getSecOfMin(startingTimePoint);
+                                int ms = calInstanct.getMillisOfSec(endingTimePoint)
+                                        - calInstanct.getMillisOfSec(startingTimePoint);
+
+                                if (ms < 0) {
+                                    ms += GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    sec -= 1;
+                                }
+
+                                if (sec < 0) {
+                                    sec += GregorianCalendarSystem.CHRONON_OF_MINUTE
+                                            / GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    min -= 1;
+                                }
+
+                                if (min < 0) {
+                                    min += GregorianCalendarSystem.CHRONON_OF_HOUR
+                                            / GregorianCalendarSystem.CHRONON_OF_MINUTE;
+                                    hour -= 1;
+                                }
+
+                                if (hour < 0) {
+                                    hour += GregorianCalendarSystem.CHRONON_OF_DAY
+                                            / GregorianCalendarSystem.CHRONON_OF_HOUR;
+                                    day -= 1;
+                                }
+
+                                if (day < 0) {
+                                    // borrow the length of the starting month (leap-aware)
+                                    boolean isLeapYear = calInstanct.isLeapYear(year0);
+                                    day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month0 - 1])
+                                            : (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month0 - 1]);
+                                    month -= 1;
+                                }
+
+                                if (month < 0) {
+                                    month += GregorianCalendarSystem.MONTHS_IN_A_YEAR;
+                                    year -= 1;
+                                }
+
+                                if (negative) {
+                                    aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
+                                            -1
+                                                    * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+                                } else {
+                                    aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
+                                            * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+                                }
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
new file mode 100644
index 0000000..2c7a9a7
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.DurationArithmeticOperations;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * This function converts a given duration into a "human-readable" duration containing both year-month and day-time
+ * duration parts, by re-organizing values between the duration fields from the given reference time point.
+ * <p/>
+ * The basic algorithm for this convert is simple: <br/>
+ * 1. Calculate the time point by adding the given duration to the given time point;<br/>
+ * 2. Calculate the differences by fields between two different time points;<br/>
+ * 3. Re-format the duration into a human-readable one.
+ * <p/>
+ * Here "human-readable" means the value of each field of the duration is within the value range of the field in the
+ * calendar system. For example, month would be in [0, 12), and hour would be in [0, 24).
+ * <p/>
+ * The result can be considered as a "field-based" difference between the two datetime value, but all negative values
+ * would be converted to be non-negative.
+ * <p/>
+ * In the implementation, we always do the subtraction from the later time point, which always results in a positive duration.
+ * <p/>
+ */
+public class CalendarDurationFromDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "calendar-duration-from-datetime", 2);
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CalendarDurationFromDateTimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    private GregorianCalendarSystem calInstanct = GregorianCalendarSystem.getInstance();
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            // null-propagation: a null in either argument produces a null result
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting ADateTime, but got: "
+                                                + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                // BUGFIX(review): the message previously claimed "expecting ADateTime"
+                                // although this branch checks the Duration argument.
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting ADuration, but got: "
+                                                + argOut1.getByteArray()[0]);
+                            }
+
+                            int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
+                                    argOut1.getByteArray(), 1);
+                            long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
+                                    argOut1.getByteArray(), 1);
+
+                            // a datetime chronon is already in milliseconds; no scaling needed
+                            long startingTimePoint = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(),
+                                    1);
+
+                            long endingTimePoint = DurationArithmeticOperations.addDuration(startingTimePoint,
+                                    yearMonthDurationInMonths, dayTimeDurationInMs);
+
+                            if (startingTimePoint == endingTimePoint) {
+                                aDuration.setValue(0, 0);
+                            } else {
+
+                                boolean negative = false;
+
+                                if (endingTimePoint < startingTimePoint) {
+                                    negative = true;
+                                    // swap the starting and ending time, so that ending time is always larger than the starting time.
+                                    long tmpTime = endingTimePoint;
+                                    endingTimePoint = startingTimePoint;
+                                    startingTimePoint = tmpTime;
+                                }
+
+                                int year0 = calInstanct.getYear(startingTimePoint);
+                                int month0 = calInstanct.getMonthOfYear(startingTimePoint, year0);
+
+                                int year1 = calInstanct.getYear(endingTimePoint);
+                                int month1 = calInstanct.getMonthOfYear(endingTimePoint, year1);
+
+                                // raw field-by-field differences; negative fields are normalized
+                                // by the borrow chain below (ms -> sec -> min -> hour -> day -> month -> year)
+                                int year = year1 - year0;
+                                int month = month1 - month0;
+                                int day = calInstanct.getDayOfMonthYear(endingTimePoint, year1, month1)
+                                        - calInstanct.getDayOfMonthYear(startingTimePoint, year0, month0);
+                                int hour = calInstanct.getHourOfDay(endingTimePoint)
+                                        - calInstanct.getHourOfDay(startingTimePoint);
+                                int min = calInstanct.getMinOfHour(endingTimePoint)
+                                        - calInstanct.getMinOfHour(startingTimePoint);
+                                int sec = calInstanct.getSecOfMin(endingTimePoint)
+                                        - calInstanct.getSecOfMin(startingTimePoint);
+                                int ms = calInstanct.getMillisOfSec(endingTimePoint)
+                                        - calInstanct.getMillisOfSec(startingTimePoint);
+
+                                if (ms < 0) {
+                                    ms += GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    sec -= 1;
+                                }
+
+                                if (sec < 0) {
+                                    sec += GregorianCalendarSystem.CHRONON_OF_MINUTE
+                                            / GregorianCalendarSystem.CHRONON_OF_SECOND;
+                                    min -= 1;
+                                }
+
+                                if (min < 0) {
+                                    min += GregorianCalendarSystem.CHRONON_OF_HOUR
+                                            / GregorianCalendarSystem.CHRONON_OF_MINUTE;
+                                    hour -= 1;
+                                }
+
+                                if (hour < 0) {
+                                    hour += GregorianCalendarSystem.CHRONON_OF_DAY
+                                            / GregorianCalendarSystem.CHRONON_OF_HOUR;
+                                    day -= 1;
+                                }
+
+                                if (day < 0) {
+                                    // borrow the length of the starting month (leap-aware)
+                                    boolean isLeapYear = calInstanct.isLeapYear(year0);
+                                    day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[month0 - 1])
+                                            : (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[month0 - 1]);
+                                    month -= 1;
+                                }
+
+                                if (month < 0) {
+                                    month += GregorianCalendarSystem.MONTHS_IN_A_YEAR;
+                                    year -= 1;
+                                }
+
+                                if (negative) {
+                                    aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
+                                            -1
+                                                    * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+                                } else {
+                                    aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
+                                            * GregorianCalendarSystem.CHRONON_OF_DAY + hour
+                                            * GregorianCalendarSystem.CHRONON_OF_HOUR + min
+                                            * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
+                                            * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+                                }
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
new file mode 100644
index 0000000..0cb47b5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * current-date: zero-argument function that returns the current date as an ADate.
+ * The date chronon is the number of whole days since the UNIX epoch, obtained by
+ * dividing System.currentTimeMillis() by CHRONON_OF_DAY (i.e. the UTC day).
+ */
+public class CurrentDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-date",
+            0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentDateDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATE);
+                    // reused across calls to avoid per-tuple allocation
+                    private AMutableDate aDate = new AMutableDate(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            // milliseconds since epoch -> whole days since epoch
+                            int dateChronon = (int) (System.currentTimeMillis() / GregorianCalendarSystem.CHRONON_OF_DAY);
+                            aDate.setValue(dateChronon);
+                            dateSerde.serialize(aDate, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
new file mode 100644
index 0000000..9209cae
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * current-datetime: zero-argument function that returns the current instant as an
+ * ADateTime whose chronon is System.currentTimeMillis() (milliseconds since the
+ * UNIX epoch, UTC).
+ */
+public class CurrentDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "current-datetime", 0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentDateTimeDescriptor();
+        }
+    };
+
+    // instances are created only through FACTORY
+    private CurrentDateTimeDescriptor() {
+    }
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADATETIME);
+                    // reused across calls to avoid per-tuple allocation
+                    private AMutableDateTime aDateTime = new AMutableDateTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            aDateTime.setValue(System.currentTimeMillis());
+                            datetimeSerde.serialize(aDateTime, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
new file mode 100644
index 0000000..c7078b9
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * current-time: zero-argument function that returns the current time of day as an
+ * ATime. The time chronon is System.currentTimeMillis() modulo CHRONON_OF_DAY,
+ * i.e. milliseconds elapsed since UTC midnight.
+ */
+public class CurrentTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-time", 0);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new CurrentTimeDescriptor();
+        }
+    };
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+     */
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ATIME);
+                    // reused across calls to avoid per-tuple allocation
+                    private AMutableTime aTime = new AMutableTime(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        try {
+                            // milliseconds since epoch -> milliseconds since UTC midnight
+                            int timeChronon = (int) (System.currentTimeMillis() % GregorianCalendarSystem.CHRONON_OF_DAY);
+                            aTime.setValue(timeChronon);
+                            timeSerde.serialize(aTime, out);
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
new file mode 100644
index 0000000..4ef52d9
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
@@ -0,0 +1,108 @@
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DateFromDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "date-from-datetime", 1);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new DateFromDatetimeDescriptor();
+ }
+
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible returning types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATE);
+ private AMutableDate aDate = new AMutableDate(0);
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ try {
+ if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for function date_from_datetime: expecting ADateTime ("
+ + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + "), but got: " + argOut.getByteArray()[0]);
+ }
+ long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
+ argOut.getByteArray(), 1);
+ int dateChrononInDays = (int) (datetimeChronon / GregorianCalendarSystem.CHRONON_OF_DAY);
+ if (dateChrononInDays < 0
+ && datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
+ dateChrononInDays -= 1;
+ }
+ aDate.setValue(dateChrononInDays);
+ dateSerde.serialize(aDate, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
new file mode 100644
index 0000000..6b78a35
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DateFromUnixTimeInDaysDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "date-from-unix-time-in-days", 1);
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new DateFromUnixTimeInDaysDescriptor();
+ }
+ };
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // allowed input types
+ private byte serNullTypeTag = ATypeTag.NULL.serialize();
+ private byte serInt8TypeTag = ATypeTag.INT8.serialize();
+ private byte serInt16TypeTag = ATypeTag.INT16.serialize();
+ private byte serInt32TypeTag = ATypeTag.INT32.serialize();
+
+ private AMutableDate aDate = new AMutableDate(0);
+
+ // possible returning types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATE);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ try {
+ if (argOut.getByteArray()[0] == serNullTypeTag) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ aDate.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ aDate.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ aDate.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+ } else {
+ throw new AlgebricksException(
+ "Inapplicable input type for function date-from-unix-time-in-days: expecting integer or null type, but got "
+ + argOut.getByteArray()[0]);
+ }
+ dateSerde.serialize(aDate, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
new file mode 100644
index 0000000..2ade4b7
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DatetimeFromDateAndTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "datetime-from-date-time", 2);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new DatetimeFromDateAndTimeDescriptor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+ // possible returning types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private AMutableDateTime aDateTime = new AMutableDateTime(0);
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut0.reset();
+ eval0.evaluate(tuple);
+ argOut1.reset();
+ eval1.evaluate(tuple);
+
+ try {
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for function datetime-from-date-time: expecting a Date ("
+ + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + ") for the first parameter, but got: "
+ + argOut0.getByteArray()[0]);
+ }
+
+ if (argOut1.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for function datetime-from-date-time: expecting a Time ("
+ + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + ") for the secon parameter, but got: "
+ + argOut1.getByteArray()[0]);
+ }
+
+ long datetimeChronon = ADateSerializerDeserializer
+ .getChronon(argOut0.getByteArray(), 1)
+ * GregorianCalendarSystem.CHRONON_OF_DAY
+ + ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+ aDateTime.setValue(datetimeChronon);
+ datetimeSerde.serialize(aDateTime, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
new file mode 100644
index 0000000..dbd34f2
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DatetimeFromUnixTimeInMsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "datetime-from-unix-time-in-ms", 1);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+ private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+ private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+ private final static byte SER_INT64_TYPE_TAG = ATypeTag.INT64.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new DatetimeFromUnixTimeInMsDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible output types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATETIME);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private AMutableDateTime aDatetime = new AMutableDateTime(0);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ try {
+ if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+ aDatetime.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+ aDatetime.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+ aDatetime.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == SER_INT64_TYPE_TAG) {
+ aDatetime.setValue(AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1));
+ } else {
+ throw new AlgebricksException(
+ "Inapplicable input type for function datetime-from-unix-time-in-ms: expecting integer or null type, but got "
+ + argOut.getByteArray()[0]);
+ }
+ datetimeSerde.serialize(aDatetime, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java
new file mode 100644
index 0000000..85bb378
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalAfterDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalAfterDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-after",
+ 2);
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new IntervalAfterDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+ */
+ @Override
+ protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+ return IntervalLogic.after(s1, e1, s2, e2);
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java
new file mode 100644
index 0000000..2da48ee
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalBeforeDescriptor.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalBeforeDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-before", 2);
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new IntervalBeforeDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+ @Override
+ protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+ return IntervalLogic.before(s1, e1, s2, e2);
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java
new file mode 100644
index 0000000..1064e59
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoveredByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalCoveredByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-covered-by", 2);
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new IntervalCoveredByDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+ */
+ @Override
+ protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+ return IntervalLogic.coveredBy(s1, e1, s2, e2);
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java
new file mode 100644
index 0000000..5b1cfa5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalCoversDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalCoversDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "interval-covers", 2);
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new IntervalCoversDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AbstractIntervalLogicFuncDescriptor#compareIntervals(long, long, long, long)
+ */
+ @Override
+ protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+ return IntervalLogic.covers(s1, e1, s2, e2);
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java
new file mode 100644
index 0000000..4610c89
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalEndedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-ended-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalEndedByDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-ended-by" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 2 ends interval 1: both share the same end point and s2 >= s1 ({@link IntervalLogic#endedBy}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.endedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java
new file mode 100644
index 0000000..9853c62
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalEndsDecriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalEndsDecriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-ends",
+            2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalEndsDecriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-ends" function. NOTE(review): class name has a typo ("Decriptor").
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 1 ends interval 2: both share the same end point and s1 >= s2 ({@link IntervalLogic#ends}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.ends(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java
new file mode 100644
index 0000000..e8e814f
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalLogic.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+public class IntervalLogic {
+
+    public static <T extends Comparable<T>> boolean validateInterval(T s, T e) {
+        return s.compareTo(e) <= 0; // an interval is valid iff start <= end
+    }
+
+    /**
+     * Anything from interval 1 is less than anything from interval 2.
+     * <p/>
+     * |------|<br/>
+     *          |------|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff interval 1 lies entirely before interval 2 (e1 < s2)
+     */
+    public static <T extends Comparable<T>> boolean before(T s1, T e1, T s2, T e2) {
+        return e1.compareTo(s2) < 0;
+    }
+
+    public static <T extends Comparable<T>> boolean after(T s1, T e1, T s2, T e2) {
+        return before(s2, e2, s1, e1); // after(i1, i2) == before(i2, i1)
+    }
+
+    /**
+     * The end of interval 1 is the same as the start of interval 2.
+     * <p/>
+     * |------|<br/>
+     *        |------|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff e1 == s2
+     */
+    public static <T extends Comparable<T>> boolean meets(T s1, T e1, T s2, T e2) {
+        return e1.compareTo(s2) == 0;
+    }
+
+    public static <T extends Comparable<T>> boolean metBy(T s1, T e1, T s2, T e2) {
+        return meets(s2, e2, s1, e1); // metBy(i1, i2) == meets(i2, i1)
+    }
+
+    /**
+     * Something at the end of interval 1 is contained as the beginning of interval 2.
+     * <p/>
+     * |------|<br/>
+     *     |------|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff s1 < s2 < e1 < e2
+     */
+    public static <T extends Comparable<T>> boolean overlaps(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) < 0 && e1.compareTo(s2) > 0 && e2.compareTo(e1) > 0;
+    }
+
+    public static <T extends Comparable<T>> boolean overlappedBy(T s1, T e1, T s2, T e2) {
+        return overlaps(s2, e2, s1, e1); // overlappedBy(i1, i2) == overlaps(i2, i1)
+    }
+
+    /**
+     * Something is shared by both interval 1 and interval 2.
+     * <p/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff the intervals share at least one interior point (s1 < e2 and s2 < e1)
+     */
+    public static <T extends Comparable<T>> boolean overlap(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(e2) < 0 && s2.compareTo(e1) < 0;
+    }
+
+    /**
+     * Anything from interval 1 is contained in the beginning of interval 2.
+     * <p/>
+     * |------|<br/>
+     * |-------|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff s1 == s2 and e1 <= e2
+     */
+    public static <T extends Comparable<T>> boolean starts(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) == 0 && e1.compareTo(e2) <= 0;
+    }
+
+    public static <T extends Comparable<T>> boolean startedBy(T s1, T e1, T s2, T e2) {
+        return starts(s2, e2, s1, e1); // startedBy(i1, i2) == starts(i2, i1)
+    }
+
+    /**
+     * Anything from interval 2 is in interval 1.
+     * <p/>
+     * |------|<br/>
+     *  |----|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff s1 <= s2 and e1 >= e2
+     */
+    public static <T extends Comparable<T>> boolean covers(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) <= 0 && e1.compareTo(e2) >= 0;
+    }
+
+    public static <T extends Comparable<T>> boolean coveredBy(T s1, T e1, T s2, T e2) {
+        return covers(s2, e2, s1, e1); // coveredBy(i1, i2) == covers(i2, i1)
+    }
+
+    /**
+     * Anything from interval 1 is from the ending part of interval 2.
+     * <p/>
+     *  |-----|<br/>
+     * |------|<br/>
+     *
+     * @param s1 start point of interval 1
+     * @param e1 end point of interval 1
+     * @param s2 start point of interval 2
+     * @param e2 end point of interval 2
+     * @return true iff s1 >= s2 and e1 == e2
+     */
+    public static <T extends Comparable<T>> boolean ends(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) >= 0 && e1.compareTo(e2) == 0;
+    }
+
+    public static <T extends Comparable<T>> boolean endedBy(T s1, T e1, T s2, T e2) {
+        return ends(s2, e2, s1, e1); // endedBy(i1, i2) == ends(i2, i1)
+    }
+
+    public static <T extends Comparable<T>> boolean equals(T s1, T e1, T s2, T e2) {
+        return s1.compareTo(s2) == 0 && e1.compareTo(e2) == 0; // fixed: was s1.compareTo(s1), which is always 0
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java
new file mode 100644
index 0000000..0263edb
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMeetsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalMeetsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-meets",
+            2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalMeetsDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-meets" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 1's end equals interval 2's start (e1 == s2); delegates to {@link IntervalLogic#meets}.
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.meets(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java
new file mode 100644
index 0000000..4cab864
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalMetByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalMetByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-met-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalMetByDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-met-by" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 2's end equals interval 1's start (e2 == s1); delegates to {@link IntervalLogic#metBy}.
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.metBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java
new file mode 100644
index 0000000..17e7612
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlappedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalOverlappedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-overlapped-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalOverlappedByDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-overlapped-by" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 2 starts first and ends strictly inside interval 1 ({@link IntervalLogic#overlappedBy}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlappedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java
new file mode 100644
index 0000000..ee62711
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalOverlapsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalOverlapsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-overlaps", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalOverlapsDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-overlaps" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when interval 1 starts first and ends strictly inside interval 2 ({@link IntervalLogic#overlaps}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlaps(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java
new file mode 100644
index 0000000..7e5e0fe
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartedByDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalStartedByDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-started-by", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalStartedByDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-started-by" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when both intervals start together and interval 2 ends at or before interval 1 ({@link IntervalLogic#startedBy}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.startedBy(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java
new file mode 100644
index 0000000..c2ca32e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/IntervalStartsDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class IntervalStartsDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "interval-starts", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IntervalStartsDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "interval-starts" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when both intervals start together and interval 1 ends at or before interval 2 ({@link IntervalLogic#starts}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.starts(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java
new file mode 100644
index 0000000..80479cd
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/OverlapDescriptor.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class OverlapDescriptor extends AbstractIntervalLogicFuncDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "overlap", 2);
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new OverlapDescriptor();
+        }
+    };
+
+    /**
+     * @return the identifier of the builtin "overlap" function.
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+    /**
+     * True when the two intervals share at least one interior point ({@link IntervalLogic#overlap}).
+     */
+    @Override
+    protected boolean compareIntervals(long s1, long e1, long s2, long e2) {
+        return IntervalLogic.overlap(s1, e1, s2, e2);
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java
new file mode 100644
index 0000000..67d8ef3
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDateDescriptor.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractDateDescriptor extends AbstractScalarFunctionDynamicDescriptor { // subtract-date(date1, date2) -> duration (null-in/null-out)
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "subtract-date",
+            2);
+
+    // allowed input types (serialized type tags checked against the first byte of each argument)
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new SubtractDateDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage(); // scratch buffer for arg 0
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage(); // scratch buffer for arg 1
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    private AMutableDuration aDuration = new AMutableDuration(0, 0); // reused across tuples to avoid allocation
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out); // null-in/null-out
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 0: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut0.getByteArray()[0]);
+                            }
+
+                            if (argOut1.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+                                throw new AlgebricksException(
+                                        "Inapplicable input type for parameter 1: expecting a Date ("
+                                                + SER_DATE_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+                                                + "), but got: " + argOut1.getByteArray()[0]);
+                            }
+
+                            long durationChronon = (ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1) - ADateSerializerDeserializer
+                                    .getChronon(argOut1.getByteArray(), 1)) * GregorianCalendarSystem.CHRONON_OF_DAY; // scaled by CHRONON_OF_DAY — assumes a date chronon counts days; TODO confirm
+
+                            aDuration.setValue(0, durationChronon); // 0 months, pure chronon (time) component
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java
new file mode 100644
index 0000000..5b77709
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractDatetimeDescriptor.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "subtract-datetime", 2);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new SubtractDatetimeDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+ // possible output types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADURATION);
+
+ private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut0.reset();
+ eval0.evaluate(tuple);
+ argOut1.reset();
+ eval1.evaluate(tuple);
+
+ try {
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ }
+
+ if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for parameter 0: expecting a DateTime, but got: "
+ + argOut0.getByteArray()[0]);
+ }
+
+ if (argOut1.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for parameter 1: expecting a DateTime, but got: "
+ + argOut1.getByteArray()[0]);
+ }
+
+ long durationChronon = ADateTimeSerializerDeserializer
+ .getChronon(argOut0.getByteArray(), 1)
+ - ADateTimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+ aDuration.setValue(0, durationChronon);
+
+ durationSerde.serialize(aDuration, out);
+
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java
new file mode 100644
index 0000000..323033e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/SubtractTimeDescriptor.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class SubtractTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "subtract-time",
+ 2);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new SubtractTimeDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+ private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+ private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+ // possible output types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADURATION);
+
+ private AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut0.reset();
+ eval0.evaluate(tuple);
+ argOut1.reset();
+ eval1.evaluate(tuple);
+
+ try {
+ if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ }
+
+ if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for parameter 0: expecting a Time ("
+ + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + "), but got: " + argOut0.getByteArray()[0]);
+ }
+
+ if (argOut1.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+ throw new AlgebricksException(
+ "Inapplicable input type for parameter 1: expecting a Time ("
+ + SER_TIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + "), but got: " + argOut1.getByteArray()[0]);
+ }
+
+ int durationChronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+ - ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+
+ aDuration.setValue(0, durationChronon);
+
+ durationSerde.serialize(aDuration, out);
+
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
new file mode 100644
index 0000000..b1053a4
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TimeFromDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "time-from-datetime", 1);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TimeFromDatetimeDescriptor();
+ }
+
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible returning types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ATIME);
+ private AMutableTime aTime = new AMutableTime(0);
+
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ try {
+ if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ throw new AlgebricksException(
+                                    "Inapplicable input type for function time-from-datetime: expecting a DateTime ("
+ + SER_DATETIME_TYPE_TAG + ") or null (" + SER_NULL_TYPE_TAG
+ + "), but got: " + argOut.getByteArray()[0]);
+ }
+ long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
+ argOut.getByteArray(), 1);
+ int timeChronon = (int) (datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY);
+ if (timeChronon < 0) {
+ timeChronon += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+ aTime.setValue(timeChronon);
+ timeSerde.serialize(aTime, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
new file mode 100644
index 0000000..b3fbc0e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TimeFromUnixTimeInMsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private final static long serialVersionUID = 1L;
+ public final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "time-from-unix-time-in-ms", 1);
+
+ // allowed input types
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+ private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+ private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+
+ public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TimeFromUnixTimeInMsDescriptor();
+ }
+ };
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
+ */
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible output types
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ATIME);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ private AMutableTime aTime = new AMutableTime(0);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ try {
+ if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else {
+ if (argOut.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+ aTime.setValue(AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+ aTime.setValue(AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+ } else if (argOut.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+ aTime.setValue(AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
+ } else {
+ throw new AlgebricksException(
+ "Inapplicable input type for function time-from-unix-time-in-ms: expecting integer or null type, but got "
+ + argOut.getByteArray()[0]);
+ }
+ timeSerde.serialize(aTime, out);
+ }
+ } catch (HyracksDataException hex) {
+ throw new AlgebricksException(hex);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.IFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 92d4294..a334893 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -17,6 +17,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
import edu.uci.ics.asterix.formats.nontagged.AqlNormalizedKeyComputerFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
@@ -65,6 +66,13 @@
import edu.uci.ics.asterix.runtime.aggregates.std.SumAggregateDescriptor;
import edu.uci.ics.asterix.runtime.aggregates.stream.EmptyStreamAggregateDescriptor;
import edu.uci.ics.asterix.runtime.aggregates.stream.NonEmptyStreamAggregateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalDayAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalHourAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMillisecondAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMinuteAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMonthAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalSecondAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalYearAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.CircleCenterAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.CircleRadiusAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.LineRectanglePolygonAccessor;
@@ -84,6 +92,12 @@
import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt32ConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt64ConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.constructors.AInt8ConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromDateConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromDateTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalFromTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromDateConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromDateTimeConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.constructors.AIntervalStartFromTimeConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.constructors.ALineConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.constructors.ANullConstructorDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.constructors.APoint3DConstructorDescriptor;
@@ -124,6 +138,7 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.LenDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.LikeDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.NotNullDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericAbsDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericAddDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericCeilingDescriptor;
@@ -152,6 +167,10 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.SpatialDistanceDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.SpatialIntersectDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.StartsWithDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.SwitchCaseDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.UnorderedListConstructorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.WordTokensDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.StringConcatDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.StringEndWithDescrtiptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.StringEqualDescriptor;
@@ -167,11 +186,38 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.Substring2Descriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringAfterDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringBeforeDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.SubstringDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.SwitchCaseDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.UnorderedListConstructorDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.WordTokensDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.YearDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddDateDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddDatetimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AddTimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AdjustDateTimeForTimeZoneDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.AdjustTimeForTimeZoneDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CalendarDuartionFromDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CalendarDurationFromDateTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentDateTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.CurrentTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromUnixTimeInDaysDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromDateAndTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInMsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalAfterDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalBeforeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalCoveredByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalCoversDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndsDecriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartedByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalStartsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDateDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractTimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromDatetimeDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromUnixTimeInMsDescriptor;
import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
import edu.uci.ics.asterix.runtime.runningaggregates.std.TidRunningAggregateDescriptor;
@@ -197,6 +243,7 @@
import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -218,52 +265,56 @@
public class NonTaggedDataFormat implements IDataFormat {
- private static boolean registered = false;
+ private static boolean registered = false;
- public static final NonTaggedDataFormat INSTANCE = new NonTaggedDataFormat();
+ public static final NonTaggedDataFormat INSTANCE = new NonTaggedDataFormat();
- private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
+ private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
- private static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+ private static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
- public static final String NON_TAGGED_DATA_FORMAT = "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat";
+ public static final String NON_TAGGED_DATA_FORMAT = "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat";
- static {
- typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
- }
+ static {
+ typeToValueParserFactMap.put(ATypeTag.INT32,
+ IntegerParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.FLOAT,
+ FloatParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.DOUBLE,
+ DoubleParserFactory.INSTANCE);
+ typeToValueParserFactMap
+ .put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.STRING,
+ UTF8StringParserFactory.INSTANCE);
+ }
- public NonTaggedDataFormat() {
- }
+ public NonTaggedDataFormat() {
+ }
- public void registerRuntimeFunctions() throws AlgebricksException {
+ public void registerRuntimeFunctions() throws AlgebricksException {
- if (registered) {
- return;
- }
- registered = true;
+ if (registered) {
+ return;
+ }
+ registered = true;
- if (FunctionManagerHolder.getFunctionManager() != null) {
- return;
- }
+ if (FunctionManagerHolder.getFunctionManager() != null) {
+ return;
+ }
- List<IFunctionDescriptorFactory> temp = new ArrayList<IFunctionDescriptorFactory>();
+ List<IFunctionDescriptorFactory> temp = new ArrayList<IFunctionDescriptorFactory>();
- // format-independent
- temp.add(ContainsDescriptor.FACTORY);
- temp.add(EndsWithDescriptor.FACTORY);
- temp.add(StartsWithDescriptor.FACTORY);
- temp.add(SubstringDescriptor.FACTORY);
- temp.add(TidRunningAggregateDescriptor.FACTORY);
+ // format-independent
+ temp.add(ContainsDescriptor.FACTORY);
+ temp.add(EndsWithDescriptor.FACTORY);
+ temp.add(StartsWithDescriptor.FACTORY);
+ temp.add(SubstringDescriptor.FACTORY);
+ temp.add(TidRunningAggregateDescriptor.FACTORY);
// format-dependent
temp.add(AndDescriptor.FACTORY);
temp.add(OrDescriptor.FACTORY);
temp.add(LikeDescriptor.FACTORY);
- temp.add(YearDescriptor.FACTORY);
temp.add(ScanCollectionDescriptor.FACTORY);
temp.add(AnyCollectionMemberDescriptor.FACTORY);
temp.add(ClosedRecordConstructorDescriptor.FACTORY);
@@ -276,137 +327,189 @@
temp.add(UnorderedListConstructorDescriptor.FACTORY);
temp.add(EmbedTypeDescriptor.FACTORY);
- temp.add(NumericAddDescriptor.FACTORY);
- temp.add(NumericDivideDescriptor.FACTORY);
- temp.add(NumericMultiplyDescriptor.FACTORY);
- temp.add(NumericSubtractDescriptor.FACTORY);
- temp.add(NumericModuloDescriptor.FACTORY);
- temp.add(IsNullDescriptor.FACTORY);
- temp.add(NotDescriptor.FACTORY);
- temp.add(LenDescriptor.FACTORY);
- temp.add(EmptyStreamAggregateDescriptor.FACTORY);
- temp.add(NonEmptyStreamAggregateDescriptor.FACTORY);
- temp.add(RangeDescriptor.FACTORY);
+ temp.add(NumericAddDescriptor.FACTORY);
+ temp.add(NumericDivideDescriptor.FACTORY);
+ temp.add(NumericMultiplyDescriptor.FACTORY);
+ temp.add(NumericSubtractDescriptor.FACTORY);
+ temp.add(NumericModuloDescriptor.FACTORY);
+ temp.add(IsNullDescriptor.FACTORY);
+ temp.add(NotDescriptor.FACTORY);
+ temp.add(LenDescriptor.FACTORY);
+ temp.add(EmptyStreamAggregateDescriptor.FACTORY);
+ temp.add(NonEmptyStreamAggregateDescriptor.FACTORY);
+ temp.add(RangeDescriptor.FACTORY);
- temp.add(NumericAbsDescriptor.FACTORY);
- temp.add(NumericCeilingDescriptor.FACTORY);
- temp.add(NumericFloorDescriptor.FACTORY);
- temp.add(NumericRoundDescriptor.FACTORY);
- temp.add(NumericRoundHalfToEvenDescriptor.FACTORY);
- temp.add(NumericRoundHalfToEven2Descriptor.FACTORY);
- // String functions
- temp.add(StringEqualDescriptor.FACTORY);
- temp.add(StringStartWithDescrtiptor.FACTORY);
- temp.add(StringEndWithDescrtiptor.FACTORY);
- temp.add(StringMatchesDescriptor.FACTORY);
- temp.add(StringLowerCaseDescriptor.FACTORY);
- temp.add(StringMatchesWithFlagDescriptor.FACTORY);
- temp.add(StringReplaceDescriptor.FACTORY);
- temp.add(StringReplaceWithFlagsDescriptor.FACTORY);
- temp.add(StringLengthDescriptor.FACTORY);
- temp.add(Substring2Descriptor.FACTORY);
- temp.add(SubstringBeforeDescriptor.FACTORY);
- temp.add(SubstringAfterDescriptor.FACTORY);
- temp.add(StringToCodePointDescriptor.FACTORY);
- temp.add(CodePointToStringDescriptor.FACTORY);
- temp.add(StringConcatDescriptor.FACTORY);
- temp.add(StringJoinDescriptor.FACTORY);
+ temp.add(NumericAbsDescriptor.FACTORY);
+ temp.add(NumericCeilingDescriptor.FACTORY);
+ temp.add(NumericFloorDescriptor.FACTORY);
+ temp.add(NumericRoundDescriptor.FACTORY);
+ temp.add(NumericRoundHalfToEvenDescriptor.FACTORY);
+ temp.add(NumericRoundHalfToEven2Descriptor.FACTORY);
+ // String functions
+ temp.add(StringEqualDescriptor.FACTORY);
+ temp.add(StringStartWithDescrtiptor.FACTORY);
+ temp.add(StringEndWithDescrtiptor.FACTORY);
+ temp.add(StringMatchesDescriptor.FACTORY);
+ temp.add(StringLowerCaseDescriptor.FACTORY);
+ temp.add(StringMatchesWithFlagDescriptor.FACTORY);
+ temp.add(StringReplaceDescriptor.FACTORY);
+ temp.add(StringReplaceWithFlagsDescriptor.FACTORY);
+ temp.add(StringLengthDescriptor.FACTORY);
+ temp.add(Substring2Descriptor.FACTORY);
+ temp.add(SubstringBeforeDescriptor.FACTORY);
+ temp.add(SubstringAfterDescriptor.FACTORY);
+ temp.add(StringToCodePointDescriptor.FACTORY);
+ temp.add(CodePointToStringDescriptor.FACTORY);
+ temp.add(StringConcatDescriptor.FACTORY);
+ temp.add(StringJoinDescriptor.FACTORY);
- // aggregates
- temp.add(ListifyAggregateDescriptor.FACTORY);
- temp.add(CountAggregateDescriptor.FACTORY);
- temp.add(AvgAggregateDescriptor.FACTORY);
- temp.add(LocalAvgAggregateDescriptor.FACTORY);
- temp.add(GlobalAvgAggregateDescriptor.FACTORY);
- temp.add(SumAggregateDescriptor.FACTORY);
- temp.add(LocalSumAggregateDescriptor.FACTORY);
- temp.add(MaxAggregateDescriptor.FACTORY);
- temp.add(LocalMaxAggregateDescriptor.FACTORY);
- temp.add(MinAggregateDescriptor.FACTORY);
- temp.add(LocalMinAggregateDescriptor.FACTORY);
+ // aggregates
+ temp.add(ListifyAggregateDescriptor.FACTORY);
+ temp.add(CountAggregateDescriptor.FACTORY);
+ temp.add(AvgAggregateDescriptor.FACTORY);
+ temp.add(LocalAvgAggregateDescriptor.FACTORY);
+ temp.add(GlobalAvgAggregateDescriptor.FACTORY);
+ temp.add(SumAggregateDescriptor.FACTORY);
+ temp.add(LocalSumAggregateDescriptor.FACTORY);
+ temp.add(MaxAggregateDescriptor.FACTORY);
+ temp.add(LocalMaxAggregateDescriptor.FACTORY);
+ temp.add(MinAggregateDescriptor.FACTORY);
+ temp.add(LocalMinAggregateDescriptor.FACTORY);
- // serializable aggregates
- temp.add(SerializableCountAggregateDescriptor.FACTORY);
- temp.add(SerializableAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableGlobalAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableSumAggregateDescriptor.FACTORY);
- temp.add(SerializableLocalSumAggregateDescriptor.FACTORY);
+ // serializable aggregates
+ temp.add(SerializableCountAggregateDescriptor.FACTORY);
+ temp.add(SerializableAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableGlobalAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableSumAggregateDescriptor.FACTORY);
+ temp.add(SerializableLocalSumAggregateDescriptor.FACTORY);
- // scalar aggregates
- temp.add(ScalarCountAggregateDescriptor.FACTORY);
- temp.add(ScalarAvgAggregateDescriptor.FACTORY);
- temp.add(ScalarSumAggregateDescriptor.FACTORY);
- temp.add(ScalarMaxAggregateDescriptor.FACTORY);
- temp.add(ScalarMinAggregateDescriptor.FACTORY);
+ // scalar aggregates
+ temp.add(ScalarCountAggregateDescriptor.FACTORY);
+ temp.add(ScalarAvgAggregateDescriptor.FACTORY);
+ temp.add(ScalarSumAggregateDescriptor.FACTORY);
+ temp.add(ScalarMaxAggregateDescriptor.FACTORY);
+ temp.add(ScalarMinAggregateDescriptor.FACTORY);
- // new functions - constructors
- temp.add(ABooleanConstructorDescriptor.FACTORY);
- temp.add(ANullConstructorDescriptor.FACTORY);
- temp.add(AStringConstructorDescriptor.FACTORY);
- temp.add(AInt8ConstructorDescriptor.FACTORY);
- temp.add(AInt16ConstructorDescriptor.FACTORY);
- temp.add(AInt32ConstructorDescriptor.FACTORY);
- temp.add(AInt64ConstructorDescriptor.FACTORY);
- temp.add(AFloatConstructorDescriptor.FACTORY);
- temp.add(ADoubleConstructorDescriptor.FACTORY);
- temp.add(APointConstructorDescriptor.FACTORY);
- temp.add(APoint3DConstructorDescriptor.FACTORY);
- temp.add(ALineConstructorDescriptor.FACTORY);
- temp.add(APolygonConstructorDescriptor.FACTORY);
- temp.add(ACircleConstructorDescriptor.FACTORY);
- temp.add(ARectangleConstructorDescriptor.FACTORY);
- temp.add(ATimeConstructorDescriptor.FACTORY);
- temp.add(ADateConstructorDescriptor.FACTORY);
- temp.add(ADateTimeConstructorDescriptor.FACTORY);
- temp.add(ADurationConstructorDescriptor.FACTORY);
+ // new functions - constructors
+ temp.add(ABooleanConstructorDescriptor.FACTORY);
+ temp.add(ANullConstructorDescriptor.FACTORY);
+ temp.add(AStringConstructorDescriptor.FACTORY);
+ temp.add(AInt8ConstructorDescriptor.FACTORY);
+ temp.add(AInt16ConstructorDescriptor.FACTORY);
+ temp.add(AInt32ConstructorDescriptor.FACTORY);
+ temp.add(AInt64ConstructorDescriptor.FACTORY);
+ temp.add(AFloatConstructorDescriptor.FACTORY);
+ temp.add(ADoubleConstructorDescriptor.FACTORY);
+ temp.add(APointConstructorDescriptor.FACTORY);
+ temp.add(APoint3DConstructorDescriptor.FACTORY);
+ temp.add(ALineConstructorDescriptor.FACTORY);
+ temp.add(APolygonConstructorDescriptor.FACTORY);
+ temp.add(ACircleConstructorDescriptor.FACTORY);
+ temp.add(ARectangleConstructorDescriptor.FACTORY);
+ temp.add(ATimeConstructorDescriptor.FACTORY);
+ temp.add(ADateConstructorDescriptor.FACTORY);
+ temp.add(ADateTimeConstructorDescriptor.FACTORY);
+ temp.add(ADurationConstructorDescriptor.FACTORY);
- // Spatial
- temp.add(CreatePointDescriptor.FACTORY);
- temp.add(CreateLineDescriptor.FACTORY);
- temp.add(CreatePolygonDescriptor.FACTORY);
- temp.add(CreateCircleDescriptor.FACTORY);
- temp.add(CreateRectangleDescriptor.FACTORY);
- temp.add(SpatialAreaDescriptor.FACTORY);
- temp.add(SpatialDistanceDescriptor.FACTORY);
- temp.add(SpatialIntersectDescriptor.FACTORY);
- temp.add(CreateMBRDescriptor.FACTORY);
- temp.add(SpatialCellDescriptor.FACTORY);
- temp.add(PointXCoordinateAccessor.FACTORY);
- temp.add(PointYCoordinateAccessor.FACTORY);
- temp.add(CircleRadiusAccessor.FACTORY);
- temp.add(CircleCenterAccessor.FACTORY);
- temp.add(LineRectanglePolygonAccessor.FACTORY);
+ // Spatial
+ temp.add(CreatePointDescriptor.FACTORY);
+ temp.add(CreateLineDescriptor.FACTORY);
+ temp.add(CreatePolygonDescriptor.FACTORY);
+ temp.add(CreateCircleDescriptor.FACTORY);
+ temp.add(CreateRectangleDescriptor.FACTORY);
+ temp.add(SpatialAreaDescriptor.FACTORY);
+ temp.add(SpatialDistanceDescriptor.FACTORY);
+ temp.add(SpatialIntersectDescriptor.FACTORY);
+ temp.add(CreateMBRDescriptor.FACTORY);
+ temp.add(SpatialCellDescriptor.FACTORY);
+ temp.add(PointXCoordinateAccessor.FACTORY);
+ temp.add(PointYCoordinateAccessor.FACTORY);
+ temp.add(CircleRadiusAccessor.FACTORY);
+ temp.add(CircleCenterAccessor.FACTORY);
+ temp.add(LineRectanglePolygonAccessor.FACTORY);
- // fuzzyjoin function
- temp.add(FuzzyEqDescriptor.FACTORY);
- temp.add(SubsetCollectionDescriptor.FACTORY);
- temp.add(PrefixLenJaccardDescriptor.FACTORY);
+ // fuzzyjoin function
+ temp.add(FuzzyEqDescriptor.FACTORY);
+ temp.add(SubsetCollectionDescriptor.FACTORY);
+ temp.add(PrefixLenJaccardDescriptor.FACTORY);
- temp.add(WordTokensDescriptor.FACTORY);
- temp.add(HashedWordTokensDescriptor.FACTORY);
- temp.add(CountHashedWordTokensDescriptor.FACTORY);
+ temp.add(WordTokensDescriptor.FACTORY);
+ temp.add(HashedWordTokensDescriptor.FACTORY);
+ temp.add(CountHashedWordTokensDescriptor.FACTORY);
- temp.add(GramTokensDescriptor.FACTORY);
- temp.add(HashedGramTokensDescriptor.FACTORY);
- temp.add(CountHashedGramTokensDescriptor.FACTORY);
+ temp.add(GramTokensDescriptor.FACTORY);
+ temp.add(HashedGramTokensDescriptor.FACTORY);
+ temp.add(CountHashedGramTokensDescriptor.FACTORY);
- temp.add(EditDistanceDescriptor.FACTORY);
- temp.add(EditDistanceCheckDescriptor.FACTORY);
- temp.add(EditDistanceStringIsFilterable.FACTORY);
- temp.add(EditDistanceListIsFilterable.FACTORY);
+ temp.add(EditDistanceDescriptor.FACTORY);
+ temp.add(EditDistanceCheckDescriptor.FACTORY);
+ temp.add(EditDistanceStringIsFilterable.FACTORY);
+ temp.add(EditDistanceListIsFilterable.FACTORY);
- temp.add(SimilarityJaccardDescriptor.FACTORY);
- temp.add(SimilarityJaccardCheckDescriptor.FACTORY);
- temp.add(SimilarityJaccardSortedDescriptor.FACTORY);
- temp.add(SimilarityJaccardSortedCheckDescriptor.FACTORY);
- temp.add(SimilarityJaccardPrefixDescriptor.FACTORY);
- temp.add(SimilarityJaccardPrefixCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardDescriptor.FACTORY);
+ temp.add(SimilarityJaccardCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardSortedDescriptor.FACTORY);
+ temp.add(SimilarityJaccardSortedCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardPrefixDescriptor.FACTORY);
+ temp.add(SimilarityJaccardPrefixCheckDescriptor.FACTORY);
- temp.add(SwitchCaseDescriptor.FACTORY);
- temp.add(RegExpDescriptor.FACTORY);
- temp.add(InjectFailureDescriptor.FACTORY);
- temp.add(CastRecordDescriptor.FACTORY);
+ temp.add(SwitchCaseDescriptor.FACTORY);
+ temp.add(RegExpDescriptor.FACTORY);
+ temp.add(InjectFailureDescriptor.FACTORY);
+ temp.add(CastRecordDescriptor.FACTORY);
+ temp.add(NotNullDescriptor.FACTORY);
+
+ // Spatial and temporal type accessors
+ temp.add(TemporalYearAccessor.FACTORY);
+ temp.add(TemporalMonthAccessor.FACTORY);
+ temp.add(TemporalDayAccessor.FACTORY);
+ temp.add(TemporalHourAccessor.FACTORY);
+ temp.add(TemporalMinuteAccessor.FACTORY);
+ temp.add(TemporalSecondAccessor.FACTORY);
+ temp.add(TemporalMillisecondAccessor.FACTORY);
+
+ // Temporal functions
+ temp.add(DateFromUnixTimeInDaysDescriptor.FACTORY);
+ temp.add(DateFromDatetimeDescriptor.FACTORY);
+ temp.add(AddDateDurationDescriptor.FACTORY);
+ temp.add(SubtractDateDescriptor.FACTORY);
+ temp.add(TimeFromUnixTimeInMsDescriptor.FACTORY);
+ temp.add(TimeFromDatetimeDescriptor.FACTORY);
+ temp.add(SubtractTimeDescriptor.FACTORY);
+ temp.add(AddTimeDurationDescriptor.FACTORY);
+ temp.add(DatetimeFromUnixTimeInMsDescriptor.FACTORY);
+ temp.add(DatetimeFromDateAndTimeDescriptor.FACTORY);
+ temp.add(SubtractDatetimeDescriptor.FACTORY);
+ temp.add(AddDatetimeDurationDescriptor.FACTORY);
+ temp.add(CalendarDurationFromDateTimeDescriptor.FACTORY);
+ temp.add(CalendarDuartionFromDateDescriptor.FACTORY);
+ temp.add(AdjustDateTimeForTimeZoneDescriptor.FACTORY);
+ temp.add(AdjustTimeForTimeZoneDescriptor.FACTORY);
+ temp.add(IntervalBeforeDescriptor.FACTORY);
+ temp.add(IntervalAfterDescriptor.FACTORY);
+ temp.add(IntervalMeetsDescriptor.FACTORY);
+ temp.add(IntervalMetByDescriptor.FACTORY);
+ temp.add(IntervalOverlapsDescriptor.FACTORY);
+ temp.add(IntervalOverlappedByDescriptor.FACTORY);
+ temp.add(OverlapDescriptor.FACTORY);
+ temp.add(IntervalStartsDescriptor.FACTORY);
+ temp.add(IntervalStartedByDescriptor.FACTORY);
+ temp.add(IntervalCoversDescriptor.FACTORY);
+ temp.add(IntervalCoveredByDescriptor.FACTORY);
+ temp.add(IntervalEndsDecriptor.FACTORY);
+ temp.add(IntervalEndedByDescriptor.FACTORY);
+ temp.add(CurrentDateDescriptor.FACTORY);
+ temp.add(CurrentTimeDescriptor.FACTORY);
+ temp.add(CurrentDateTimeDescriptor.FACTORY);
+
+ // Interval constructor
+ temp.add(AIntervalFromDateConstructorDescriptor.FACTORY);
+ temp.add(AIntervalFromTimeConstructorDescriptor.FACTORY);
+ temp.add(AIntervalFromDateTimeConstructorDescriptor.FACTORY);
+ temp.add(AIntervalStartFromDateConstructorDescriptor.FACTORY);
+ temp.add(AIntervalStartFromDateTimeConstructorDescriptor.FACTORY);
+ temp.add(AIntervalStartFromTimeConstructorDescriptor.FACTORY);
IFunctionManager mgr = new FunctionManagerImpl();
for (IFunctionDescriptorFactory fdFactory : temp) {
@@ -415,367 +518,425 @@
FunctionManagerHolder.setFunctionManager(mgr);
}
- @Override
- public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
- return AqlBinaryBooleanInspectorImpl.FACTORY;
- }
+ @Override
+ public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
+ return AqlBinaryBooleanInspectorImpl.FACTORY;
+ }
- @Override
- public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
- return AqlBinaryComparatorFactoryProvider.INSTANCE;
- }
+ @Override
+ public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
+ return AqlBinaryComparatorFactoryProvider.INSTANCE;
+ }
- @Override
- public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
- return AqlBinaryHashFunctionFactoryProvider.INSTANCE;
- }
+ @Override
+ public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
+ return AqlBinaryHashFunctionFactoryProvider.INSTANCE;
+ }
- @Override
- public ISerializerDeserializerProvider getSerdeProvider() {
- return AqlSerializerDeserializerProvider.INSTANCE; // done
- }
+ @Override
+ public ISerializerDeserializerProvider getSerdeProvider() {
+ return AqlSerializerDeserializerProvider.INSTANCE; // done
+ }
- @Override
- public ITypeTraitProvider getTypeTraitProvider() {
- return AqlTypeTraitProvider.INSTANCE;
- }
+ @Override
+ public ITypeTraitProvider getTypeTraitProvider() {
+ return AqlTypeTraitProvider.INSTANCE;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory getFieldAccessEvaluatorFactory(ARecordType recType, String fldName, int recordColumn)
- throws AlgebricksException {
- String[] names = recType.getFieldNames();
- int n = names.length;
- for (int i = 0; i < n; i++) {
- if (names[i].equals(fldName)) {
- ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(recordColumn);
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai,
- dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
- abvs.getLength()));
- ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory,
- fldIndexEvalFactory, recType);
- return evalFactory;
- }
- }
- throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
- }
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory getFieldAccessEvaluatorFactory(
+ ARecordType recType, String fldName, int recordColumn)
+ throws AlgebricksException {
+ String[] names = recType.getFieldNames();
+ int n = names.length;
+ for (int i = 0; i < n; i++) {
+ if (names[i].equals(fldName)) {
+ ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
+ recordColumn);
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(
+ ai, dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
+ ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(
+ recordEvalFactory, fldIndexEvalFactory, recType);
+ return evalFactory;
+ }
+ }
+ throw new AlgebricksException("Could not find field " + fldName
+ + " in the schema.");
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory[] createMBRFactory(ARecordType recType, String fldName, int recordColumn, int dimension)
- throws AlgebricksException {
- ICopyEvaluatorFactory evalFactory = getFieldAccessEvaluatorFactory(recType, fldName, recordColumn);
- int numOfFields = dimension * 2;
- ICopyEvaluatorFactory[] evalFactories = new ICopyEvaluatorFactory[numOfFields];
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory[] createMBRFactory(ARecordType recType,
+ String fldName, int recordColumn, int dimension)
+ throws AlgebricksException {
+ ICopyEvaluatorFactory evalFactory = getFieldAccessEvaluatorFactory(
+ recType, fldName, recordColumn);
+ int numOfFields = dimension * 2;
+ ICopyEvaluatorFactory[] evalFactories = new ICopyEvaluatorFactory[numOfFields];
- ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
- DataOutput dos1 = abvs1.getDataOutput();
- try {
- AInt32 ai = new AInt32(dimension);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos1);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory dimensionEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs1.getByteArray(),
- abvs1.getLength()));
+ ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
+ DataOutput dos1 = abvs1.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(dimension);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType())
+ .serialize(ai, dos1);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory dimensionEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs1.getByteArray(), abvs1.getLength()));
- for (int i = 0; i < numOfFields; i++) {
- ArrayBackedValueStorage abvs2 = new ArrayBackedValueStorage();
- DataOutput dos2 = abvs2.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos2);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory coordinateEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs2.getByteArray(),
- abvs2.getLength()));
+ for (int i = 0; i < numOfFields; i++) {
+ ArrayBackedValueStorage abvs2 = new ArrayBackedValueStorage();
+ DataOutput dos2 = abvs2.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(ai,
+ dos2);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory coordinateEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs2.getByteArray(), abvs2.getLength()));
- evalFactories[i] = new CreateMBREvalFactory(evalFactory, dimensionEvalFactory, coordinateEvalFactory);
- }
- return evalFactories;
- }
+ evalFactories[i] = new CreateMBREvalFactory(evalFactory,
+ dimensionEvalFactory, coordinateEvalFactory);
+ }
+ return evalFactories;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
- ARecordType recType, String fldName) throws AlgebricksException {
- String[] names = recType.getFieldNames();
- int n = names.length;
- for (int i = 0; i < n; i++) {
- if (names[i].equals(fldName)) {
- ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
- GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai,
- dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
- abvs.getLength()));
- ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory,
- fldIndexEvalFactory, recType);
- IFunctionInfo finfoAccess = AsterixBuiltinFunctions
- .getAsterixFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
+ @SuppressWarnings("unchecked")
+ @Override
+ public Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
+ ARecordType recType, String fldName) throws AlgebricksException {
+ String[] names = recType.getFieldNames();
+ int n = names.length;
+ for (int i = 0; i < n; i++) {
+ if (names[i].equals(fldName)) {
+ ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
+ GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(
+ ai, dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
+ ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(
+ recordEvalFactory, fldIndexEvalFactory, recType);
+ IFunctionInfo finfoAccess = AsterixBuiltinFunctions
+ .getAsterixFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
- ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
- new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
- new AInt32(i)))));
- return new Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory,
- partitionFun, recType.getFieldTypes()[i]);
- }
- }
- throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
- }
+ ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(
+ finfoAccess,
+ new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(
+ METADATA_DUMMY_VAR)),
+ new MutableObject<ILogicalExpression>(
+ new ConstantExpression(
+ new AsterixConstantValue(new AInt32(i)))));
+ return new Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType>(
+ evalFactory, partitionFun, recType.getFieldTypes()[i]);
+ }
+ }
+ throw new AlgebricksException("Could not find field " + fldName
+ + " in the schema.");
+ }
- @Override
- public IFunctionDescriptor resolveFunction(ILogicalExpression expr, IVariableTypeEnvironment context)
- throws AlgebricksException {
- FunctionIdentifier fnId = ((AbstractFunctionCallExpression) expr).getFunctionIdentifier();
- IFunctionManager mgr = FunctionManagerHolder.getFunctionManager();
- IFunctionDescriptor fd = mgr.lookupFunction(fnId);
- if (fd == null) {
- throw new AsterixRuntimeException("Unresolved function " + fnId);
- }
- typeInference(expr, fd, context);
- return fd;
- }
+ @Override
+ public IFunctionDescriptor resolveFunction(ILogicalExpression expr,
+ IVariableTypeEnvironment context) throws AlgebricksException {
+ FunctionIdentifier fnId = ((AbstractFunctionCallExpression) expr)
+ .getFunctionIdentifier();
+ IFunctionManager mgr = FunctionManagerHolder.getFunctionManager();
+ IFunctionDescriptor fd = mgr.lookupFunction(fnId);
+ if (fd == null) {
+ throw new AsterixRuntimeException("Unresolved function " + fnId);
+ }
+ typeInference(expr, fd, context);
+ return fd;
+ }
- private void typeInference(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context)
- throws AlgebricksException {
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
- AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
- if (f.getArguments().size() == 0) {
- ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(null, null));
- } else {
- IAType itemType = (IAType) context.getType(f.getArguments().get(0).getValue());
- // Convert UNION types into ANY.
- if (itemType instanceof AUnionType) {
- itemType = BuiltinType.ANY;
- }
- ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(itemType, null));
- }
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
- ARecordType rt = (ARecordType) TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) expr);
- ARecordType it = (ARecordType) TypeComputerUtilities.getInputType((AbstractFunctionCallExpression) expr);
- ((CastRecordDescriptor) fd).reset(rt, it);
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
- ARecordType rt = (ARecordType) context.getType(expr);
- ((OpenRecordConstructorDescriptor) fd).reset(rt,
- computeOpenFields((AbstractFunctionCallExpression) expr, rt));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
- ((ClosedRecordConstructorDescriptor) fd).reset((ARecordType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
- ((OrderedListConstructorDescriptor) fd).reset((AOrderedListType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)) {
- ((UnorderedListConstructorDescriptor) fd).reset((AUnorderedListType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
- AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
- IAType t = (IAType) context.getType(fce.getArguments().get(0).getValue());
- switch (t.getTypeTag()) {
- case RECORD: {
- ARecordType recType = (ARecordType) t;
- ((FieldAccessByIndexDescriptor) fd).reset(recType);
- break;
- }
- case UNION: {
- AUnionType unionT = (AUnionType) t;
- if (unionT.isNullableType()) {
- IAType t2 = unionT.getUnionList().get(1);
- if (t2.getTypeTag() == ATypeTag.RECORD) {
- ARecordType recType = (ARecordType) t2;
- ((FieldAccessByIndexDescriptor) fd).reset(recType);
- break;
- }
- }
- throw new NotImplementedException("field-access-by-index for data of type " + t);
- }
- default: {
- throw new NotImplementedException("field-access-by-index for data of type " + t);
- }
- }
- }
- }
+ private void typeInference(ILogicalExpression expr, IFunctionDescriptor fd,
+ IVariableTypeEnvironment context) throws AlgebricksException {
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+ if (f.getArguments().size() == 0) {
+ ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(
+ null, null));
+ } else {
+ IAType itemType = (IAType) context.getType(f.getArguments()
+ .get(0).getValue());
+ // Convert UNION types into ANY.
+ if (itemType instanceof AUnionType) {
+ itemType = BuiltinType.ANY;
+ }
+ ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(
+ itemType, null));
+ }
+ }
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
+ ARecordType rt = (ARecordType) TypeComputerUtilities
+ .getRequiredType((AbstractFunctionCallExpression) expr);
+ ARecordType it = (ARecordType) TypeComputerUtilities
+ .getInputType((AbstractFunctionCallExpression) expr);
+ ((CastRecordDescriptor) fd).reset(rt, it);
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
+ ARecordType rt = (ARecordType) context.getType(expr);
+ ((OpenRecordConstructorDescriptor) fd)
+ .reset(rt,
+ computeOpenFields(
+ (AbstractFunctionCallExpression) expr, rt));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+ ((ClosedRecordConstructorDescriptor) fd)
+ .reset((ARecordType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
+ ((OrderedListConstructorDescriptor) fd)
+ .reset((AOrderedListType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)) {
+ ((UnorderedListConstructorDescriptor) fd)
+ .reset((AUnorderedListType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
+ AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+ IAType t = (IAType) context.getType(fce.getArguments().get(0)
+ .getValue());
+ switch (t.getTypeTag()) {
+ case RECORD: {
+ ARecordType recType = (ARecordType) t;
+ ((FieldAccessByIndexDescriptor) fd).reset(recType);
+ break;
+ }
+ case UNION: {
+ AUnionType unionT = (AUnionType) t;
+ if (unionT.isNullableType()) {
+ IAType t2 = unionT.getUnionList().get(1);
+ if (t2.getTypeTag() == ATypeTag.RECORD) {
+ ARecordType recType = (ARecordType) t2;
+ ((FieldAccessByIndexDescriptor) fd).reset(recType);
+ break;
+ }
+ }
+ throw new NotImplementedException(
+ "field-access-by-index for data of type " + t);
+ }
+ default: {
+ throw new NotImplementedException(
+ "field-access-by-index for data of type " + t);
+ }
+ }
+ }
+ }
- private boolean[] computeOpenFields(AbstractFunctionCallExpression expr, ARecordType recType) {
- int n = expr.getArguments().size() / 2;
- boolean[] open = new boolean[n];
- for (int i = 0; i < n; i++) {
- Mutable<ILogicalExpression> argRef = expr.getArguments().get(2 * i);
- ILogicalExpression arg = argRef.getValue();
- if (arg.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
- String fn = ((AString) ((AsterixConstantValue) ((ConstantExpression) arg).getValue()).getObject())
- .getStringValue();
- open[i] = true;
- for (String s : recType.getFieldNames()) {
- if (s.equals(fn)) {
- open[i] = false;
- break;
- }
- }
- } else {
- open[i] = true;
- }
- }
- return open;
- }
+ private boolean[] computeOpenFields(AbstractFunctionCallExpression expr,
+ ARecordType recType) {
+ int n = expr.getArguments().size() / 2;
+ boolean[] open = new boolean[n];
+ for (int i = 0; i < n; i++) {
+ Mutable<ILogicalExpression> argRef = expr.getArguments().get(2 * i);
+ ILogicalExpression arg = argRef.getValue();
+ if (arg.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+ String fn = ((AString) ((AsterixConstantValue) ((ConstantExpression) arg)
+ .getValue()).getObject()).getStringValue();
+ open[i] = true;
+ for (String s : recType.getFieldNames()) {
+ if (s.equals(fn)) {
+ open[i] = false;
+ break;
+ }
+ }
+ } else {
+ open[i] = true;
+ }
+ }
+ return open;
+ }
- @Override
- public IPrinterFactoryProvider getPrinterFactoryProvider() {
- return AqlPrinterFactoryProvider.INSTANCE;
- }
+ @Override
+ public IPrinterFactoryProvider getPrinterFactoryProvider() {
+ return AqlPrinterFactoryProvider.INSTANCE;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory getConstantEvalFactory(IAlgebricksConstantValue value) throws AlgebricksException {
- IAObject obj = null;
- if (value.isNull()) {
- obj = ANull.NULL;
- } else if (value.isTrue()) {
- obj = ABoolean.TRUE;
- } else if (value.isFalse()) {
- obj = ABoolean.FALSE;
- } else {
- AsterixConstantValue acv = (AsterixConstantValue) value;
- obj = acv.getObject();
- }
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(obj.getType()).serialize(obj, dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
- }
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory getConstantEvalFactory(
+ IAlgebricksConstantValue value) throws AlgebricksException {
+ IAObject obj = null;
+ if (value.isNull()) {
+ obj = ANull.NULL;
+ } else if (value.isTrue()) {
+ obj = ABoolean.TRUE;
+ } else if (value.isFalse()) {
+ obj = ABoolean.FALSE;
+ } else {
+ AsterixConstantValue acv = (AsterixConstantValue) value;
+ obj = acv.getObject();
+ }
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(obj.getType()).serialize(obj,
+ dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
+ abvs.getLength()));
+ }
- @Override
- public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
- return AqlBinaryIntegerInspector.FACTORY;
- }
+ @Override
+ public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
+ return AqlBinaryIntegerInspector.FACTORY;
+ }
- @Override
- public ITupleParserFactory createTupleParser(ARecordType recType, IParseFileSplitsDecl decl) {
- if (decl.isDelimitedFileFormat()) {
- int n = recType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
- }
- return new NtDelimitedDataTupleParserFactory(recType, fieldParserFactories, decl.getDelimChar());
- } else {
- return new AdmSchemafullRecordParserFactory(recType);
- }
- }
+ @Override
+ public ITupleParserFactory createTupleParser(ARecordType recType,
+ IParseFileSplitsDecl decl) {
+ if (decl.isDelimitedFileFormat()) {
+ int n = recType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException(
+ "No value parser factory for delimited fields of type "
+ + tag);
+ }
+ fieldParserFactories[i] = vpf;
+ }
+ return new NtDelimitedDataTupleParserFactory(recType,
+ fieldParserFactories, decl.getDelimChar());
+ } else {
+ return new AdmSchemafullRecordParserFactory(recType);
+ }
+ }
- @Override
- public ITupleParserFactory createTupleParser(ARecordType recType, boolean delimitedFormat, Character delimiter) {
- if (delimitedFormat) {
- int n = recType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
- }
- return new NtDelimitedDataTupleParserFactory(recType, fieldParserFactories, delimiter);
- } else {
- return new AdmSchemafullRecordParserFactory(recType);
- }
- }
+ @Override
+ public ITupleParserFactory createTupleParser(ARecordType recType,
+ boolean delimitedFormat, Character delimiter) {
+ if (delimitedFormat) {
+ int n = recType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException(
+ "No value parser factory for delimited fields of type "
+ + tag);
+ }
+ fieldParserFactories[i] = vpf;
+ }
+ return new NtDelimitedDataTupleParserFactory(recType,
+ fieldParserFactories, delimiter);
+ } else {
+ return new AdmSchemafullRecordParserFactory(recType);
+ }
+ }
- @Override
- public INullWriterFactory getNullWriterFactory() {
- return AqlNullWriterFactory.INSTANCE;
- }
+ @Override
+ public INullWriterFactory getNullWriterFactory() {
+ return AqlNullWriterFactory.INSTANCE;
+ }
- @Override
- public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
- return new IExpressionEvalSizeComputer() {
- @Override
- public int getEvalSize(ILogicalExpression expr, IVariableEvalSizeEnvironment env)
- throws AlgebricksException {
- switch (expr.getExpressionTag()) {
- case CONSTANT: {
- ConstantExpression c = (ConstantExpression) expr;
- if (c == ConstantExpression.NULL) {
- return 1;
- } else if (c == ConstantExpression.FALSE || c == ConstantExpression.TRUE) {
- return 2;
- } else {
- AsterixConstantValue acv = (AsterixConstantValue) c.getValue();
- IAObject o = acv.getObject();
- switch (o.getType().getTypeTag()) {
- case DOUBLE: {
- return 9;
- }
- case BOOLEAN: {
- return 2;
- }
- case NULL: {
- return 1;
- }
- case INT32: {
- return 5;
- }
- case INT64: {
- return 9;
- }
- default: {
- // TODO
- return -1;
- }
- }
- }
- }
- case FUNCTION_CALL: {
- AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
- if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.TID)) {
- return 5;
- } else {
- // TODO
- return -1;
- }
- }
- default: {
- // TODO
- return -1;
- }
- }
- }
- };
- }
+ @Override
+ public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
+ return new IExpressionEvalSizeComputer() {
+ @Override
+ public int getEvalSize(ILogicalExpression expr,
+ IVariableEvalSizeEnvironment env)
+ throws AlgebricksException {
+ switch (expr.getExpressionTag()) {
+ case CONSTANT: {
+ ConstantExpression c = (ConstantExpression) expr;
+ if (c == ConstantExpression.NULL) {
+ return 1;
+ } else if (c == ConstantExpression.FALSE
+ || c == ConstantExpression.TRUE) {
+ return 2;
+ } else {
+ AsterixConstantValue acv = (AsterixConstantValue) c
+ .getValue();
+ IAObject o = acv.getObject();
+ switch (o.getType().getTypeTag()) {
+ case DOUBLE: {
+ return 9;
+ }
+ case BOOLEAN: {
+ return 2;
+ }
+ case NULL: {
+ return 1;
+ }
+ case INT32: {
+ return 5;
+ }
+ case INT64: {
+ return 9;
+ }
+ default: {
+ // TODO
+ return -1;
+ }
+ }
+ }
+ }
+ case FUNCTION_CALL: {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+ if (f.getFunctionIdentifier().equals(
+ AsterixBuiltinFunctions.TID)) {
+ return 5;
+ } else {
+ // TODO
+ return -1;
+ }
+ }
+ default: {
+ // TODO
+ return -1;
+ }
+ }
+ }
+ };
+ }
- @Override
- public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
- return AqlNormalizedKeyComputerFactoryProvider.INSTANCE;
- }
+ @Override
+ public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
+ return AqlNormalizedKeyComputerFactoryProvider.INSTANCE;
+ }
+
+ @Override
+ public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider() {
+ return AqlBinaryHashFunctionFamilyProvider.INSTANCE;
+ }
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
new file mode 100644
index 0000000..2e64ad4
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
@@ -0,0 +1,1024 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayDeque;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Queue;
+
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexer;
+import edu.uci.ics.asterix.runtime.operators.file.adm.AdmLexerException;
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.IAsterixListBuilder;
+import edu.uci.ics.asterix.builders.OrderedListBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+
+/**
+ * Parser for ADM formatted data.
+ */
+public class ADMDataParser extends AbstractDataParser implements IDataParser {
+
+ protected AdmLexer admLexer;
+ protected ARecordType recordType;
+ protected boolean datasetRec;
+
+ private int nullableFieldId = 0;
+
+ private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
+ private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
+ private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+ private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+
+ private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
+
+ @Override
+ public boolean parse(DataOutput out) throws HyracksDataException {
+ try {
+ return parseAdmInstance((IAType) recordType, datasetRec, out);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException {
+ this.recordType = recordType;
+ this.datasetRec = datasetRec;
+ try {
+ admLexer = new AdmLexer(new java.io.InputStreamReader(in));
+ } catch (IOException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ protected boolean parseAdmInstance(IAType objectType, boolean datasetRec, DataOutput out) throws AsterixException,
+ IOException {
+ int token;
+ try {
+ token = admLexer.next();
+ } catch (AdmLexerException e) {
+ throw new AsterixException(e);
+ }
+ if (token == AdmLexer.TOKEN_EOF) {
+ return false;
+ } else {
+ admFromLexerStream(token, objectType, out, datasetRec);
+ return true;
+ }
+ }
+
+ private void admFromLexerStream(int token, IAType objectType, DataOutput out, Boolean datasetRec)
+ throws AsterixException, IOException {
+
+ switch (token) {
+ case AdmLexer.TOKEN_NULL_LITERAL: {
+ if (checkType(ATypeTag.NULL, objectType, out)) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else
+ throw new AsterixException(" This field can not be null ");
+ break;
+ }
+ case AdmLexer.TOKEN_TRUE_LITERAL: {
+ if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+ booleanSerde.serialize(ABoolean.TRUE, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_BOOLEAN_CONS: {
+ parseConstructor(ATypeTag.BOOLEAN, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_FALSE_LITERAL: {
+ if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+ booleanSerde.serialize(ABoolean.FALSE, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_DOUBLE_LITERAL: {
+ if (checkType(ATypeTag.DOUBLE, objectType, out)) {
+ aDouble.setValue(Double.parseDouble(admLexer.getLastTokenImage()));
+ doubleSerde.serialize(aDouble, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_DOUBLE_CONS: {
+ parseConstructor(ATypeTag.DOUBLE, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_FLOAT_LITERAL: {
+ if (checkType(ATypeTag.FLOAT, objectType, out)) {
+ aFloat.setValue(Float.parseFloat(admLexer.getLastTokenImage()));
+ floatSerde.serialize(aFloat, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_FLOAT_CONS: {
+ parseConstructor(ATypeTag.FLOAT, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_INT8_LITERAL: {
+ if (checkType(ATypeTag.INT8, objectType, out)) {
+ parseInt8(admLexer.getLastTokenImage(), out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_INT8_CONS: {
+ parseConstructor(ATypeTag.INT8, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_INT16_LITERAL: {
+ if (checkType(ATypeTag.INT16, objectType, out)) {
+ parseInt16(admLexer.getLastTokenImage(), out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_INT16_CONS: {
+ parseConstructor(ATypeTag.INT16, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_INT_LITERAL:
+ case AdmLexer.TOKEN_INT32_LITERAL: {
+ if (checkType(ATypeTag.INT32, objectType, out)) {
+ parseInt32(admLexer.getLastTokenImage(), out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_INT32_CONS: {
+ parseConstructor(ATypeTag.INT32, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_INT64_LITERAL: {
+ if (checkType(ATypeTag.INT64, objectType, out)) {
+ parseInt64(admLexer.getLastTokenImage(), out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_INT64_CONS: {
+ parseConstructor(ATypeTag.INT64, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_STRING_LITERAL: {
+ if (checkType(ATypeTag.STRING, objectType, out)) {
+ aString.setValue(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1));
+ stringSerde.serialize(aString, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexer.TOKEN_STRING_CONS: {
+ parseConstructor(ATypeTag.STRING, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_DATE_CONS: {
+ parseConstructor(ATypeTag.DATE, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_TIME_CONS: {
+ parseConstructor(ATypeTag.TIME, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_DATETIME_CONS: {
+ parseConstructor(ATypeTag.DATETIME, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_INTERVAL_DATE_CONS: {
+ try {
+ if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+ if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+ AIntervalSerializerDeserializer.parseDate(admLexer.getLastTokenImage(), out);
+
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+ break;
+ }
+ }
+ }
+ }
+ } catch (AdmLexerException ex) {
+ throw new AsterixException(ex);
+ }
+ throw new AsterixException("Wrong interval data parsing for date interval.");
+ }
+ case AdmLexer.TOKEN_INTERVAL_TIME_CONS: {
+ try {
+ if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+ if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+ AIntervalSerializerDeserializer.parseTime(admLexer.getLastTokenImage(), out);
+
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+ break;
+ }
+ }
+ }
+ }
+ } catch (AdmLexerException ex) {
+ throw new AsterixException(ex);
+ }
+ throw new AsterixException("Wrong interval data parsing for time interval.");
+ }
+ case AdmLexer.TOKEN_INTERVAL_DATETIME_CONS: {
+ try {
+ if (checkType(ATypeTag.INTERVAL, objectType, out)) {
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+ if (admLexer.next() == AdmLexer.TOKEN_STRING_CONS) {
+ AIntervalSerializerDeserializer.parseDatetime(admLexer.getLastTokenImage(), out);
+
+ if (admLexer.next() == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
+ break;
+ }
+ }
+ }
+ }
+ } catch (AdmLexerException ex) {
+ throw new AsterixException(ex);
+ }
+ throw new AsterixException("Wrong interval data parsing for datetime interval.");
+ }
+ case AdmLexer.TOKEN_DURATION_CONS: {
+ parseConstructor(ATypeTag.DURATION, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_POINT_CONS: {
+ parseConstructor(ATypeTag.POINT, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_POINT3D_CONS: {
+ parseConstructor(ATypeTag.POINT3D, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_CIRCLE_CONS: {
+ parseConstructor(ATypeTag.CIRCLE, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_RECTANGLE_CONS: {
+ parseConstructor(ATypeTag.RECTANGLE, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_LINE_CONS: {
+ parseConstructor(ATypeTag.LINE, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_POLYGON_CONS: {
+ parseConstructor(ATypeTag.POLYGON, objectType, out);
+ break;
+ }
+ case AdmLexer.TOKEN_START_UNORDERED_LIST: {
+ if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
+ parseUnorderedList((AUnorderedListType) objectType, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+
+ case AdmLexer.TOKEN_START_ORDERED_LIST: {
+ if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
+ parseOrderedList((AOrderedListType) objectType, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+ case AdmLexer.TOKEN_START_RECORD: {
+ if (checkType(ATypeTag.RECORD, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.RECORD);
+ parseRecord((ARecordType) objectType, out, datasetRec);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+ case AdmLexer.TOKEN_EOF: {
+ break;
+ }
+ default: {
+ throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token) + ".");
+ }
+ }
+ }
+
+ private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
+ try {
+ ADateTimeSerializerDeserializer.parse(datetime, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseDuration(String duration, DataOutput out) throws AsterixException {
+ try {
+ ADurationSerializerDeserializer.parse(duration, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
+
+ if (aObjectType == null) {
+ return null;
+ }
+
+ if (aObjectType.getTypeTag() == tag)
+ return aObjectType;
+
+ if (aObjectType.getTypeTag() == ATypeTag.UNION) {
+ unionList = ((AUnionType) aObjectType).getUnionList();
+ for (int i = 0; i < unionList.size(); i++)
+ if (unionList.get(i).getTypeTag() == tag) {
+ return unionList.get(i);
+ }
+ }
+ return null; // won't get here: callers invoke this only after checkType() matched the tag
+ }
+
+ List<IAType> unionList;
+
+ private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
+
+ if (aObjectType == null)
+ return true;
+
+ if (aObjectType.getTypeTag() != ATypeTag.UNION) {
+ if (expectedTypeTag == aObjectType.getTypeTag())
+ return true;
+ } else { // union
+ unionList = ((AUnionType) aObjectType).getUnionList();
+ for (int i = 0; i < unionList.size(); i++)
+ if (unionList.get(i).getTypeTag() == expectedTypeTag)
+ return true;
+ }
+ return false;
+ }
+
+ private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
+ AsterixException {
+
+ ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
+ ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
+ IARecordBuilder recBuilder = getRecordBuilder();
+
+ // Tracks which closed (schema-declared) fields were seen, to enforce non-null constraints.
+ BitSet nulls = null;
+ if (datasetRec) {
+ if (recType != null) {
+ nulls = new BitSet(recType.getFieldNames().length);
+ recBuilder.reset(recType);
+ } else
+ recBuilder.reset(null);
+ } else if (recType != null) {
+ nulls = new BitSet(recType.getFieldNames().length);
+ recBuilder.reset(recType);
+ } else
+ recBuilder.reset(null);
+
+ recBuilder.init();
+ int token;
+ boolean inRecord = true;
+ boolean expectingRecordField = false;
+ boolean first = true;
+
+ Boolean openRecordField = false;
+ int fieldId = 0;
+ IAType fieldType = null;
+ do {
+ token = nextToken();
+ switch (token) {
+ case AdmLexer.TOKEN_END_RECORD: {
+ if (expectingRecordField) {
+ throw new AsterixException("Found END_RECORD while expecting a record field.");
+ }
+ inRecord = false;
+ break;
+ }
+ case AdmLexer.TOKEN_STRING_LITERAL: {
+ // we've read the name of the field
+ // now read the content
+ fieldNameBuffer.reset();
+ fieldValueBuffer.reset();
+ expectingRecordField = false;
+
+ if (recType != null) {
+ String fldName = admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1);
+ fieldId = recBuilder.getFieldId(fldName);
+ if (fieldId < 0 && !recType.isOpen()) {
+ throw new AsterixException("This record is closed, you can not add extra fields !!");
+ } else if (fieldId < 0 && recType.isOpen()) {
+ aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1));
+ stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+ openRecordField = true;
+ fieldType = null;
+ } else {
+ // a closed field
+ nulls.set(fieldId);
+ fieldType = recType.getFieldTypes()[fieldId];
+ openRecordField = false;
+ }
+ } else {
+ aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1));
+ stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+ openRecordField = true;
+ fieldType = null;
+ }
+
+ token = nextToken();
+ if (token != AdmLexer.TOKEN_COLON) {
+ throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token)
+ + " while expecting \":\".");
+ }
+
+ token = nextToken();
+ this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
+ if (openRecordField) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
+ recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
+ } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
+ if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+ recBuilder.addField(fieldId, fieldValueBuffer);
+ }
+ }
+ } else {
+ recBuilder.addField(fieldId, fieldValueBuffer);
+ }
+
+ break;
+ }
+ case AdmLexer.TOKEN_COMMA: {
+ if (first) {
+ throw new AsterixException("Found COMMA before any record field.");
+ }
+ if (expectingRecordField) {
+ throw new AsterixException("Found COMMA while expecting a record field.");
+ }
+ expectingRecordField = true;
+ break;
+ }
+ default: {
+ throw new AsterixException("Unexpected ADM token kind: " + AdmLexer.tokenKindToString(token)
+ + " while parsing record fields.");
+ }
+ }
+ first = false;
+ } while (inRecord);
+
+ if (recType != null) {
+ nullableFieldId = checkNullConstraints(recType, nulls);
+ if (nullableFieldId != -1)
+ throw new AsterixException("Field " + nullableFieldId + " can not be null");
+ }
+ recBuilder.write(out, true);
+ returnRecordBuilder(recBuilder);
+ returnTempBuffer(fieldNameBuffer);
+ returnTempBuffer(fieldValueBuffer);
+ }
+
+ private int checkNullConstraints(ARecordType recType, BitSet nulls) {
+
+ boolean isNull = false;
+ for (int i = 0; i < recType.getFieldTypes().length; i++)
+ if (nulls.get(i) == false) {
+ IAType type = recType.getFieldTypes()[i];
+ if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
+ return i;
+
+ if (type.getTypeTag() == ATypeTag.UNION) { // union
+ unionList = ((AUnionType) type).getUnionList();
+ for (int j = 0; j < unionList.size(); j++)
+ if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
+ isNull = true;
+ break;
+ }
+ if (!isNull)
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
+
+ ArrayBackedValueStorage itemBuffer = getTempBuffer();
+ OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
+
+ IAType itemType = null;
+ if (oltype != null)
+ itemType = oltype.getItemType();
+ orderedListBuilder.reset(oltype);
+
+ int token;
+ boolean inList = true;
+ boolean expectingListItem = false;
+ boolean first = true;
+ do {
+ token = nextToken();
+ if (token == AdmLexer.TOKEN_END_ORDERED_LIST) {
+ if (expectingListItem) {
+ throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+ }
+ inList = false;
+ } else if (token == AdmLexer.TOKEN_COMMA) {
+ if (first) {
+ throw new AsterixException("Found COMMA before any list item.");
+ }
+ if (expectingListItem) {
+ throw new AsterixException("Found COMMA while expecting a list item.");
+ }
+ expectingListItem = true;
+ } else {
+ expectingListItem = false;
+ itemBuffer.reset();
+
+ admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+ orderedListBuilder.addItem(itemBuffer);
+ }
+ first = false;
+ } while (inList);
+ orderedListBuilder.write(out, true);
+ returnOrderedListBuilder(orderedListBuilder);
+ returnTempBuffer(itemBuffer);
+ }
+
+ /**
+ * Parses an ADM unordered list from the lexer stream and writes its
+ * serialized form to out.
+ *
+ * @param uoltype declared unordered-list type, or null for an open
+ * (untyped) list; when null, items are parsed unconstrained
+ * @param out target for the serialized list
+ * @throws AsterixException on malformed list syntax (dangling comma,
+ * comma before the first item, or premature end)
+ */
+ private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException, AsterixException {
+
+ ArrayBackedValueStorage itemBuffer = getTempBuffer();
+ UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
+
+ IAType itemType = null;
+
+ // Open (schema-less) lists have no declared item type.
+ if (uoltype != null)
+ itemType = uoltype.getItemType();
+ unorderedListBuilder.reset(uoltype);
+
+ int token;
+ boolean inList = true;
+ boolean expectingListItem = false;
+ // 'first' distinguishes "comma before any item" from "comma after an item".
+ boolean first = true;
+ do {
+ token = nextToken();
+ if (token == AdmLexer.TOKEN_END_UNORDERED_LIST) {
+ if (expectingListItem) {
+ throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+ }
+ inList = false;
+ } else if (token == AdmLexer.TOKEN_COMMA) {
+ if (first) {
+ throw new AsterixException("Found COMMA before any list item.");
+ }
+ if (expectingListItem) {
+ throw new AsterixException("Found COMMA while expecting a list item.");
+ }
+ expectingListItem = true;
+ } else {
+ expectingListItem = false;
+ itemBuffer.reset();
+ // Recurse: the current token begins a nested value of itemType.
+ admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+ unorderedListBuilder.addItem(itemBuffer);
+ }
+ first = false;
+ } while (inList);
+ unorderedListBuilder.write(out, true);
+ // NOTE(review): builder and buffer are not returned to their pools when an
+ // exception is thrown above; the pools simply allocate fresh objects later.
+ returnUnorderedListBuilder(unorderedListBuilder);
+ returnTempBuffer(itemBuffer);
+ }
+
+ /**
+ * Advances the lexer and returns the next token kind, converting both
+ * lexer and I/O failures into AsterixException.
+ */
+ private int nextToken() throws AsterixException {
+ try {
+ return admLexer.next();
+ } catch (AdmLexerException e) {
+ throw new AsterixException(e);
+ } catch (IOException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Fetches a pooled RecordBuilder, or allocates a new one if the pool is empty.
+ private IARecordBuilder getRecordBuilder() {
+ RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
+ if (recBuilder != null)
+ return recBuilder;
+ else
+ return new RecordBuilder();
+ }
+
+ // Returns a RecordBuilder to the pool for reuse by later records.
+ private void returnRecordBuilder(IARecordBuilder recBuilder) {
+ this.recordBuilderPool.add(recBuilder);
+ }
+
+ // Fetches a pooled OrderedListBuilder, or allocates one if the pool is empty.
+ private IAsterixListBuilder getOrderedListBuilder() {
+ OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
+ if (orderedListBuilder != null)
+ return orderedListBuilder;
+ else
+ return new OrderedListBuilder();
+ }
+
+ // Returns an OrderedListBuilder to the pool for reuse.
+ private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
+ this.orderedListBuilderPool.add(orderedListBuilder);
+ }
+
+ // Fetches a pooled UnorderedListBuilder, or allocates one if the pool is empty.
+ private IAsterixListBuilder getUnorderedListBuilder() {
+ UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
+ if (unorderedListBuilder != null)
+ return unorderedListBuilder;
+ else
+ return new UnorderedListBuilder();
+ }
+
+ // Returns an UnorderedListBuilder to the pool for reuse.
+ private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
+ this.unorderedListBuilderPool.add(unorderedListBuilder);
+ }
+
+ // Fetches a pooled temporary value buffer, or allocates one if the pool is
+ // empty. Callers must reset() it before writing.
+ private ArrayBackedValueStorage getTempBuffer() {
+ ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
+ if (tmpBaaos != null) {
+ return tmpBaaos;
+ } else {
+ return new ArrayBackedValueStorage();
+ }
+ }
+
+ // Returns a temporary value buffer to the pool for reuse.
+ private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
+ baaosPool.add(tempBaaos);
+ }
+
+ /**
+ * Parses a type-constructor expression of the form TYPE("literal"), e.g.
+ * int32("42") or date("2012-01-01"): dispatches on typeTag to the matching
+ * value parser and writes the serialized value to out.
+ *
+ * @param typeTag tag of the type named by the constructor
+ * @param objectType expected type at this position; used for the type
+ * check and for the mismatch error message
+ * @param out target for the serialized value
+ * @throws AsterixException if the constructor is malformed, the type does
+ * not match, or the literal cannot be parsed
+ */
+ private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
+ try {
+ int token = admLexer.next();
+ if (token == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
+ if (checkType(typeTag, objectType, out)) {
+ token = admLexer.next();
+ if (token == AdmLexer.TOKEN_STRING_LITERAL) {
+ // substring(1, length - 1) strips the surrounding quotes
+ // of the string literal in every case below.
+ switch (typeTag) {
+ case BOOLEAN:
+ parseBoolean(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case INT8:
+ parseInt8(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case INT16:
+ parseInt16(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case INT32:
+ parseInt32(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case INT64:
+ parseInt64(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case FLOAT:
+ aFloat.setValue(Float.parseFloat(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1)));
+ floatSerde.serialize(aFloat, out);
+ break;
+ case DOUBLE:
+ aDouble.setValue(Double.parseDouble(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1)));
+ doubleSerde.serialize(aDouble, out);
+ break;
+ case STRING:
+ aString.setValue(admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1));
+ stringSerde.serialize(aString, out);
+ break;
+ case TIME:
+ parseTime(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case DATE:
+ parseDate(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case DATETIME:
+ parseDatetime(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case DURATION:
+ parseDuration(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case POINT:
+ parsePoint(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case POINT3D:
+ parsePoint3d(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case CIRCLE:
+ parseCircle(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case RECTANGLE:
+ parseRectangle(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case LINE:
+ parseLine(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ case POLYGON:
+ parsePolygon(
+ admLexer.getLastTokenImage().substring(1,
+ admLexer.getLastTokenImage().length() - 1), out);
+ break;
+ default:
+ throw new AsterixException("Missing deserializer method for constructor: "
+ + AdmLexer.tokenKindToString(token) + ".");
+
+ }
+ token = admLexer.next();
+ // Only a properly closed constructor is a success.
+ if (token == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE)
+ return;
+ }
+ }
+ }
+ // NOTE(review): this broad catch also re-wraps AsterixExceptions thrown
+ // inside the switch, nesting them one level deeper.
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ // Any fall-through above (wrong token, failed type check) ends here.
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ }
+
+ /**
+ * Parses the literal "true"/"false" (case-sensitive) and serializes the
+ * matching ABoolean singleton to out; any other input is rejected.
+ */
+ private void parseBoolean(String bool, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of boolean";
+ try {
+ if (bool.equals("true"))
+ booleanSerde.serialize(ABoolean.TRUE, out);
+ else if (bool.equals("false"))
+ booleanSerde.serialize(ABoolean.FALSE, out);
+ else
+ throw new AsterixException(errorMessage);
+ } catch (HyracksDataException e) {
+ // NOTE(review): the original cause 'e' is dropped here; only the
+ // generic message survives.
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ /**
+ * Parses a signed 8-bit integer literal with optional sign and optional
+ * "i8" suffix (e.g. "123", "-5i8") and serializes it as AInt8.
+ *
+ * NOTE(review): Byte.MIN_VALUE (-128) cannot be parsed: digits accumulate
+ * into a positive byte, so 128 overflows to -128 and the 'value < 0'
+ * check rejects it.
+ * NOTE(review): charAt(offset + 1) can throw StringIndexOutOfBoundsException
+ * when the input ends with a bare 'i'.
+ */
+ private void parseInt8(String int8, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int8";
+ try {
+ boolean positive = true;
+ byte value = 0;
+ int offset = 0;
+
+ if (int8.charAt(offset) == '+')
+ offset++;
+ else if (int8.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int8.length(); offset++) {
+ if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
+ value = (byte) (value * 10 + int8.charAt(offset) - '0');
+ else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8' && offset + 2 == int8.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+ aInt8.setValue(value);
+ int8Serde.serialize(aInt8, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ /**
+ * Parses a signed 16-bit integer literal with optional sign and optional
+ * "i16" suffix and serializes it as AInt16.
+ *
+ * NOTE(review): Short.MIN_VALUE (-32768) cannot be parsed — the positive
+ * accumulation overflows and trips the 'value < 0' check.
+ * NOTE(review): charAt(offset + 1/ + 2) can go out of bounds on a
+ * truncated "i1" suffix.
+ */
+ private void parseInt16(String int16, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int16";
+ try {
+ boolean positive = true;
+ short value = 0;
+ int offset = 0;
+
+ if (int16.charAt(offset) == '+')
+ offset++;
+ else if (int16.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int16.length(); offset++) {
+ if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
+ value = (short) (value * 10 + int16.charAt(offset) - '0');
+ else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
+ && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+ aInt16.setValue(value);
+ int16Serde.serialize(aInt16, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ /**
+ * Parses a signed 32-bit integer literal with optional sign and optional
+ * "i32" suffix and serializes it as AInt32.
+ *
+ * NOTE(review): Integer.MIN_VALUE cannot be parsed — the positive
+ * accumulation overflows and trips the 'value < 0' check.
+ * NOTE(review): charAt(offset + 1/ + 2) can go out of bounds on a
+ * truncated "i3" suffix.
+ */
+ private void parseInt32(String int32, DataOutput out) throws AsterixException {
+
+ String errorMessage = "This can not be an instance of int32";
+ try {
+ boolean positive = true;
+ int value = 0;
+ int offset = 0;
+
+ if (int32.charAt(offset) == '+')
+ offset++;
+ else if (int32.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int32.length(); offset++) {
+ if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
+ value = (value * 10 + int32.charAt(offset) - '0');
+ else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
+ && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+
+ aInt32.setValue(value);
+ int32Serde.serialize(aInt32, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ /**
+ * Parses a signed 64-bit integer literal with optional sign and optional
+ * "i64" suffix and serializes it as AInt64.
+ *
+ * NOTE(review): Long.MIN_VALUE cannot be parsed — the positive
+ * accumulation overflows and trips the 'value < 0' check.
+ * NOTE(review): charAt(offset + 1/ + 2) can go out of bounds on a
+ * truncated "i6" suffix.
+ */
+ private void parseInt64(String int64, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int64";
+ try {
+ boolean positive = true;
+ long value = 0;
+ int offset = 0;
+
+ if (int64.charAt(offset) == '+')
+ offset++;
+ else if (int64.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int64.length(); offset++) {
+ if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
+ value = (value * 10 + int64.charAt(offset) - '0');
+ else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
+ && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+
+ aInt64.setValue(value);
+ int64Serde.serialize(aInt64, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ // Parses a point literal via APointSerializerDeserializer; wraps failures
+ // in AsterixException.
+ private void parsePoint(String point, DataOutput out) throws AsterixException {
+ try {
+ APointSerializerDeserializer.parse(point, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a 3D-point literal via APoint3DSerializerDeserializer.
+ private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
+ try {
+ APoint3DSerializerDeserializer.parse(point3d, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a circle literal via ACircleSerializerDeserializer.
+ private void parseCircle(String circle, DataOutput out) throws AsterixException {
+ try {
+ ACircleSerializerDeserializer.parse(circle, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a rectangle literal via ARectangleSerializerDeserializer.
+ private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
+ try {
+ ARectangleSerializerDeserializer.parse(rectangle, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a line literal via ALineSerializerDeserializer.
+ private void parseLine(String line, DataOutput out) throws AsterixException {
+ try {
+ ALineSerializerDeserializer.parse(line, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a polygon literal via APolygonSerializerDeserializer.
+ // NOTE(review): declares IOException unlike the sibling wrappers, though the
+ // visible body only throws via the HyracksDataException re-wrap.
+ private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
+ try {
+ APolygonSerializerDeserializer.parse(polygon, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a time literal via ATimeSerializerDeserializer.
+ private void parseTime(String time, DataOutput out) throws AsterixException {
+ try {
+ ATimeSerializerDeserializer.parse(time, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ // Parses a date literal via ADateSerializerDeserializer.
+ // NOTE(review): declares IOException unlike most sibling wrappers.
+ private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
+ try {
+ ADateSerializerDeserializer.parse(date, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
new file mode 100644
index 0000000..fc2d7ca
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ADouble;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt16;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AMutableDouble;
+import edu.uci.ics.asterix.om.base.AMutableFloat;
+import edu.uci.ics.asterix.om.base.AMutableInt16;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt64;
+import edu.uci.ics.asterix.om.base.AMutableInt8;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+/**
+ * Base class for data parsers. Includes the common set of definitions for
+ * serializers/deserializers for built-in ADM types, plus reusable mutable
+ * value holders so subclasses avoid allocating a new object per parsed field.
+ */
+public abstract class AbstractDataParser implements IDataParser {
+
+ // Reusable mutable wrappers: set the value, then hand to the matching serde.
+ protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
+ protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
+ protected AMutableInt32 aInt32 = new AMutableInt32(0);
+ protected AMutableInt64 aInt64 = new AMutableInt64(0);
+ protected AMutableDouble aDouble = new AMutableDouble(0);
+ protected AMutableFloat aFloat = new AMutableFloat(0);
+ protected AMutableString aString = new AMutableString("");
+ protected AMutableString aStringFieldName = new AMutableString("");
+
+ // Serializers
+ // @SuppressWarnings is needed because the provider returns an untyped
+ // ISerializerDeserializer.
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AFLOAT);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT8);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT16);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT64);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
index 6e83689..cb05529 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
@@ -1,72 +1,91 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
+import java.io.DataOutput;
+import java.io.IOException;
import java.io.InputStream;
+import java.nio.ByteBuffer;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+/**
+ * An Abstract class implementation for ITupleParser. It provides common
+ * functionality involved in parsing data in an external format and packing
+ * frames with formed tuples.
+ */
public abstract class AbstractTupleParser implements ITupleParser {
- // Mutable Types..
- protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
- protected AMutableInt32 aInt32 = new AMutableInt32(0);
- protected AMutableInt64 aInt64 = new AMutableInt64(0);
- protected AMutableDouble aDouble = new AMutableDouble(0);
- protected AMutableFloat aFloat = new AMutableFloat(0);
- protected AMutableString aString = new AMutableString("");
- protected AMutableString aStringFieldName = new AMutableString("");
+ protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+ protected DataOutput dos = tb.getDataOutput();
+ protected final FrameTupleAppender appender;
+ protected final ByteBuffer frame;
+ protected final ARecordType recType;
+ protected final IHyracksTaskContext ctx;
- // Serializers
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ /**
+ * @param ctx task context, used to size the appender and allocate the frame
+ * @param recType type of the records this parser produces
+ */
+ public AbstractTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+ appender = new FrameTupleAppender(ctx.getFrameSize());
+ frame = ctx.allocateFrame();
+ this.recType = recType;
+ this.ctx = ctx;
+ }
-
- @Override
- public abstract void parse(InputStream in, IFrameWriter writer) throws HyracksDataException;
+ // Factory hook: each subclass supplies its format-specific data parser.
+ public abstract IDataParser getDataParser();
+
+ /**
+ * Drives the concrete IDataParser over the input stream: each parsed
+ * record becomes one single-field tuple appended to the current frame;
+ * full frames are flushed to the writer, and a final partial frame is
+ * flushed after the stream is exhausted.
+ */
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ // parse() returns false when the stream holds no further records.
+ while (true) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ addTupleToFrame(writer);
+ }
+ // Flush the last, possibly partial, frame.
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ }
+ }
+
+ /**
+ * Appends the tuple currently in tb to the frame. If the frame is full,
+ * flushes it to the writer and retries on a fresh frame; a tuple larger
+ * than an entire frame raises IllegalStateException.
+ */
+ protected void addTupleToFrame(IFrameWriter writer) throws HyracksDataException {
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+
+ }
+
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
index 3b0d1ab..a9287c8 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
@@ -1,70 +1,28 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+/**
+ * A Tuple parser factory for creating a tuple parser capable of parsing
+ * ADM data.
+ */
public class AdmSchemafullRecordParserFactory implements ITupleParserFactory {
private static final long serialVersionUID = 1L;
@@ -77,930 +35,7 @@
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
- return new ITupleParser() {
- private AdmLexer admLexer;
- private ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- private DataOutput dos = tb.getDataOutput();
- private FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- private ByteBuffer frame = ctx.allocateFrame();
-
- private int nullableFieldId = 0;
-
- private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
- private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
- private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
- private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
- private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
- // Mutable Types..
- private AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- private AMutableInt16 aInt16 = new AMutableInt16((short) 0);
- private AMutableInt32 aInt32 = new AMutableInt32(0);
- private AMutableInt64 aInt64 = new AMutableInt64(0);
- private AMutableDouble aDouble = new AMutableDouble(0);
- private AMutableFloat aFloat = new AMutableFloat(0);
- private AMutableString aString = new AMutableString("");
- private AMutableString aStringFieldName = new AMutableString("");
-
- // Serializers
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
-
- @Override
- public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
- admLexer = new AdmLexer(in);
- appender.reset(frame, true);
- int tupleNum = 0;
- try {
- while (true) {
- tb.reset();
- if (!parseAdmInstance(recType, true, dos)) {
- break;
- }
- tb.addFieldEndOffset();
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- tupleNum++;
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (AsterixException ae) {
- throw new HyracksDataException(ae);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
- }
- }
-
- private boolean parseAdmInstance(IAType objectType, Boolean datasetRec, DataOutput out)
- throws AsterixException, IOException {
- Token token;
- try {
- token = admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- if (token.kind == AdmLexerConstants.EOF) {
- return false;
- } else {
- admFromLexerStream(token, objectType, out, datasetRec);
- return true;
- }
- }
-
- private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
- throws AsterixException, IOException {
-
- switch (token.kind) {
- case AdmLexerConstants.NULL_LITERAL: {
- if (checkType(ATypeTag.NULL, objectType, out)) {
- nullSerde.serialize(ANull.NULL, out);
- } else
- throw new AsterixException(" This field can not be null ");
- break;
- }
- case AdmLexerConstants.TRUE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.TRUE, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.BOOLEAN_CONS: {
- parseConstructor(ATypeTag.BOOLEAN, objectType, out);
- break;
- }
- case AdmLexerConstants.FALSE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.FALSE, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_LITERAL: {
- if (checkType(ATypeTag.DOUBLE, objectType, out)) {
- aDouble.setValue(Double.parseDouble(token.image));
- doubleSerde.serialize(aDouble, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_CONS: {
- parseConstructor(ATypeTag.DOUBLE, objectType, out);
- break;
- }
- case AdmLexerConstants.FLOAT_LITERAL: {
- if (checkType(ATypeTag.FLOAT, objectType, out)) {
- aFloat.setValue(Float.parseFloat(token.image));
- floatSerde.serialize(aFloat, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.FLOAT_CONS: {
- parseConstructor(ATypeTag.FLOAT, objectType, out);
- break;
- }
- case AdmLexerConstants.INT8_LITERAL: {
- if (checkType(ATypeTag.INT8, objectType, out)) {
- parseInt8(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT8_CONS: {
- parseConstructor(ATypeTag.INT8, objectType, out);
- break;
- }
- case AdmLexerConstants.INT16_LITERAL: {
- if (checkType(ATypeTag.INT16, objectType, out)) {
- parseInt16(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT16_CONS: {
- parseConstructor(ATypeTag.INT16, objectType, out);
- break;
- }
- case AdmLexerConstants.INT_LITERAL:
- case AdmLexerConstants.INT32_LITERAL: {
- if (checkType(ATypeTag.INT32, objectType, out)) {
- parseInt32(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT32_CONS: {
- parseConstructor(ATypeTag.INT32, objectType, out);
- break;
- }
- case AdmLexerConstants.INT64_LITERAL: {
- if (checkType(ATypeTag.INT64, objectType, out)) {
- parseInt64(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT64_CONS: {
- parseConstructor(ATypeTag.INT64, objectType, out);
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- if (checkType(ATypeTag.STRING, objectType, out)) {
- aString.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aString, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.STRING_CONS: {
- parseConstructor(ATypeTag.STRING, objectType, out);
- break;
- }
- case AdmLexerConstants.DATE_CONS: {
- parseConstructor(ATypeTag.DATE, objectType, out);
- break;
- }
- case AdmLexerConstants.TIME_CONS: {
- parseConstructor(ATypeTag.TIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DATETIME_CONS: {
- parseConstructor(ATypeTag.DATETIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DURATION_CONS: {
- parseConstructor(ATypeTag.DURATION, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT_CONS: {
- parseConstructor(ATypeTag.POINT, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT3D_CONS: {
- parseConstructor(ATypeTag.POINT3D, objectType, out);
- break;
- }
- case AdmLexerConstants.CIRCLE_CONS: {
- parseConstructor(ATypeTag.CIRCLE, objectType, out);
- break;
- }
- case AdmLexerConstants.RECTANGLE_CONS: {
- parseConstructor(ATypeTag.RECTANGLE, objectType, out);
- break;
- }
- case AdmLexerConstants.LINE_CONS: {
- parseConstructor(ATypeTag.LINE, objectType, out);
- break;
- }
- case AdmLexerConstants.POLYGON_CONS: {
- parseConstructor(ATypeTag.POLYGON, objectType, out);
- break;
- }
- case AdmLexerConstants.START_UNORDERED_LIST: {
- if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
- parseUnorderedList((AUnorderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
-
- case AdmLexerConstants.START_ORDERED_LIST: {
- if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
- parseOrderedList((AOrderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.START_RECORD: {
- if (checkType(ATypeTag.RECORD, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.RECORD);
- parseRecord((ARecordType) objectType, out, datasetRec);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.EOF: {
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + ".");
- }
- }
- }
-
- private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
- try {
- Token token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
- if (checkType(typeTag, objectType, out)) {
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.STRING_LITERAL) {
- switch (typeTag) {
- case BOOLEAN:
- parseBoolean(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT8:
- parseInt8(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT16:
- parseInt16(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT32:
- parseInt32(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT64:
- parseInt64(token.image.substring(1, token.image.length() - 1), out);
- break;
- case FLOAT:
- aFloat.setValue(Float.parseFloat(token.image.substring(1,
- token.image.length() - 1)));
- floatSerde.serialize(aFloat, out);
- break;
- case DOUBLE:
- aDouble.setValue(Double.parseDouble(token.image.substring(1,
- token.image.length() - 1)));
- doubleSerde.serialize(aDouble, out);
- break;
- case STRING:
- aString.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aString, out);
- break;
- case TIME:
- parseTime(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DATE:
- parseDate(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DATETIME:
- parseDatetime(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DURATION:
- parseDuration(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POINT:
- parsePoint(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POINT3D:
- parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
- break;
- case CIRCLE:
- parseCircle(token.image.substring(1, token.image.length() - 1), out);
- break;
- case RECTANGLE:
- parseRectangle(token.image.substring(1, token.image.length() - 1), out);
- break;
- case LINE:
- parseLine(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POLYGON:
- parsePolygon(token.image.substring(1, token.image.length() - 1), out);
- break;
-
- }
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
- return;
- }
- }
- }
- } catch (Exception e) {
- throw new AsterixException(e);
- }
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- }
-
- private void parseBoolean(String bool, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of boolean";
- try {
- if (bool.equals("true"))
- booleanSerde.serialize(ABoolean.TRUE, out);
- else if (bool.equals("false"))
- booleanSerde.serialize(ABoolean.FALSE, out);
- else
- throw new AsterixException(errorMessage);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt8(String int8, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int8";
- try {
- boolean positive = true;
- byte value = 0;
- int offset = 0;
-
- if (int8.charAt(offset) == '+')
- offset++;
- else if (int8.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int8.length(); offset++) {
- if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
- value = (byte) (value * 10 + int8.charAt(offset) - '0');
- else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8'
- && offset + 2 == int8.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt8.setValue(value);
- int8Serde.serialize(aInt8, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt16(String int16, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int16";
- try {
- boolean positive = true;
- short value = 0;
- int offset = 0;
-
- if (int16.charAt(offset) == '+')
- offset++;
- else if (int16.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int16.length(); offset++) {
- if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
- value = (short) (value * 10 + int16.charAt(offset) - '0');
- else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
- && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt16.setValue(value);
- int16Serde.serialize(aInt16, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt32(String int32, DataOutput out) throws AsterixException {
-
- String errorMessage = "This can not be an instance of int32";
- try {
- boolean positive = true;
- int value = 0;
- int offset = 0;
-
- if (int32.charAt(offset) == '+')
- offset++;
- else if (int32.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int32.length(); offset++) {
- if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
- value = (value * 10 + int32.charAt(offset) - '0');
- else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
- && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt32.setValue(value);
- int32Serde.serialize(aInt32, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt64(String int64, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int64";
- try {
- boolean positive = true;
- long value = 0;
- int offset = 0;
-
- if (int64.charAt(offset) == '+')
- offset++;
- else if (int64.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int64.length(); offset++) {
- if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
- value = (value * 10 + int64.charAt(offset) - '0');
- else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
- && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt64.setValue(value);
- int64Serde.serialize(aInt64, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parsePoint(String point, DataOutput out) throws AsterixException {
- try {
- APointSerializerDeserializer.parse(point, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
- try {
- APoint3DSerializerDeserializer.parse(point3d, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseCircle(String circle, DataOutput out) throws AsterixException {
- try {
- ACircleSerializerDeserializer.parse(circle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
- try {
- ARectangleSerializerDeserializer.parse(rectangle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseLine(String line, DataOutput out) throws AsterixException {
- try {
- ALineSerializerDeserializer.parse(line, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
- try {
- APolygonSerializerDeserializer.parse(polygon, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseTime(String time, DataOutput out) throws AsterixException {
- try {
- ATimeSerializerDeserializer.parse(time, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
- try {
- ADateSerializerDeserializer.parse(date, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
- try {
- ADateTimeSerializerDeserializer.parse(datetime, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDuration(String duration, DataOutput out) throws AsterixException {
- try {
- ADurationSerializerDeserializer.parse(duration, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
-
- }
-
- private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
- if (aObjectType == null) {
- return null;
- }
-
- if (aObjectType.getTypeTag() == tag)
- return aObjectType;
-
- if (aObjectType.getTypeTag() == ATypeTag.UNION) {
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == tag) {
- return unionList.get(i);
- }
- }
- return null; // wont get here
- }
-
- List<IAType> unionList;
-
- private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
-
- if (aObjectType == null)
- return true;
-
- if (aObjectType.getTypeTag() != ATypeTag.UNION) {
- if (expectedTypeTag == aObjectType.getTypeTag())
- return true;
- } else { // union
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == expectedTypeTag)
- return true;
- }
- return false;
- }
-
- private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
- AsterixException {
-
- ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
- ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
- IARecordBuilder recBuilder = getRecordBuilder();
-
- // Boolean[] nulls = null;
- BitSet nulls = null;
- if (datasetRec) {
- if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
- } else if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
-
- recBuilder.init();
- Token token = null;
- boolean inRecord = true;
- boolean expectingRecordField = false;
- boolean first = true;
-
- Boolean openRecordField = false;
- int fieldId = 0;
- IAType fieldType = null;
- do {
- token = nextToken();
- switch (token.kind) {
- case AdmLexerConstants.END_RECORD: {
- if (expectingRecordField) {
- throw new AsterixException("Found END_RECORD while expecting a record field.");
- }
- inRecord = false;
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- // we've read the name of the field
- // now read the content
- fieldNameBuffer.reset();
- fieldValueBuffer.reset();
- expectingRecordField = false;
-
- if (recType != null) {
- String fldName = token.image.substring(1, token.image.length() - 1);
- fieldId = recBuilder.getFieldId(fldName);
- if (fieldId < 0 && !recType.isOpen()) {
- throw new AsterixException("This record is closed, you can not add extra fields !!");
- } else if (fieldId < 0 && recType.isOpen()) {
- aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- } else {
- // a closed field
- nulls.set(fieldId);
- fieldType = recType.getFieldTypes()[fieldId];
- openRecordField = false;
- }
- } else {
- aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- }
-
- token = nextToken();
- if (token.kind != AdmLexerConstants.COLON) {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
- }
-
- token = nextToken();
- this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
- if (openRecordField) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
- recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
- } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
- if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
- }
- } else {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
-
- break;
- }
- case AdmLexerConstants.COMMA: {
- if (first) {
- throw new AsterixException("Found COMMA before any record field.");
- }
- if (expectingRecordField) {
- throw new AsterixException("Found COMMA while expecting a record field.");
- }
- expectingRecordField = true;
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + " while parsing record fields.");
- }
- }
- first = false;
- } while (inRecord);
-
- if (recType != null) {
- nullableFieldId = checkNullConstraints(recType, nulls);
- if (nullableFieldId != -1)
- throw new AsterixException("Field " + nullableFieldId + " can not be null");
- }
- recBuilder.write(out, true);
- returnRecordBuilder(recBuilder);
- returnTempBuffer(fieldNameBuffer);
- returnTempBuffer(fieldValueBuffer);
- }
-
- private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
- boolean isNull = false;
- for (int i = 0; i < recType.getFieldTypes().length; i++)
- if (nulls.get(i) == false) {
- IAType type = recType.getFieldTypes()[i];
- if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
- return i;
-
- if (type.getTypeTag() == ATypeTag.UNION) { // union
- unionList = ((AUnionType) type).getUnionList();
- for (int j = 0; j < unionList.size(); j++)
- if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
- isNull = true;
- break;
- }
- if (!isNull)
- return i;
- }
- }
- return -1;
- }
-
- private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
- IAType itemType = null;
- if (oltype != null)
- itemType = oltype.getItemType();
- orderedListBuilder.reset(oltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException("Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException("Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException("Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
-
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
- orderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- orderedListBuilder.write(out, true);
- returnOrderedListBuilder(orderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException,
- AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
- IAType itemType = null;
-
- if (uoltype != null)
- itemType = uoltype.getItemType();
- unorderedListBuilder.reset(uoltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException("Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException("Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException("Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
- unorderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- unorderedListBuilder.write(out, true);
- returnUnorderedListBuilder(unorderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private Token nextToken() throws AsterixException {
- try {
- return admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- }
-
- private IARecordBuilder getRecordBuilder() {
- RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
- if (recBuilder != null)
- return recBuilder;
- else
- return new RecordBuilder();
- }
-
- private void returnRecordBuilder(IARecordBuilder recBuilder) {
- this.recordBuilderPool.add(recBuilder);
- }
-
- private IAsterixListBuilder getOrderedListBuilder() {
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
- if (orderedListBuilder != null)
- return orderedListBuilder;
- else
- return new OrderedListBuilder();
- }
-
- private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
- this.orderedListBuilderPool.add(orderedListBuilder);
- }
-
- private IAsterixListBuilder getUnorderedListBuilder() {
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
- if (unorderedListBuilder != null)
- return unorderedListBuilder;
- else
- return new UnorderedListBuilder();
- }
-
- private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
- this.unorderedListBuilderPool.add(unorderedListBuilder);
- }
-
- private ArrayBackedValueStorage getTempBuffer() {
- ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
- if (tmpBaaos != null) {
- return tmpBaaos;
- } else {
- return new ArrayBackedValueStorage();
- }
- }
-
- private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
- baaosPool.add(tempBaaos);
- }
- };
+ return new AdmTupleParser(ctx, recType);
}
+
}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
index 65223c1..9be4c00 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
@@ -1,1046 +1,35 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-public class AdmTupleParser extends AbstractTupleParser {
+/**
+ * An extension of AbstractTupleParser that provides functionality for
+ * parsing ADM formatted input.
+ */
+public class AdmTupleParser extends AbstractTupleParser {
- protected AdmLexer admLexer;
- protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- protected DataOutput dos = tb.getDataOutput();
- protected final FrameTupleAppender appender;
- protected final ByteBuffer frame;
- protected final ARecordType recType;
+ public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+ super(ctx, recType);
+ }
- private int nullableFieldId = 0;
-
- private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
- private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
- private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
- private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
- private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
-
- public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
- appender = new FrameTupleAppender(ctx.getFrameSize());
- frame = ctx.allocateFrame();
- this.recType = recType;
-
- }
-
- @Override
- public void parse(InputStream in, IFrameWriter writer)
- throws HyracksDataException {
- admLexer = new AdmLexer(in);
- appender.reset(frame, true);
- int tupleNum = 0;
- try {
- while (true) {
- tb.reset();
- if (!parseAdmInstance(recType, true, dos)) {
- break;
- }
- tb.addFieldEndOffset();
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- tupleNum++;
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (AsterixException ae) {
- throw new HyracksDataException(ae);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
- }
- }
-
- protected boolean parseAdmInstance(IAType objectType, Boolean datasetRec,
- DataOutput out) throws AsterixException, IOException {
- Token token;
- try {
- token = admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- if (token.kind == AdmLexerConstants.EOF) {
- return false;
- } else {
- admFromLexerStream(token, objectType, out, datasetRec);
- return true;
- }
- }
-
- private void admFromLexerStream(Token token, IAType objectType,
- DataOutput out, Boolean datasetRec) throws AsterixException,
- IOException {
-
- switch (token.kind) {
- case AdmLexerConstants.NULL_LITERAL: {
- if (checkType(ATypeTag.NULL, objectType, out)) {
- nullSerde.serialize(ANull.NULL, out);
- } else
- throw new AsterixException(" This field can not be null ");
- break;
- }
- case AdmLexerConstants.TRUE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.TRUE, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.BOOLEAN_CONS: {
- parseConstructor(ATypeTag.BOOLEAN, objectType, out);
- break;
- }
- case AdmLexerConstants.FALSE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.FALSE, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_LITERAL: {
- if (checkType(ATypeTag.DOUBLE, objectType, out)) {
- aDouble.setValue(Double.parseDouble(token.image));
- doubleSerde.serialize(aDouble, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_CONS: {
- parseConstructor(ATypeTag.DOUBLE, objectType, out);
- break;
- }
- case AdmLexerConstants.FLOAT_LITERAL: {
- if (checkType(ATypeTag.FLOAT, objectType, out)) {
- aFloat.setValue(Float.parseFloat(token.image));
- floatSerde.serialize(aFloat, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.FLOAT_CONS: {
- parseConstructor(ATypeTag.FLOAT, objectType, out);
- break;
- }
- case AdmLexerConstants.INT8_LITERAL: {
- if (checkType(ATypeTag.INT8, objectType, out)) {
- parseInt8(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT8_CONS: {
- parseConstructor(ATypeTag.INT8, objectType, out);
- break;
- }
- case AdmLexerConstants.INT16_LITERAL: {
- if (checkType(ATypeTag.INT16, objectType, out)) {
- parseInt16(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT16_CONS: {
- parseConstructor(ATypeTag.INT16, objectType, out);
- break;
- }
- case AdmLexerConstants.INT_LITERAL:
- case AdmLexerConstants.INT32_LITERAL: {
- if (checkType(ATypeTag.INT32, objectType, out)) {
- parseInt32(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT32_CONS: {
- parseConstructor(ATypeTag.INT32, objectType, out);
- break;
- }
- case AdmLexerConstants.INT64_LITERAL: {
- if (checkType(ATypeTag.INT64, objectType, out)) {
- parseInt64(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT64_CONS: {
- parseConstructor(ATypeTag.INT64, objectType, out);
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- if (checkType(ATypeTag.STRING, objectType, out)) {
- aString.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aString, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.STRING_CONS: {
- parseConstructor(ATypeTag.STRING, objectType, out);
- break;
- }
- case AdmLexerConstants.DATE_CONS: {
- parseConstructor(ATypeTag.DATE, objectType, out);
- break;
- }
- case AdmLexerConstants.TIME_CONS: {
- parseConstructor(ATypeTag.TIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DATETIME_CONS: {
- parseConstructor(ATypeTag.DATETIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DURATION_CONS: {
- parseConstructor(ATypeTag.DURATION, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT_CONS: {
- parseConstructor(ATypeTag.POINT, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT3D_CONS: {
- parseConstructor(ATypeTag.POINT3D, objectType, out);
- break;
- }
- case AdmLexerConstants.CIRCLE_CONS: {
- parseConstructor(ATypeTag.CIRCLE, objectType, out);
- break;
- }
- case AdmLexerConstants.RECTANGLE_CONS: {
- parseConstructor(ATypeTag.RECTANGLE, objectType, out);
- break;
- }
- case AdmLexerConstants.LINE_CONS: {
- parseConstructor(ATypeTag.LINE, objectType, out);
- break;
- }
- case AdmLexerConstants.POLYGON_CONS: {
- parseConstructor(ATypeTag.POLYGON, objectType, out);
- break;
- }
- case AdmLexerConstants.START_UNORDERED_LIST: {
- if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
- parseUnorderedList((AUnorderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
-
- case AdmLexerConstants.START_ORDERED_LIST: {
- if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
- parseOrderedList((AOrderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.START_RECORD: {
- if (checkType(ATypeTag.RECORD, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.RECORD);
- parseRecord((ARecordType) objectType, out, datasetRec);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.EOF: {
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + ".");
- }
- }
- }
-
- private void parseDatetime(String datetime, DataOutput out)
- throws AsterixException, IOException {
- try {
- ADateTimeSerializerDeserializer.parse(datetime, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDuration(String duration, DataOutput out)
- throws AsterixException {
- try {
- ADurationSerializerDeserializer.parse(duration, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
-
- }
-
- private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
- if (aObjectType == null) {
- return null;
- }
-
- if (aObjectType.getTypeTag() == tag)
- return aObjectType;
-
- if (aObjectType.getTypeTag() == ATypeTag.UNION) {
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == tag) {
- return unionList.get(i);
- }
- }
- return null; // wont get here
- }
-
- List<IAType> unionList;
-
- private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType,
- DataOutput out) throws IOException {
-
- if (aObjectType == null)
- return true;
-
- if (aObjectType.getTypeTag() != ATypeTag.UNION) {
- if (expectedTypeTag == aObjectType.getTypeTag())
- return true;
- } else { // union
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == expectedTypeTag)
- return true;
- }
- return false;
- }
-
- private void parseRecord(ARecordType recType, DataOutput out,
- Boolean datasetRec) throws IOException, AsterixException {
-
- ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
- ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
- IARecordBuilder recBuilder = getRecordBuilder();
-
- // Boolean[] nulls = null;
- BitSet nulls = null;
- if (datasetRec) {
- if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
- } else if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
-
- recBuilder.init();
- Token token = null;
- boolean inRecord = true;
- boolean expectingRecordField = false;
- boolean first = true;
-
- Boolean openRecordField = false;
- int fieldId = 0;
- IAType fieldType = null;
- do {
- token = nextToken();
- switch (token.kind) {
- case AdmLexerConstants.END_RECORD: {
- if (expectingRecordField) {
- throw new AsterixException(
- "Found END_RECORD while expecting a record field.");
- }
- inRecord = false;
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- // we've read the name of the field
- // now read the content
- fieldNameBuffer.reset();
- fieldValueBuffer.reset();
- expectingRecordField = false;
-
- if (recType != null) {
- String fldName = token.image.substring(1,
- token.image.length() - 1);
- fieldId = recBuilder.getFieldId(fldName);
- if (fieldId < 0 && !recType.isOpen()) {
- throw new AsterixException(
- "This record is closed, you can not add extra fields !!");
- } else if (fieldId < 0 && recType.isOpen()) {
- aStringFieldName.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aStringFieldName,
- fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- } else {
- // a closed field
- nulls.set(fieldId);
- fieldType = recType.getFieldTypes()[fieldId];
- openRecordField = false;
- }
- } else {
- aStringFieldName.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aStringFieldName,
- fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- }
-
- token = nextToken();
- if (token.kind != AdmLexerConstants.COLON) {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind)
- + " while expecting \":\".");
- }
-
- token = nextToken();
- this.admFromLexerStream(token, fieldType,
- fieldValueBuffer.getDataOutput(), false);
- if (openRecordField) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
- .serialize())
- recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
- } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
- if (NonTaggedFormatUtil
- .isOptionalField((AUnionType) recType
- .getFieldTypes()[fieldId])) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
- .serialize()) {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
- }
- } else {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
-
- break;
- }
- case AdmLexerConstants.COMMA: {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any record field.");
- }
- if (expectingRecordField) {
- throw new AsterixException(
- "Found COMMA while expecting a record field.");
- }
- expectingRecordField = true;
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind)
- + " while parsing record fields.");
- }
- }
- first = false;
- } while (inRecord);
-
- if (recType != null) {
- nullableFieldId = checkNullConstraints(recType, nulls);
- if (nullableFieldId != -1)
- throw new AsterixException("Field " + nullableFieldId
- + " can not be null");
- }
- recBuilder.write(out, true);
- returnRecordBuilder(recBuilder);
- returnTempBuffer(fieldNameBuffer);
- returnTempBuffer(fieldValueBuffer);
- }
-
- private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
- boolean isNull = false;
- for (int i = 0; i < recType.getFieldTypes().length; i++)
- if (nulls.get(i) == false) {
- IAType type = recType.getFieldTypes()[i];
- if (type.getTypeTag() != ATypeTag.NULL
- && type.getTypeTag() != ATypeTag.UNION)
- return i;
-
- if (type.getTypeTag() == ATypeTag.UNION) { // union
- unionList = ((AUnionType) type).getUnionList();
- for (int j = 0; j < unionList.size(); j++)
- if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
- isNull = true;
- break;
- }
- if (!isNull)
- return i;
- }
- }
- return -1;
- }
-
- private void parseOrderedList(AOrderedListType oltype, DataOutput out)
- throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
- IAType itemType = null;
- if (oltype != null)
- itemType = oltype.getItemType();
- orderedListBuilder.reset(oltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException(
- "Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException(
- "Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
-
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
- false);
- orderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- orderedListBuilder.write(out, true);
- returnOrderedListBuilder(orderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out)
- throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
- IAType itemType = null;
-
- if (uoltype != null)
- itemType = uoltype.getItemType();
- unorderedListBuilder.reset(uoltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException(
- "Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException(
- "Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
- false);
- unorderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- unorderedListBuilder.write(out, true);
- returnUnorderedListBuilder(unorderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private Token nextToken() throws AsterixException {
- try {
- return admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- }
-
- private IARecordBuilder getRecordBuilder() {
- RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
- if (recBuilder != null)
- return recBuilder;
- else
- return new RecordBuilder();
- }
-
- private void returnRecordBuilder(IARecordBuilder recBuilder) {
- this.recordBuilderPool.add(recBuilder);
- }
-
- private IAsterixListBuilder getOrderedListBuilder() {
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool
- .poll();
- if (orderedListBuilder != null)
- return orderedListBuilder;
- else
- return new OrderedListBuilder();
- }
-
- private void returnOrderedListBuilder(
- IAsterixListBuilder orderedListBuilder) {
- this.orderedListBuilderPool.add(orderedListBuilder);
- }
-
- private IAsterixListBuilder getUnorderedListBuilder() {
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool
- .poll();
- if (unorderedListBuilder != null)
- return unorderedListBuilder;
- else
- return new UnorderedListBuilder();
- }
-
- private void returnUnorderedListBuilder(
- IAsterixListBuilder unorderedListBuilder) {
- this.unorderedListBuilderPool.add(unorderedListBuilder);
- }
-
- private ArrayBackedValueStorage getTempBuffer() {
- ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
- if (tmpBaaos != null) {
- return tmpBaaos;
- } else {
- return new ArrayBackedValueStorage();
- }
- }
-
- private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
- baaosPool.add(tempBaaos);
- }
-
- private void parseConstructor(ATypeTag typeTag, IAType objectType,
- DataOutput out) throws AsterixException {
- try {
- Token token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
- if (checkType(typeTag, objectType, out)) {
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.STRING_LITERAL) {
- switch (typeTag) {
- case BOOLEAN:
- parseBoolean(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT8:
- parseInt8(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT16:
- parseInt16(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT32:
- parseInt32(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT64:
- parseInt64(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case FLOAT:
- aFloat.setValue(Float.parseFloat(token.image
- .substring(1, token.image.length() - 1)));
- floatSerde.serialize(aFloat, out);
- break;
- case DOUBLE:
- aDouble.setValue(Double.parseDouble(token.image
- .substring(1, token.image.length() - 1)));
- doubleSerde.serialize(aDouble, out);
- break;
- case STRING:
- aString.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aString, out);
- break;
- case TIME:
- parseTime(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DATE:
- parseDate(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DATETIME:
- parseDatetime(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DURATION:
- parseDuration(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POINT:
- parsePoint(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POINT3D:
- parsePoint3d(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case CIRCLE:
- parseCircle(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case RECTANGLE:
- parseRectangle(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case LINE:
- parseLine(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POLYGON:
- parsePolygon(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
-
- }
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
- return;
- }
- }
- }
- } catch (Exception e) {
- throw new AsterixException(e);
- }
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- }
-
- private void parseBoolean(String bool, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of boolean";
- try {
- if (bool.equals("true"))
- booleanSerde.serialize(ABoolean.TRUE, out);
- else if (bool.equals("false"))
- booleanSerde.serialize(ABoolean.FALSE, out);
- else
- throw new AsterixException(errorMessage);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt8(String int8, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int8";
- try {
- boolean positive = true;
- byte value = 0;
- int offset = 0;
-
- if (int8.charAt(offset) == '+')
- offset++;
- else if (int8.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int8.length(); offset++) {
- if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
- value = (byte) (value * 10 + int8.charAt(offset) - '0');
- else if (int8.charAt(offset) == 'i'
- && int8.charAt(offset + 1) == '8'
- && offset + 2 == int8.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt8.setValue(value);
- int8Serde.serialize(aInt8, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt16(String int16, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of int16";
- try {
- boolean positive = true;
- short value = 0;
- int offset = 0;
-
- if (int16.charAt(offset) == '+')
- offset++;
- else if (int16.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int16.length(); offset++) {
- if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
- value = (short) (value * 10 + int16.charAt(offset) - '0');
- else if (int16.charAt(offset) == 'i'
- && int16.charAt(offset + 1) == '1'
- && int16.charAt(offset + 2) == '6'
- && offset + 3 == int16.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt16.setValue(value);
- int16Serde.serialize(aInt16, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt32(String int32, DataOutput out)
- throws AsterixException {
-
- String errorMessage = "This can not be an instance of int32";
- try {
- boolean positive = true;
- int value = 0;
- int offset = 0;
-
- if (int32.charAt(offset) == '+')
- offset++;
- else if (int32.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int32.length(); offset++) {
- if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
- value = (value * 10 + int32.charAt(offset) - '0');
- else if (int32.charAt(offset) == 'i'
- && int32.charAt(offset + 1) == '3'
- && int32.charAt(offset + 2) == '2'
- && offset + 3 == int32.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt32.setValue(value);
- int32Serde.serialize(aInt32, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt64(String int64, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of int64";
- try {
- boolean positive = true;
- long value = 0;
- int offset = 0;
-
- if (int64.charAt(offset) == '+')
- offset++;
- else if (int64.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int64.length(); offset++) {
- if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
- value = (value * 10 + int64.charAt(offset) - '0');
- else if (int64.charAt(offset) == 'i'
- && int64.charAt(offset + 1) == '6'
- && int64.charAt(offset + 2) == '4'
- && offset + 3 == int64.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt64.setValue(value);
- int64Serde.serialize(aInt64, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parsePoint(String point, DataOutput out)
- throws AsterixException {
- try {
- APointSerializerDeserializer.parse(point, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePoint3d(String point3d, DataOutput out)
- throws AsterixException {
- try {
- APoint3DSerializerDeserializer.parse(point3d, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseCircle(String circle, DataOutput out)
- throws AsterixException {
- try {
- ACircleSerializerDeserializer.parse(circle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseRectangle(String rectangle, DataOutput out)
- throws AsterixException {
- try {
- ARectangleSerializerDeserializer.parse(rectangle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseLine(String line, DataOutput out) throws AsterixException {
- try {
- ALineSerializerDeserializer.parse(line, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePolygon(String polygon, DataOutput out)
- throws AsterixException, IOException {
- try {
- APolygonSerializerDeserializer.parse(polygon, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseTime(String time, DataOutput out) throws AsterixException {
- try {
- ATimeSerializerDeserializer.parse(time, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDate(String date, DataOutput out)
- throws AsterixException, IOException {
- try {
- ADateSerializerDeserializer.parse(date, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
+ @Override
+ public IDataParser getDataParser() {
+ return new ADMDataParser();
+ }
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
new file mode 100644
index 0000000..c9560fe
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Arrays;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class DelimitedDataParser extends AbstractDataParser implements IDataParser {
+
+ protected final IValueParserFactory[] valueParserFactories;
+ protected final char fieldDelimiter;
+ protected final ARecordType recordType;
+
+ private IARecordBuilder recBuilder;
+ private ArrayBackedValueStorage fieldValueBuffer;
+ private DataOutput fieldValueBufferOutput;
+ private IValueParser[] valueParsers;
+ private FieldCursor cursor;
+ private byte[] fieldTypeTags;
+ private int[] fldIds;
+ private ArrayBackedValueStorage[] nameBuffers;
+
+ public DelimitedDataParser(ARecordType recordType, IValueParserFactory[] valueParserFactories, char fieldDelimter) {
+ this.recordType = recordType;
+ this.valueParserFactories = valueParserFactories;
+ this.fieldDelimiter = fieldDelimter;
+ }
+
+ @Override
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+ IOException {
+
+ valueParsers = new IValueParser[valueParserFactories.length];
+ for (int i = 0; i < valueParserFactories.length; ++i) {
+ valueParsers[i] = valueParserFactories[i].createValueParser();
+ }
+
+ fieldValueBuffer = new ArrayBackedValueStorage();
+ fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
+ recBuilder = new RecordBuilder();
+ recBuilder.reset(recordType);
+ recBuilder.init();
+
+ int n = recordType.getFieldNames().length;
+ fieldTypeTags = new byte[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+ fieldTypeTags[i] = tag.serialize();
+ }
+
+ fldIds = new int[n];
+ nameBuffers = new ArrayBackedValueStorage[n];
+ AMutableString str = new AMutableString(null);
+ for (int i = 0; i < n; i++) {
+ String name = recordType.getFieldNames()[i];
+ fldIds[i] = recBuilder.getFieldId(name);
+ if (fldIds[i] < 0) {
+ if (!recordType.isOpen()) {
+ throw new HyracksDataException("Illegal field " + name + " in closed type " + recordType);
+ } else {
+ nameBuffers[i] = new ArrayBackedValueStorage();
+ fieldNameToBytes(name, str, nameBuffers[i]);
+ }
+ }
+ }
+
+ cursor = new FieldCursor(new InputStreamReader(in));
+
+ }
+
+ @Override
+ public boolean parse(DataOutput out) throws AsterixException, IOException {
+
+ if (cursor.nextRecord()) {
+ recBuilder.reset(recordType);
+ recBuilder.init();
+ for (int i = 0; i < valueParsers.length; ++i) {
+ if (!cursor.nextField()) {
+ break;
+ }
+ fieldValueBuffer.reset();
+ fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
+ valueParsers[i]
+ .parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, fieldValueBufferOutput);
+ if (fldIds[i] < 0) {
+ recBuilder.addField(nameBuffers[i], fieldValueBuffer);
+ } else {
+ recBuilder.addField(fldIds[i], fieldValueBuffer);
+ }
+ }
+ recBuilder.write(out, true);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ protected void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
+ throws HyracksDataException {
+ buffer.reset();
+ DataOutput out = buffer.getDataOutput();
+ str.setValue(fieldName);
+ try {
+ stringSerde.serialize(str, out);
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ protected enum State {
+ INIT,
+ IN_RECORD,
+ EOR,
+ CR,
+ EOF
+ }
+
+ protected class FieldCursor {
+ private static final int INITIAL_BUFFER_SIZE = 4096;
+ private static final int INCREMENT = 4096;
+
+ private final Reader in;
+
+ private char[] buffer;
+ private int start;
+ private int end;
+ private State state;
+
+ private int fStart;
+ private int fEnd;
+
+ public FieldCursor(Reader in) {
+ this.in = in;
+ buffer = new char[INITIAL_BUFFER_SIZE];
+ start = 0;
+ end = 0;
+ state = State.INIT;
+ }
+
+ public boolean nextRecord() throws IOException {
+ while (true) {
+ switch (state) {
+ case INIT:
+ boolean eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ } else {
+ state = State.IN_RECORD;
+ return true;
+ }
+
+ case IN_RECORD:
+ int p = start;
+ while (true) {
+ if (p >= end) {
+ int s = start;
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return start < end;
+ }
+ p -= (s - start);
+ }
+ char ch = buffer[p];
+ if (ch == '\n') {
+ start = p + 1;
+ state = State.EOR;
+ break;
+ } else if (ch == '\r') {
+ start = p + 1;
+ state = State.CR;
+ break;
+ }
+ ++p;
+ }
+ break;
+
+ case CR:
+ if (start >= end) {
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ }
+ }
+ char ch = buffer[start];
+ if (ch == '\n') {
+ ++start;
+ state = State.EOR;
+ } else {
+ state = State.IN_RECORD;
+ return true;
+ }
+
+ case EOR:
+ if (start >= end) {
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ }
+ }
+ state = State.IN_RECORD;
+ return start < end;
+
+ case EOF:
+ return false;
+ }
+ }
+ }
+
+ public boolean nextField() throws IOException {
+ switch (state) {
+ case INIT:
+ case EOR:
+ case EOF:
+ case CR:
+ return false;
+
+ case IN_RECORD:
+ boolean eof;
+ int p = start;
+ while (true) {
+ if (p >= end) {
+ int s = start;
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return true;
+ }
+ p -= (s - start);
+ }
+ char ch = buffer[p];
+ if (ch == fieldDelimiter) {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ return true;
+ } else if (ch == '\n') {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ state = State.EOR;
+ return true;
+ } else if (ch == '\r') {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ state = State.CR;
+ return true;
+ }
+ ++p;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ protected boolean readMore() throws IOException {
+ if (start > 0) {
+ System.arraycopy(buffer, start, buffer, 0, end - start);
+ }
+ end -= start;
+ start = 0;
+
+ if (end == buffer.length) {
+ buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
+ }
+
+ int n = in.read(buffer, end, buffer.length - end);
+ if (n < 0) {
+ return false;
+ }
+ end += n;
+ return true;
+ }
+
+ public int getfStart() {
+ return fStart;
+ }
+
+ public void setfStart(int fStart) {
+ this.fStart = fStart;
+ }
+
+ public int getfEnd() {
+ return fEnd;
+ }
+
+ public void setfEnd(int fEnd) {
+ this.fEnd = fEnd;
+ }
+
+ public char[] getBuffer() {
+ return buffer;
+ }
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
index abd2ade..c029b64 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
@@ -1,331 +1,40 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+/**
+ * An extension of AbstractTupleParser that provides functionality for
+ * parsing delimited files.
+ */
public class DelimitedDataTupleParser extends AbstractTupleParser {
- protected final IValueParserFactory[] valueParserFactories;
- protected final char fieldDelimiter;
- protected final IHyracksTaskContext ctx;
- protected final ARecordType recType;
- protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- protected DataOutput recDos = tb.getDataOutput();
- protected final FrameTupleAppender appender;
- protected final ByteBuffer frame;
-
+ private final DelimitedDataParser dataParser;
- public DelimitedDataTupleParser(IHyracksTaskContext ctx,
- ARecordType recType, IValueParserFactory[] valueParserFactories,
- char fieldDelimter) {
- this.valueParserFactories = valueParserFactories;
- this.fieldDelimiter = fieldDelimter;
- this.ctx = ctx;
- this.recType = recType;
- appender = new FrameTupleAppender(ctx.getFrameSize());
- frame = ctx.allocateFrame();
- }
+ public DelimitedDataTupleParser(IHyracksTaskContext ctx, ARecordType recType,
+ IValueParserFactory[] valueParserFactories, char fieldDelimter) {
+ super(ctx, recType);
+ dataParser = new DelimitedDataParser(recType, valueParserFactories, fieldDelimter);
+ }
- @Override
- public void parse(InputStream in, IFrameWriter writer)
- throws HyracksDataException {
- try {
- IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
- for (int i = 0; i < valueParserFactories.length; ++i) {
- valueParsers[i] = valueParserFactories[i].createValueParser();
- }
-
- appender.reset(frame, true);
-
-
- ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
- DataOutput fieldValueBufferOutput = fieldValueBuffer
- .getDataOutput();
- IARecordBuilder recBuilder = new RecordBuilder();
- recBuilder.reset(recType);
- recBuilder.init();
-
- int n = recType.getFieldNames().length;
- byte[] fieldTypeTags = new byte[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- fieldTypeTags[i] = tag.serialize();
- }
-
- int[] fldIds = new int[n];
- ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
- AMutableString str = new AMutableString(null);
- for (int i = 0; i < n; i++) {
- String name = recType.getFieldNames()[i];
- fldIds[i] = recBuilder.getFieldId(name);
- if (fldIds[i] < 0) {
- if (!recType.isOpen()) {
- throw new HyracksDataException("Illegal field " + name
- + " in closed type " + recType);
- } else {
- nameBuffers[i] = new ArrayBackedValueStorage();
- fieldNameToBytes(name, str, nameBuffers[i]);
- }
- }
- }
-
- FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
- while (cursor.nextRecord()) {
- tb.reset();
- recBuilder.reset(recType);
- recBuilder.init();
-
- for (int i = 0; i < valueParsers.length; ++i) {
- if (!cursor.nextField()) {
- break;
- }
- fieldValueBuffer.reset();
- fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
- valueParsers[i]
- .parse(cursor.buffer, cursor.fStart, cursor.fEnd
- - cursor.fStart, fieldValueBufferOutput);
- if (fldIds[i] < 0) {
- recBuilder.addField(nameBuffers[i], fieldValueBuffer);
- } else {
- recBuilder.addField(fldIds[i], fieldValueBuffer);
- }
- }
- recBuilder.write(recDos, true);
- tb.addFieldEndOffset();
-
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- protected void fieldNameToBytes(String fieldName, AMutableString str,
- ArrayBackedValueStorage buffer) throws HyracksDataException {
- buffer.reset();
- DataOutput out = buffer.getDataOutput();
- str.setValue(fieldName);
- try {
- stringSerde.serialize(str, out);
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- protected enum State {
- INIT, IN_RECORD, EOR, CR, EOF
- }
-
- protected class FieldCursor {
- private static final int INITIAL_BUFFER_SIZE = 4096;
- private static final int INCREMENT = 4096;
-
- private final Reader in;
-
- private char[] buffer;
- private int start;
- private int end;
- private State state;
-
- private int fStart;
- private int fEnd;
-
- public FieldCursor(Reader in) {
- this.in = in;
- buffer = new char[INITIAL_BUFFER_SIZE];
- start = 0;
- end = 0;
- state = State.INIT;
- }
-
- public boolean nextRecord() throws IOException {
- while (true) {
- switch (state) {
- case INIT:
- boolean eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case IN_RECORD:
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return start < end;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == '\n') {
- start = p + 1;
- state = State.EOR;
- break;
- } else if (ch == '\r') {
- start = p + 1;
- state = State.CR;
- break;
- }
- ++p;
- }
- break;
-
- case CR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- char ch = buffer[start];
- if (ch == '\n') {
- ++start;
- state = State.EOR;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case EOR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- state = State.IN_RECORD;
- return start < end;
-
- case EOF:
- return false;
- }
- }
- }
-
- public boolean nextField() throws IOException {
- switch (state) {
- case INIT:
- case EOR:
- case EOF:
- case CR:
- return false;
-
- case IN_RECORD:
- boolean eof;
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return true;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == fieldDelimiter) {
- fStart = start;
- fEnd = p;
- start = p + 1;
- return true;
- } else if (ch == '\n') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.EOR;
- return true;
- } else if (ch == '\r') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.CR;
- return true;
- }
- ++p;
- }
- }
- throw new IllegalStateException();
- }
-
- protected boolean readMore() throws IOException {
- if (start > 0) {
- System.arraycopy(buffer, start, buffer, 0, end - start);
- }
- end -= start;
- start = 0;
-
- if (end == buffer.length) {
- buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
- }
-
- int n = in.read(buffer, end, buffer.length - end);
- if (n < 0) {
- return false;
- }
- end += n;
- return true;
- }
-
- public int getfStart() {
- return fStart;
- }
-
- public void setfStart(int fStart) {
- this.fStart = fStart;
- }
-
- public int getfEnd() {
- return fEnd;
- }
-
- public void setfEnd(int fEnd) {
- this.fEnd = fEnd;
- }
-
- public char[] getBuffer() {
- return buffer;
- }
- }
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
new file mode 100644
index 0000000..f23aeac
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Interface implemented by a parser
+ */
+public interface IDataParser {
+
+ /**
+ * Initialize the parser prior to actual parsing.
+ *
+ * @param in
+ * input stream to be parsed
+ * @param recordType
+ * record type associated with input data
+ * @param datasetRec
+ * boolean flag set to true if input data represents dataset
+ * records.
+ * @throws AsterixException
+ * @throws IOException
+ */
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+ IOException;
+
+ /**
+ * Parse data from source input stream and output ADM records.
+ *
+ * @param out
+ * DataOutput instance that for writing the parser output.
+ * @return
+ * @throws AsterixException
+ * @throws IOException
+ */
+ public boolean parse(DataOutput out) throws AsterixException, IOException;
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
index cfe94ef..86fba12 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
@@ -1,42 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+/**
+ * A tuple parser factory for creating a tuple parser capable of parsing
+ * delimited data.
+ */
public class NtDelimitedDataTupleParserFactory implements ITupleParserFactory {
private static final long serialVersionUID = 1L;
protected ARecordType recordType;
protected IValueParserFactory[] valueParserFactories;
protected char fieldDelimiter;
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
public NtDelimitedDataTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
char fieldDelimiter) {
@@ -47,271 +39,7 @@
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
- return new ITupleParser() {
- @Override
- public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
- try {
- IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
- for (int i = 0; i < valueParserFactories.length; ++i) {
- valueParsers[i] = valueParserFactories[i].createValueParser();
- }
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- appender.reset(frame, true);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- DataOutput recDos = tb.getDataOutput();
-
- ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
- DataOutput fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
- IARecordBuilder recBuilder = new RecordBuilder();
- recBuilder.reset(recordType);
- recBuilder.init();
-
- int n = recordType.getFieldNames().length;
- byte[] fieldTypeTags = new byte[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
- fieldTypeTags[i] = tag.serialize();
- }
-
- int[] fldIds = new int[n];
- ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
- AMutableString str = new AMutableString(null);
- for (int i = 0; i < n; i++) {
- String name = recordType.getFieldNames()[i];
- fldIds[i] = recBuilder.getFieldId(name);
- if (fldIds[i] < 0) {
- if (!recordType.isOpen()) {
- throw new HyracksDataException("Illegal field " + name + " in closed type "
- + recordType);
- } else {
- nameBuffers[i] = new ArrayBackedValueStorage();
- fieldNameToBytes(name, str, nameBuffers[i]);
- }
- }
- }
-
- FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
- while (cursor.nextRecord()) {
- tb.reset();
- recBuilder.reset(recordType);
- recBuilder.init();
-
- for (int i = 0; i < valueParsers.length; ++i) {
- if (!cursor.nextField()) {
- break;
- }
- fieldValueBuffer.reset();
- fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
- valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart,
- fieldValueBufferOutput);
- if (fldIds[i] < 0) {
- recBuilder.addField(nameBuffers[i], fieldValueBuffer);
- } else {
- recBuilder.addField(fldIds[i], fieldValueBuffer);
- }
- }
- recBuilder.write(recDos, true);
- tb.addFieldEndOffset();
-
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- private void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
- throws HyracksDataException {
- buffer.reset();
- DataOutput out = buffer.getDataOutput();
- str.setValue(fieldName);
- try {
- stringSerde.serialize(str, out);
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- };
- }
-
- private enum State {
- INIT,
- IN_RECORD,
- EOR,
- CR,
- EOF
- }
-
- private class FieldCursor {
- private static final int INITIAL_BUFFER_SIZE = 4096;
- private static final int INCREMENT = 4096;
-
- private final Reader in;
-
- private char[] buffer;
- private int start;
- private int end;
- private State state;
-
- private int fStart;
- private int fEnd;
-
- public FieldCursor(Reader in) {
- this.in = in;
- buffer = new char[INITIAL_BUFFER_SIZE];
- start = 0;
- end = 0;
- state = State.INIT;
- }
-
- public boolean nextRecord() throws IOException {
- while (true) {
- switch (state) {
- case INIT:
- boolean eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case IN_RECORD:
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return start < end;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == '\n') {
- start = p + 1;
- state = State.EOR;
- break;
- } else if (ch == '\r') {
- start = p + 1;
- state = State.CR;
- break;
- }
- ++p;
- }
- break;
-
- case CR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- char ch = buffer[start];
- if (ch == '\n') {
- ++start;
- state = State.EOR;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case EOR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- state = State.IN_RECORD;
- return start < end;
-
- case EOF:
- return false;
- }
- }
- }
-
- public boolean nextField() throws IOException {
- switch (state) {
- case INIT:
- case EOR:
- case EOF:
- case CR:
- return false;
-
- case IN_RECORD:
- boolean eof;
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return true;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == fieldDelimiter) {
- fStart = start;
- fEnd = p;
- start = p + 1;
- return true;
- } else if (ch == '\n') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.EOR;
- return true;
- } else if (ch == '\r') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.CR;
- return true;
- }
- ++p;
- }
- }
- throw new IllegalStateException();
- }
-
- private boolean readMore() throws IOException {
- if (start > 0) {
- System.arraycopy(buffer, start, buffer, 0, end - start);
- }
- end -= start;
- start = 0;
-
- if (end == buffer.length) {
- buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
- }
-
- int n = in.read(buffer, end, buffer.length - end);
- if (n < 0) {
- return false;
- }
- end += n;
- return true;
- }
+ return new DelimitedDataTupleParser(ctx, recordType, valueParserFactories, fieldDelimiter);
}
}
diff --git a/asterix-runtime/src/main/javacc/AdmLexer.jj b/asterix-runtime/src/main/javacc/AdmLexer.jj
deleted file mode 100644
index fbab62f..0000000
--- a/asterix-runtime/src/main/javacc/AdmLexer.jj
+++ /dev/null
@@ -1,150 +0,0 @@
-options {
-
-
- STATIC = false;
-
-}
-
-PARSER_BEGIN(AdmLexer)
-
-package edu.uci.ics.asterix.adm.parser;
-
-import java.io.*;
-
-public class AdmLexer {
-
- public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException {
- File file = new File(args[0]);
- Reader freader = new BufferedReader(new InputStreamReader
- (new FileInputStream(file), "UTF-8"));
- AdmLexer flexer = new AdmLexer(freader);
- Token t = null;
- do {
- t = flexer.next();
- System.out.println(AdmLexerConstants.tokenImage[t.kind]);
- } while (t.kind != EOF);
- freader.close();
- }
-
- public Token next() throws ParseException {
- return getNextToken();
- }
-
- public String tokenKindToString(int tokenKind) {
- return AdmLexerConstants.tokenImage[tokenKind];
- }
-}
-
-PARSER_END(AdmLexer)
-
-<DEFAULT>
-TOKEN :
-{
- <NULL_LITERAL : "null">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <TRUE_LITERAL : "true">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <FALSE_LITERAL : "false">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <INTEGER_LITERAL : ("-")? (<DIGIT>)+ >
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <#DIGIT : ["0" - "9"]>
-}
-
-
-TOKEN:
-{
- < DOUBLE_LITERAL:
- ("-")? <INTEGER> ( "." <INTEGER> )? (<EXPONENT>)?
- | ("-")? "." <INTEGER>
- >
- | < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
- | <INTEGER : (<DIGIT>)+ >
- | <FLOAT_LITERAL: <DOUBLE_LITERAL>("f"|"F")>
- }
-
-<DEFAULT>
-TOKEN :
-{
- <STRING_LITERAL : ("\"" (<EscapeQuot> | ~["\""])* "\"") >
- |
- < #EscapeQuot: "\\\"" >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <START_RECORD : "{">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_RECORD : "}">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <COMMA : ",">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <COLON : ":">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <START_ORDERED_LIST : "[">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_ORDERED_LIST : "]">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <START_UNORDERED_LIST : "{{">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_UNORDERED_LIST : "}}">
-}
-
-
-
-
-SKIP:
-{
- " "
-| "\t"
-| "\r"
-| "\n"
-}
diff --git a/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj b/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj
deleted file mode 100644
index f556d54..0000000
--- a/asterix-runtime/src/main/javacc/nontagged/AdmLexer.jj
+++ /dev/null
@@ -1,362 +0,0 @@
-options {
-
-
- STATIC = false;
-
-}
-
-PARSER_BEGIN(AdmLexer)
-
-package edu.uci.ics.asterix.adm.parser.nontagged;
-
-import java.io.*;
-
-public class AdmLexer {
-
- public static void main(String args[]) throws ParseException, TokenMgrError, IOException, FileNotFoundException {
- File file = new File(args[0]);
- Reader freader = new BufferedReader(new InputStreamReader
- (new FileInputStream(file), "UTF-8"));
- AdmLexer flexer = new AdmLexer(freader);
- Token t = null;
- do {
- t = flexer.next();
- System.out.println(AdmLexerConstants.tokenImage[t.kind]);
- } while (t.kind != EOF);
- freader.close();
- }
-
- public Token next() throws ParseException {
- return getNextToken();
- }
-
- public String tokenKindToString(int tokenKind) {
- return AdmLexerConstants.tokenImage[tokenKind];
- }
-}
-
-PARSER_END(AdmLexer)
-
-<DEFAULT>
-TOKEN :
-{
- <NULL_LITERAL : "null">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <TRUE_LITERAL : "true">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <FALSE_LITERAL : "false">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <BOOLEAN_CONS : ("boolean") >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <CONSTRUCTOR_OPEN : ("(")>
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <CONSTRUCTOR_CLOSE : (")")>
-}
-
-<DEFAULT>
-TOKEN:
-{
- <INT8_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i8")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <INT8_CONS : ("int8") >
-}
-
-<DEFAULT>
-TOKEN:
-{
- <INT16_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i16")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <INT16_CONS : ("int16") >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <INT32_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i32")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <INT32_CONS : ("int32")>
-}
-
-<DEFAULT>
-TOKEN:
-{
- <INT64_LITERAL : ("-" | "+")? (<DIGIT>)+ ("i64")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <INT64_CONS : ("int64") >
-}
-
-<DEFAULT>
-TOKEN:
-{
- <INT_LITERAL : ("-" | "+")? (<DIGIT>)+>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <CIRCLE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("R") <DOUBLE_LITERAL> >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <CIRCLE_CONS : ("circle") >
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <TIMEZONE_LITERAL : (("+"|"-")<INTEGER>(":")<INTEGER>) | ("Z") >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DATE_LITERAL : ("-")?<INTEGER>("-")<INTEGER>("-")<INTEGER> (<TIMEZONE_LITERAL>)? >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DATE_CONS : ("date")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <TIME_LITERAL : <INTEGER>(":")<INTEGER>(":")<INTEGER> ( (":")<INTEGER>)? (<TIMEZONE_LITERAL>)? >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <TIME_CONS : ("time")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DATETIME_LITERAL : ("-")? <INTEGER>("-")<INTEGER>("-")<INTEGER>("T")<INTEGER>(":")<INTEGER>(":")<INTEGER> ( (":")<INTEGER>)? (<TIMEZONE_LITERAL>)?>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DATETIME_CONS : ("datetime")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DURATION_LITERAL : ("-")? ("D")(<INTEGER>("Y"))?(<INTEGER>("M"))?(<INTEGER>("D"))?(("T")(<INTEGER>("H"))?(<INTEGER>("M"))?(<INTEGER>("S"))?)?>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DURATION_CONS : ("duration")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <#DIGIT : ["0" - "9"]>
-}
-
-TOKEN:
-{
- < DOUBLE_LITERAL:
- ("-" | "+")? <INTEGER> ( "." <INTEGER> )? (<EXPONENT>)?
- | ("-" | "+")? "." <INTEGER>
- >
- | < #EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
- | <INTEGER : (<DIGIT>)+ >
- | <FLOAT_LITERAL: <DOUBLE_LITERAL>("f"|"F")>
- }
-
-
-<DEFAULT>
-TOKEN :
-{
- <FLOAT_CONS : ("float")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <DOUBLE_CONS : ("double")>
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <STRING_LITERAL : ("\"" (<EscapeQuot> | ~["\""])* "\"") >
- |
- < #EscapeQuot: "\\\"" >
-}
-
-<DEFAULT>
-TOKEN :
-{
- <STRING_CONS : ("string")>
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <POINT_LITERAL : "P"<DOUBLE_LITERAL>(",")<DOUBLE_LITERAL>>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <POINT_CONS : ("point")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <POINT3D_LITERAL : "P" <DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <POINT3D_CONS : ("point3d")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <LINE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <LINE_CONS : ("line")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <POLYGON_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL> (("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>)+>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <POLYGON_CONS : ("polygon")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <RECTANGLE_CONS : ("rectangle")>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <RECTANGLE_LITERAL : "P"<DOUBLE_LITERAL>(",") <DOUBLE_LITERAL> ("P") <DOUBLE_LITERAL> (",") <DOUBLE_LITERAL>>
-}
-
-<DEFAULT>
-TOKEN :
-{
- <START_RECORD : "{">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_RECORD : "}">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <COMMA : ",">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <COLON : ":">
-}
-
-
-<DEFAULT>
-TOKEN :
-{
- <START_ORDERED_LIST : "[">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_ORDERED_LIST : "]">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <START_UNORDERED_LIST : "{{">
-}
-
-<DEFAULT>
-TOKEN :
-{
- <END_UNORDERED_LIST : "}}">
-}
-
-
-
-
-SKIP:
-{
- " "
-| "\t"
-| "\r"
-| "\n"
-}
diff --git a/asterix-runtime/src/main/resources/adm.grammar b/asterix-runtime/src/main/resources/adm.grammar
new file mode 100644
index 0000000..56c7212
--- /dev/null
+++ b/asterix-runtime/src/main/resources/adm.grammar
@@ -0,0 +1,63 @@
+# LEXER GENERATOR configuration file
+# ---------------------------------------
+# Place *first* the generic configuration
+# then list your grammar.
+
+PACKAGE: edu.uci.ics.asterix.runtime.operators.file.adm
+LEXER_NAME: AdmLexer
+
+TOKENS:
+
+BOOLEAN_CONS = string(boolean)
+INT8_CONS = string(int8)
+INT16_CONS = string(int16)
+INT32_CONS = string(int32)
+INT64_CONS = string(int64)
+FLOAT_CONS = string(float)
+DOUBLE_CONS = string(double)
+DATE_CONS = string(date)
+DATETIME_CONS = string(datetime)
+DURATION_CONS = string(duration)
+STRING_CONS = string(string)
+POINT_CONS = string(point)
+POINT3D_CONS = string(point3d)
+LINE_CONS = string(line)
+POLYGON_CONS = string(polygon)
+RECTANGLE_CONS = string(rectangle)
+CIRCLE_CONS = string(circle)
+TIME_CONS = string(time)
+INTERVAL_TIME_CONS = string(interval_time)
+INTERVAL_DATE_CONS = string(interval_date)
+INTERVAL_DATETIME_CONS = string(interval_datetime)
+
+NULL_LITERAL = string(null)
+TRUE_LITERAL = string(true)
+FALSE_LITERAL = string(false)
+
+CONSTRUCTOR_OPEN = char(()
+CONSTRUCTOR_CLOSE = char())
+START_RECORD = char({)
+END_RECORD = char(})
+COMMA = char(\,)
+COLON = char(:)
+START_ORDERED_LIST = char([)
+END_ORDERED_LIST = char(])
+START_UNORDERED_LIST = string({{)
+END_UNORDERED_LIST = string(}})
+
+STRING_LITERAL = char("), anythingUntil(")
+
+INT_LITERAL = signOrNothing(), digitSequence()
+INT8_LITERAL = token(INT_LITERAL), string(i8)
+INT16_LITERAL = token(INT_LITERAL), string(i16)
+INT32_LITERAL = token(INT_LITERAL), string(i32)
+INT64_LITERAL = token(INT_LITERAL), string(i64)
+
+@EXPONENT = caseInsensitiveChar(e), signOrNothing(), digitSequence()
+
+DOUBLE_LITERAL = signOrNothing(), char(.), digitSequence()
+DOUBLE_LITERAL = signOrNothing(), digitSequence(), char(.), digitSequence()
+DOUBLE_LITERAL = signOrNothing(), digitSequence(), char(.), digitSequence(), token(@EXPONENT)
+DOUBLE_LITERAL = signOrNothing(), digitSequence(), token(@EXPONENT)
+
+FLOAT_LITERAL = token(DOUBLE_LITERAL), caseInsensitiveChar(f)
diff --git a/asterix-test-framework/pom.xml b/asterix-test-framework/pom.xml
index 2f0142e..4042066 100755
--- a/asterix-test-framework/pom.xml
+++ b/asterix-test-framework/pom.xml
@@ -1,4 +1,5 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
@@ -13,21 +14,22 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
- <plugin>
- <groupId>org.jvnet.jaxb2.maven2</groupId>
- <artifactId>maven-jaxb2-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>generate</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
+ <plugin>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>generate</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 5a8ea70..f6868c5 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-tools</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -16,11 +15,35 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
<plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>aqlclient</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ <phase>package</phase>
+ <configuration>
+ <classifier>aqlclient</classifier>
+ <archive>
+ <manifest>
+ <MainClass>edu.uci.ics.asterix.tools.aqlclient.AqlClient</MainClass>
+ </manifest>
+ </archive>
+ <includes>
+ <include>**/uci/ics/asterix/tools/aqlclient/*</include>
+ </includes>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.7.2</version>
<executions>
@@ -112,6 +135,18 @@
<scope>compile</scope>
</dependency>
<dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.2.2</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpcore</artifactId>
+ <version>4.2.2</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.8.1</version>
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java
new file mode 100644
index 0000000..911b68b
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.tools.aqlclient;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
+
+/**
+ * This class is to automate AQL queries for benchmarking.
+ * The code is written by Pouria for the purpose of benchmarking ASTERIX.
+ */
+public class AqlClient {
+
+ static ArrayList<String> qFiles;
+ static ArrayList<String> qNames;
+
+ /*
+ * This code loads a set of AQL-Queries and runs them against Asterix. It
+ * runs the queries for a number of iterations, specified by the user. For
+ * each query it shows the Hyracks time, Asterix Time, and Client time on
+ * the screen, while it also dumps its stats into an output file, to make it
+ * persistent for future use/reference.
+ */
+ public static void main(String args[]) throws Exception {
+ /*
+ * Arguments: args0 - Path to the file, that contains list of query
+ * files. The assumption is that each AQL query, is saved in a file, and
+ * its path is mentioned in arg[0]. Each line of arg[0] file, denotes
+ * one query file. args1 - IP-Address of CC args2 - Path to the output
+ * file, for dumping stats args3 - Number of iterations, you want the
+ * simulation to be run args4 - Set it to true, so you stats, on the
+ * console, as queries are running.
+ */
+ if (args.length != 5) {
+ System.out
+ .println("Usage: [0]=List-of-Query-Files, [1]=ccIPadr, [2]=outputFile, [3]=Iteration# , [4]=boolean-showOnConsole ");
+ return;
+ }
+ qFiles = new ArrayList<String>();
+ qNames = new ArrayList<String>();
+
+ loadQueriesPaths(args[0]);
+ String ccUrl = args[1];
+ String outputFile = args[2];
+ int iteration = Integer.parseInt(args[3]);
+ boolean showOnConsole = Boolean.parseBoolean(args[4]);
+
+ PrintWriter pw = new PrintWriter(new File(outputFile));
+ pw.println("Code\tStatus\tName\tHyracksTime\tAsterixTime\tClientTime\n");
+
+ DefaultHttpClient httpclient = new DefaultHttpClient();
+ HttpPost httpPost = new HttpPost("http://" + ccUrl + ":19001");
+ List<NameValuePair> nvps = new ArrayList<NameValuePair>();
+ nvps.add(new BasicNameValuePair("hyracks-port", "1098"));
+ nvps.add(new BasicNameValuePair("hyracks-ip", ccUrl));
+ nvps.add(new BasicNameValuePair("display-result", "true"));
+ nvps.add(new BasicNameValuePair("query", null)); // it will get its
+ // value in the loop
+ // below
+
+ int ixToremove = nvps.size() - 1;
+ try {
+ for (int i = 0; i < iteration; i++) {
+ for (int x = 0; x < qFiles.size(); x++) {
+ nvps.remove(ixToremove);
+ String query = readQueryFromFile(qFiles.get(x));
+ String qName = qNames.get(x);
+ System.out.println("\n\nNow Running Query " + qName + " in iteration " + i);
+ nvps.add(new BasicNameValuePair("query", query));
+
+ httpPost.setEntity(new UrlEncodedFormEntity(nvps));
+ long s = System.currentTimeMillis();
+ HttpResponse response = httpclient.execute(httpPost);
+ long e = System.currentTimeMillis();
+
+ String status = response.getStatusLine().toString();
+ HttpEntity entity = response.getEntity();
+ String content = EntityUtils.toString(entity);
+ EntityUtils.consume(entity);
+
+ double[] times = extractStats(content);
+ double endToend = ((double) (e - s)) / 1000.00; // This is
+ // the
+ // client-time
+ // (end to
+ // end delay
+ // from
+ // client's
+ // perspective)
+ pw.print(status + "\t" + qName + "\t" + times[0] + "\t" + times[1] + "\t" + endToend + "\n");
+
+ if (showOnConsole) {
+ // System.out.println("Iteration "+i+"\n"+content+"\n");
+ // //Uncomment this line, if you want to see the whole
+ // content of the returned HTML page
+ System.out.print(qName + "\t" + status + "\t" + times[0] + "\t" + times[1] + "\t" + endToend
+ + " (iteration " + (i) + ")\n");
+ }
+ }
+ pw.println();
+ }
+ } finally {
+ pw.close();
+ httpPost.releaseConnection();
+ }
+
+ }
+
+ // Assumption: It contains one query file path per line
+ private static void loadQueriesPaths(String qFilesPath) throws Exception {
+ BufferedReader in = new BufferedReader(new FileReader(qFilesPath));
+ String str;
+ while ((str = in.readLine()) != null) {
+ qFiles.add(str.trim());
+ int nameIx = str.lastIndexOf('/');
+ qNames.add(new String(str.substring(nameIx + 1)));
+ }
+ in.close();
+ }
+
+ private static String readQueryFromFile(String filePath) throws Exception {
+ BufferedReader in = new BufferedReader(new FileReader(filePath));
+ String query = "";
+ String str;
+ while ((str = in.readLine()) != null) {
+ query += str + "\n";
+ }
+ in.close();
+ return query;
+ }
+
+ private static double[] extractStats(String content) {
+ int hyracksTimeIx = content.indexOf("<PRE>Duration:");
+ if (hyracksTimeIx < 0) {
+ return new double[] { -1, -1 };
+ }
+ int endHyracksTimeIx = content.indexOf("</PRE>", hyracksTimeIx);
+ double hTime = Double.parseDouble(content.substring(hyracksTimeIx + 14, endHyracksTimeIx));
+
+ int totalTimeSIx = content.indexOf("Duration", endHyracksTimeIx);
+ if (totalTimeSIx < 0) {
+ return new double[] { hTime, -1 };
+ }
+ int totalTimeEIx = content.indexOf("\n", totalTimeSIx);
+ double tTime = Double.parseDouble(content.substring(totalTimeSIx + 10, totalTimeEIx));
+
+ return new double[] { hTime, tTime };
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
new file mode 100644
index 0000000..84b989d
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
+import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+/**
+ * An adapter that simulates a feed from the contents of a source file. The file can be on the local file
+ * system or on HDFS. The feed ends when the content of the source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapter extends FileSystemBasedAdapter implements ITypedDatasourceAdapter,
+ IManagedFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final String KEY_FILE_SYSTEM = "fs";
+ public static final String LOCAL_FS = "localfs";
+ public static final String HDFS = "hdfs";
+
+ private final FileSystemBasedAdapter coreAdapter;
+ private final Map<String, String> configuration;
+ private final String fileSystem;
+ private final String format;
+
+ public RateControlledFileSystemBasedAdapter(ARecordType atype, Map<String, String> configuration) throws Exception {
+ super(atype);
+ checkRequiredArgs(configuration);
+ fileSystem = configuration.get(KEY_FILE_SYSTEM);
+ String adapterFactoryClass = null;
+ if (fileSystem.equalsIgnoreCase(LOCAL_FS)) {
+ adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory";
+ } else if (fileSystem.equals(HDFS)) {
+ adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory";
+ } else {
+ throw new AsterixException("Unsupported file system type " + fileSystem);
+ }
+ format = configuration.get(KEY_FORMAT);
+ IGenericDatasetAdapterFactory adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(
+ adapterFactoryClass).newInstance();
+ coreAdapter = (FileSystemBasedAdapter) adapterFactory.createAdapter(configuration, atype);
+ this.configuration = configuration;
+ }
+
+ private void checkRequiredArgs(Map<String, String> configuration) throws Exception {
+ if (configuration.get(KEY_FILE_SYSTEM) == null) {
+ throw new Exception("File system type not specified. (fs=?) File system could be 'localfs' or 'hdfs'");
+ }
+ if (configuration.get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME) == null) {
+ throw new Exception("Record type not specified (output-type-name=?)");
+ }
+ if (configuration.get(KEY_PATH) == null) {
+ throw new Exception("File path not specified (path=?)");
+ }
+ if (configuration.get(KEY_FORMAT) == null) {
+ throw new Exception("File format not specified (format=?)");
+ }
+ }
+
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ return coreAdapter.getInputStream(partition);
+ }
+
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ coreAdapter.initialize(ctx);
+ this.ctx = ctx;
+ }
+
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ coreAdapter.configure(arguments);
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return coreAdapter.getAdapterType();
+ }
+
+ @Override
+ protected ITupleParser getTupleParser() throws Exception {
+ ITupleParser parser = null;
+ if (format.equals(FORMAT_DELIMITED_TEXT)) {
+ parser = getRateControlledDelimitedDataTupleParser((ARecordType) atype);
+ } else if (format.equals(FORMAT_ADM)) {
+ parser = getRateControlledADMDataTupleParser((ARecordType) atype);
+ } else {
+ throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
+ }
+ return parser;
+
+ }
+
+ protected ITupleParser getRateControlledDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
+ ITupleParser parser;
+ int n = recordType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
+ }
+ fieldParserFactories[i] = vpf;
+
+ }
+ String delimiterValue = (String) configuration.get(KEY_DELIMITER);
+ if (delimiterValue != null && delimiterValue.length() > 1) {
+ throw new AsterixException("improper delimiter");
+ }
+
+ Character delimiter = delimiterValue.charAt(0);
+ parser = new RateControlledTupleParserFactory(recordType, fieldParserFactories, delimiter, configuration)
+ .createTupleParser(ctx);
+ return parser;
+ }
+
+ protected ITupleParser getRateControlledADMDataTupleParser(ARecordType recordType) throws AsterixException {
+ ITupleParser parser = null;
+ try {
+ parser = new RateControlledTupleParserFactory(recordType, configuration).createTupleParser(ctx);
+ return parser;
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return (ARecordType) atype;
+ }
+
+ @Override
+ public void alter(Map<String, String> properties) {
+ ((RateControlledTupleParser) parser).setInterTupleInterval(Long.parseLong(properties
+ .get(RateControlledTupleParser.INTER_TUPLE_INTERVAL)));
+ }
+
+ @Override
+ public void stop() {
+ ((RateControlledTupleParser) parser).stop();
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return coreAdapter.getPartitionConstraint();
+ }
+}
+
+class RateControlledTupleParserFactory implements ITupleParserFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private final ARecordType recordType;
+ private final IDataParser dataParser;
+ private final Map<String, String> configuration;
+
+ public RateControlledTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
+ char fieldDelimiter, Map<String, String> configuration) {
+ this.recordType = recordType;
+ dataParser = new DelimitedDataParser(recordType, valueParserFactories, fieldDelimiter);
+ this.configuration = configuration;
+ }
+
+ public RateControlledTupleParserFactory(ARecordType recordType, Map<String, String> configuration) {
+ this.recordType = recordType;
+ dataParser = new ADMDataParser();
+ this.configuration = configuration;
+ }
+
+ @Override
+ public ITupleParser createTupleParser(IHyracksTaskContext ctx) {
+ return new RateControlledTupleParser(ctx, recordType, dataParser, configuration);
+ }
+
+}
+
+class RateControlledTupleParser extends AbstractTupleParser {
+
+ private final IDataParser dataParser;
+ private long interTupleInterval;
+ private boolean delayConfigured;
+ private boolean continueIngestion = true;
+
+ public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
+
+ public RateControlledTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
+ Map<String, String> configuration) {
+ super(ctx, recType);
+ this.dataParser = dataParser;
+ String propValue = configuration.get(INTER_TUPLE_INTERVAL);
+ if (propValue != null) {
+ interTupleInterval = Long.parseLong(propValue);
+ } else {
+ interTupleInterval = 0;
+ }
+ delayConfigured = interTupleInterval != 0;
+ }
+
+ public void setInterTupleInterval(long val) {
+ this.interTupleInterval = val;
+ this.delayConfigured = val > 0;
+ }
+
+ public void stop() {
+ continueIngestion = false;
+ }
+
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
+
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ while (continueIngestion) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ if (delayConfigured) {
+ Thread.sleep(interTupleInterval);
+ }
+ addTupleToFrame(writer);
+ }
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ } catch (InterruptedException ie) {
+ throw new HyracksDataException(ie);
+ }
+ }
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
new file mode 100644
index 0000000..6c32acb
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * Factory class for creating @see{RateControllerFileSystemBasedAdapter} The
+ * adapter simulates a feed from the contents of a source file. The file can be
+ * on the local file system or on HDFS. The feed ends when the content of the
+ * source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ @Override
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
+ return new RateControlledFileSystemBasedAdapter((ARecordType) type, configuration);
+ }
+
+ @Override
+ public String getName() {
+ return "file_feed";
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index 2637220..27b4b4b 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -16,8 +15,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.0.2</version>
<configuration>
- <source>1.6</source>
- <target>1.6</target>
+ <source>1.7</source>
+ <target>1.7</target>
+ <fork>true</fork>
</configuration>
</plugin>
</plugins>
@@ -25,28 +25,25 @@
</build>
<dependencies>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-storage-am-lsm-btree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-storage-am-lsm-rtree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-lsm-btree</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-lsm-rtree</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ </dependency>
</dependencies>
-
+
</project>
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallback.java
index 1f018b4..9e4c944 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallback.java
@@ -17,40 +17,32 @@
import edu.uci.ics.asterix.transaction.management.service.locking.ILockManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetId;
-import edu.uci.ics.asterix.transaction.management.service.transaction.FieldsHashValueGenerator;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
public abstract class AbstractOperationCallback {
+
+ private final static long SEED = 0L;
+
protected final DatasetId datasetId;
protected final int[] primaryKeyFields;
- protected final IBinaryHashFunction[] primaryKeyHashFunctions;
protected final ILockManager lockManager;
protected final TransactionContext txnCtx;
protected int transactorLocalNumActiveOperations = 0;
public AbstractOperationCallback(int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories, TransactionContext txnCtx,
- ILockManager lockManager) {
+ TransactionContext txnCtx, ILockManager lockManager) {
this.datasetId = new DatasetId(datasetId);
this.primaryKeyFields = primaryKeyFields;
- if (primaryKeyHashFunctionFactories != null) {
- this.primaryKeyHashFunctions = new IBinaryHashFunction[primaryKeyHashFunctionFactories.length];
- for (int i = 0; i < primaryKeyHashFunctionFactories.length; ++i) {
- this.primaryKeyHashFunctions[i] = primaryKeyHashFunctionFactories[i].createBinaryHashFunction();
- }
- } else {
- this.primaryKeyHashFunctions = null;
- }
this.txnCtx = txnCtx;
this.lockManager = lockManager;
}
- public int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields,
- IBinaryHashFunction[] primaryKeyHashFunctions) {
- return FieldsHashValueGenerator.computeFieldsHashValue(tuple, primaryKeyFields, primaryKeyHashFunctions);
+ public int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
+ long[] longHashes= new long[2];
+ MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
+ return Math.abs((int) longHashes[0]);
}
public TransactionContext getTransactionContext() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallbackFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallbackFactory.java
index 84c747b..386dce5 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallbackFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/AbstractOperationCallbackFactory.java
@@ -19,23 +19,21 @@
import edu.uci.ics.asterix.transaction.management.service.transaction.ITransactionSubsystemProvider;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobId;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
-public abstract class AbstractOperationCallbackFactory implements Serializable{
+public abstract class AbstractOperationCallbackFactory implements Serializable {
+ private static final long serialVersionUID = 1L;
+
protected final JobId jobId;
protected final int datasetId;
protected final int[] primaryKeyFields;
- protected final IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories;
protected final ITransactionSubsystemProvider txnSubsystemProvider;
protected final byte resourceType;
public AbstractOperationCallbackFactory(JobId jobId, int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories,
ITransactionSubsystemProvider txnSubsystemProvider, byte resourceType) {
this.jobId = jobId;
this.datasetId = datasetId;
this.primaryKeyFields = primaryKeyFields;
- this.primaryKeyHashFunctionFactories = primaryKeyHashFunctionFactories;
this.txnSubsystemProvider = txnSubsystemProvider;
this.resourceType = resourceType;
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
index 19e4396..824a324 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
@@ -21,7 +21,6 @@
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
@@ -41,9 +40,9 @@
protected final TransactionSubsystem txnSubsystem;
public PrimaryIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories, TransactionContext txnCtx,
- ILockManager lockManager, TransactionSubsystem txnSubsystem, long resourceId, byte resourceType, IndexOperation indexOp) {
- super(datasetId, primaryKeyFields, primaryKeyHashFunctionFactories, txnCtx, lockManager);
+ TransactionContext txnCtx, ILockManager lockManager,
+ TransactionSubsystem txnSubsystem, long resourceId, byte resourceType, IndexOperation indexOp) {
+ super(datasetId, primaryKeyFields, txnCtx, lockManager);
this.resourceId = resourceId;
this.resourceType = resourceType;
this.indexOp = indexOp;
@@ -52,7 +51,7 @@
@Override
public void before(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
lockManager.lock(datasetId, pkHash, LockMode.X, txnCtx);
} catch (ACIDException e) {
@@ -63,7 +62,7 @@
@Override
public void found(ITupleReference before, ITupleReference after) throws HyracksDataException {
IndexLogger logger = txnSubsystem.getTreeLoggerRepository().getIndexLogger(resourceId, resourceType);
- int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields);
LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) before;
IndexOperation oldOp = IndexOperation.INSERT;
if (before == null) {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallbackFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallbackFactory.java
index f3dee0c..c75ab6f 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallbackFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallbackFactory.java
@@ -16,13 +16,11 @@
package edu.uci.ics.asterix.transaction.management.opcallbacks;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
-import edu.uci.ics.asterix.transaction.management.resource.TransactionalResourceManagerRepository;
import edu.uci.ics.asterix.transaction.management.service.transaction.ITransactionSubsystemProvider;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobId;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
@@ -40,9 +38,8 @@
private final IndexOperation indexOp;
public PrimaryIndexModificationOperationCallbackFactory(JobId jobId, int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories,
ITransactionSubsystemProvider txnSubsystemProvider, IndexOperation indexOp, byte resourceType) {
- super(jobId, datasetId, primaryKeyFields, primaryKeyHashFunctionFactories, txnSubsystemProvider, resourceType);
+ super(jobId, datasetId, primaryKeyFields, txnSubsystemProvider, resourceType);
this.indexOp = indexOp;
}
@@ -55,14 +52,14 @@
.getIndexLifecycleManager();
ILSMIndex index = (ILSMIndex) indexLifeCycleManager.getIndex(resourceId);
if (index == null) {
- throw new HyracksDataException("Index(id:"+resourceId+") is not registered.");
+ throw new HyracksDataException("Index(id:" + resourceId + ") is not registered.");
}
try {
TransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId);
IModificationOperationCallback modCallback = new PrimaryIndexModificationOperationCallback(datasetId,
- primaryKeyFields, primaryKeyHashFunctionFactories, txnCtx, txnSubsystem.getLockManager(),
- txnSubsystem, resourceId, resourceType, indexOp);
+ primaryKeyFields, txnCtx, txnSubsystem.getLockManager(), txnSubsystem, resourceId, resourceType,
+ indexOp);
txnCtx.registerIndexAndCallback(index, (AbstractOperationCallback) modCallback);
return modCallback;
} catch (ACIDException e) {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
index 85e8980..4760307 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
@@ -19,7 +19,6 @@
import edu.uci.ics.asterix.transaction.management.service.locking.ILockManager;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
@@ -30,14 +29,13 @@
public class PrimaryIndexSearchOperationCallback extends AbstractOperationCallback implements ISearchOperationCallback {
public PrimaryIndexSearchOperationCallback(int datasetId, int[] entityIdFields,
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories, ILockManager lockManager,
- TransactionContext txnCtx) {
- super(datasetId, entityIdFields, entityIdFieldHashFunctionFactories, txnCtx, lockManager);
+ ILockManager lockManager, TransactionContext txnCtx) {
+ super(datasetId, entityIdFields, txnCtx, lockManager);
}
@Override
public boolean proceed(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
return lockManager.tryLock(datasetId, pkHash, LockMode.S, txnCtx);
} catch (ACIDException e) {
@@ -47,7 +45,7 @@
@Override
public void reconcile(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
lockManager.lock(datasetId, pkHash, LockMode.S, txnCtx);
} catch (ACIDException e) {
@@ -57,7 +55,7 @@
@Override
public void cancel(ITupleReference tuple) throws HyracksDataException {
- int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
lockManager.unlock(datasetId, pkHash, txnCtx);
} catch (ACIDException e) {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallbackFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallbackFactory.java
index 3e6dfcb..fc62b90 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallbackFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallbackFactory.java
@@ -21,7 +21,6 @@
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
@@ -32,9 +31,8 @@
private static final long serialVersionUID = 1L;
public PrimaryIndexSearchOperationCallbackFactory(JobId jobId, int datasetId, int[] entityIdFields,
- IBinaryHashFunctionFactory[] entityIdFieldHashFunctionFactories,
ITransactionSubsystemProvider txnSubsystemProvider, byte resourceType) {
- super(jobId, datasetId, entityIdFields, entityIdFieldHashFunctionFactories, txnSubsystemProvider, resourceType);
+ super(jobId, datasetId, entityIdFields, txnSubsystemProvider, resourceType);
}
@Override
@@ -43,8 +41,8 @@
TransactionSubsystem txnSubsystem = txnSubsystemProvider.getTransactionSubsystem(ctx);
try {
TransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId);
- return new PrimaryIndexSearchOperationCallback(datasetId, primaryKeyFields,
- primaryKeyHashFunctionFactories, txnSubsystem.getLockManager(), txnCtx);
+ return new PrimaryIndexSearchOperationCallback(datasetId, primaryKeyFields, txnSubsystem.getLockManager(),
+ txnCtx);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
index b7d64ec..092f99c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
@@ -20,7 +20,6 @@
import edu.uci.ics.asterix.transaction.management.service.logging.IndexLogger;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
@@ -41,10 +40,9 @@
protected final TransactionSubsystem txnSubsystem;
public SecondaryIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories, TransactionContext txnCtx,
- ILockManager lockManager, TransactionSubsystem txnSubsystem, long resourceId, byte resourceType,
- IndexOperation indexOp) {
- super(datasetId, primaryKeyFields, primaryKeyHashFunctionFactories, txnCtx, lockManager);
+ TransactionContext txnCtx, ILockManager lockManager,
+ TransactionSubsystem txnSubsystem, long resourceId, byte resourceType, IndexOperation indexOp) {
+ super(datasetId, primaryKeyFields, txnCtx, lockManager);
this.resourceId = resourceId;
this.resourceType = resourceType;
this.indexOp = indexOp;
@@ -60,7 +58,7 @@
@Override
public void found(ITupleReference before, ITupleReference after) throws HyracksDataException {
IndexLogger logger = txnSubsystem.getTreeLoggerRepository().getIndexLogger(resourceId, resourceType);
- int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields, primaryKeyHashFunctions);
+ int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields);
try {
logger.generateLogRecord(txnSubsystem, txnCtx, datasetId.getId(), pkHash, resourceId, indexOp, after,
oldOp, before);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
index 49f74bd..672b434 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallbackFactory.java
@@ -16,13 +16,11 @@
package edu.uci.ics.asterix.transaction.management.opcallbacks;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
-import edu.uci.ics.asterix.transaction.management.resource.TransactionalResourceManagerRepository;
import edu.uci.ics.asterix.transaction.management.service.transaction.ITransactionSubsystemProvider;
import edu.uci.ics.asterix.transaction.management.service.transaction.JobId;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
@@ -37,29 +35,27 @@
private final IndexOperation indexOp;
public SecondaryIndexModificationOperationCallbackFactory(JobId jobId, int datasetId, int[] primaryKeyFields,
- IBinaryHashFunctionFactory[] primaryKeyHashFunctionFactories,
ITransactionSubsystemProvider txnSubsystemProvider, IndexOperation indexOp, byte resourceType) {
- super(jobId, datasetId, primaryKeyFields, primaryKeyHashFunctionFactories, txnSubsystemProvider, resourceType);
+ super(jobId, datasetId, primaryKeyFields, txnSubsystemProvider, resourceType);
this.indexOp = indexOp;
}
@Override
- public IModificationOperationCallback createModificationOperationCallback(long resourceId, Object resource, IHyracksTaskContext ctx)
- throws HyracksDataException {
-
+ public IModificationOperationCallback createModificationOperationCallback(long resourceId, Object resource,
+ IHyracksTaskContext ctx) throws HyracksDataException {
+
TransactionSubsystem txnSubsystem = txnSubsystemProvider.getTransactionSubsystem(ctx);
IIndexLifecycleManager indexLifeCycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
.getIndexLifecycleManager();
ILSMIndex index = (ILSMIndex) indexLifeCycleManager.getIndex(resourceId);
if (index == null) {
- throw new HyracksDataException("Index(id:"+resourceId+") is not registered.");
+ throw new HyracksDataException("Index(id:" + resourceId + ") is not registered.");
}
-
+
try {
TransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId);
- return new SecondaryIndexModificationOperationCallback(datasetId, primaryKeyFields,
- primaryKeyHashFunctionFactories, txnCtx, txnSubsystem.getLockManager(), txnSubsystem, resourceId,
- resourceType, indexOp);
+ return new SecondaryIndexModificationOperationCallback(datasetId, primaryKeyFields, txnCtx,
+ txnSubsystem.getLockManager(), txnSubsystem, resourceId, resourceType, indexOp);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
index 16aefee..c53b651 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
@@ -26,7 +26,7 @@
ISearchOperationCallback {
public SecondaryIndexSearchOperationCallback() {
- super(-1, null, null, null, null);
+ super(-1, null, null, null);
}
@Override
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
index b14fa77..e037e95 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
@@ -20,10 +20,11 @@
public class LSMBTreeLocalResourceMetadata implements ILocalResourceMetadata {
+ private static final long serialVersionUID = 1L;
+
private final ITypeTraits[] typeTraits;
private final IBinaryComparatorFactory[] cmpFactories;
private final int[] bloomFilterKeyFields;
- private final boolean isPrimary;
private final int memPageSize;
private final int memNumPages;
@@ -32,7 +33,6 @@
this.typeTraits = typeTraits;
this.cmpFactories = cmpFactories;
this.bloomFilterKeyFields = bloomFilterKeyFields;
- this.isPrimary = isPrimary;
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
index 1bdbca3..e71d370 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
@@ -18,6 +18,8 @@
public class LSMInvertedIndexLocalResourceMetadata implements ILocalResourceMetadata {
+ private static final long serialVersionUID = 1L;
+
private final ITypeTraits[] invListTypeTraits;
private final IBinaryComparatorFactory[] invListCmpFactories;
private final ITypeTraits[] tokenTypeTraits;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DeadlockDetector.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DeadlockDetector.java
index 6b6f8e8..900725b 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DeadlockDetector.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/DeadlockDetector.java
@@ -27,7 +27,6 @@
private JobId tempJobIdObj; //temporary object to avoid object creation
private DatasetId tempDatasetIdObj; //temporary object to avoid object creation
-
public DeadlockDetector(HashMap<JobId, JobInfo> jobHT, HashMap<DatasetId, DatasetLockInfo> datasetResourceHT,
EntityLockInfoManager entityLockInfoManager, EntityInfoManager entityInfoManager,
LockWaiterManager lockWaiterManager) {
@@ -43,10 +42,10 @@
tempJobIdObj = new JobId(0);
tempDatasetIdObj = new DatasetId(0);
}
-
- public boolean isSafeToAdd(DatasetLockInfo dLockInfo, int eLockInfo, int entityInfo, boolean isDatasetLockInfo, boolean isUpgrade) {
+
+ public boolean isSafeToAdd(DatasetLockInfo dLockInfo, int eLockInfo, int entityInfo, boolean isDatasetLockInfo,
+ boolean isUpgrade) {
int holder;
- int nextHolder;
int visitedHolder;
int callerId = entityInfoManager.getJobId(entityInfo);
int datasetId = entityInfoManager.getDatasetId(entityInfo);
@@ -67,7 +66,7 @@
} else {
getHolderList(datasetId, hashValue, holderList);
}
-
+
//check whether this caller is upgrader or not
//if it is upgrader, then handle it as special case in the following manner
//if there is another upgrader or waiter of which lock mode is not compatible with the caller's lock mode,
@@ -79,11 +78,11 @@
//there is no case such that while a job is holding any mode of lock on a dataset and waits for the same dataset as an waiter.
//But the job may wait for the same dataset as an upgrader.
}
-
+
//TODO
//check whether when there are multiple resources, the waiter and upgrader should be distinguished or not.
//The current logic doesn't distinguish these two types of waiter.
-
+
//while holderList is not empty
holderList.beginIterate();
holder = holderList.getNextKey();
@@ -136,6 +135,7 @@
/**
* Get holder list of dataset if hashValue == -1. Get holder list of entity otherwise.
* Where, a holder is a jobId, not entityInfo's slotNum
+ *
* @param datasetId
* @param hashValue
* @param holderList
@@ -147,35 +147,35 @@
int entityInfo;
int waiterObjId;
LockWaiter waiterObj;
-
+
//get datasetLockInfo
tempDatasetIdObj.setId(datasetId);
dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
if (dLockInfo == null) {
return;
}
-
+
if (hashValue == -1) {
//get S/X-lock holders of dataset
entityInfo = dLockInfo.getLastHolder();
- while(entityInfo != -1) {
+ while (entityInfo != -1) {
holderList.put(entityInfoManager.getJobId(entityInfo), 0);
entityInfo = entityInfoManager.getPrevEntityActor(entityInfo);
}
-
+
//get IS/IX-lock holders of dataset
entityHT = dLockInfo.getEntityResourceHT();
entityHT.beginIterate();
entityLockInfo = entityHT.getNextValue();
while (entityLockInfo != -1) {
-
+
//1. add holder of eLockInfo to holerList
entityInfo = entityLockInfoManager.getLastHolder(entityLockInfo);
while (entityInfo != -1) {
holderList.put(entityInfoManager.getJobId(entityInfo), 0);
entityInfo = entityInfoManager.getPrevEntityActor(entityInfo);
}
-
+
//2. add waiter of eLockInfo to holderList since waiter of entityLock is a holder of datasetLock
//(Upgraders need not to be added since upgraders are also holders)
waiterObjId = entityLockInfoManager.getFirstWaiter(entityLockInfo);
@@ -185,7 +185,7 @@
holderList.put(entityInfoManager.getJobId(entityInfo), 0);
waiterObjId = waiterObj.getNextWaiterObjId();
}
-
+
entityLockInfo = entityHT.getNextValue();
}
} else {
@@ -205,6 +205,7 @@
/**
* Get waiting resource list of jobId, where a resource is represented with entityInfo's slot number
+ *
* @param jobId
* @param resourceList
*/
@@ -213,24 +214,23 @@
int waiterId;
LockWaiter waiterObj;
int entityInfo;
-
+
//get JobInfo
tempJobIdObj.setId(jobId);
jobInfo = jobHT.get(tempJobIdObj);
if (IS_DEBUG_MODE) {
if (jobInfo == null) {
- System.out.println(Thread.currentThread().getName()+"jobId:"+jobId);
+ System.out.println(Thread.currentThread().getName() + "jobId:" + jobId);
}
}
-
+
//get WaiterObj
waiterId = jobInfo.getFirstWaitingResource();
- while (waiterId != -1)
- {
+ while (waiterId != -1) {
waiterObj = lockWaiterManager.getLockWaiter(waiterId);
entityInfo = waiterObj.getEntityInfoSlot();
resourceList.put(entityInfo, -1);
- waiterId = waiterObj.getNextWaitingResourceObjId();
+ waiterId = waiterObj.getNextWaitingResourceObjId();
}
return;
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
index 2542157..59c20f2 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
@@ -18,7 +18,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
/**
@@ -382,7 +381,7 @@
//This entityInfo(i.e., holder) is the last resource held by this job.
jobInfo.setlastHoldingResource(prev);
}
-
+
//jobInfo.decreaseDatasetLockCount(holder);
}
@@ -498,11 +497,11 @@
&& hashVal == entityInfoManager.getPKHashVal(entityInfo)) {
return entityInfo;
}
-// if (LockManager.IS_DEBUG_MODE) {
-// System.out.println("eLockInfo(" + eLockInfo + "),entityInfo(" + entityInfo + "), Request[" + jobId
-// + "," + hashVal + "]:Result[" + entityInfoManager.getJobId(entityInfo) + ","
-// + entityInfoManager.getPKHashVal(entityInfo) + "]");
-// }
+ // if (LockManager.IS_DEBUG_MODE) {
+ // System.out.println("eLockInfo(" + eLockInfo + "),entityInfo(" + entityInfo + "), Request[" + jobId
+ // + "," + hashVal + "]:Result[" + entityInfoManager.getJobId(entityInfo) + ","
+ // + entityInfoManager.getPKHashVal(entityInfo) + "]");
+ // }
entityInfo = entityInfoManager.getPrevEntityActor(entityInfo);
}
@@ -708,9 +707,9 @@
setFirstWaiter(currentSlot, -1);
setUpgrader(currentSlot, -1);
occupiedSlots++;
- if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName() + " Allocated ELockInfo[" + currentSlot + "]");
- }
+ if (LockManager.IS_DEBUG_MODE) {
+ System.out.println(Thread.currentThread().getName() + " Allocated ELockInfo[" + currentSlot + "]");
+ }
return currentSlot;
}
@@ -718,9 +717,9 @@
setNextFreeSlot(slotNum, freeSlotNum);
freeSlotNum = slotNum;
occupiedSlots--;
- if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName() + " Deallocated ELockInfo[" + slotNum + "]");
- }
+ if (LockManager.IS_DEBUG_MODE) {
+ System.out.println(Thread.currentThread().getName() + " Deallocated ELockInfo[" + slotNum + "]");
+ }
}
public void deinitialize() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
index 442eb5b..fb5bb68 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
@@ -21,7 +21,6 @@
import java.util.Map.Entry;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.logging.Logger;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.service.logging.LogType;
@@ -44,8 +43,6 @@
public class LockManager implements ILockManager {
- private static final Logger LOGGER = Logger.getLogger(LockManager.class.getName());
- private static final int LOCK_MANAGER_INITIAL_HASH_TABLE_SIZE = 50;// do we need this?
public static final boolean IS_DEBUG_MODE = false;//true
private TransactionSubsystem txnSubsystem;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
index 6bf7cd9..c3b47bc 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerDeterministicUnitTest.java
@@ -111,14 +111,14 @@
isSuccess = false;
break;
} else {
- log("Processed: "+lockRequest.prettyPrint());
+ log("Processed: " + lockRequest.prettyPrint());
}
} catch (ACIDException e) {
e.printStackTrace();
break;
}
}
-
+
if (isSuccess) {
log("\n*** Test Passed ***");
}
@@ -134,7 +134,7 @@
return validateExpectedResult(false);
} else if (request.requestType == RequestType.WAIT) {
try {
- Thread.sleep((long)request.entityHashValue);
+ Thread.sleep((long) request.entityHashValue);
} catch (InterruptedException e) {
e.printStackTrace();
return false;
@@ -146,9 +146,10 @@
log(request.threadName + " is not in the workerReadyQueue");
return false;
}
- log(Thread.currentThread().getName() + " waiting for "+request.threadName+" to be in the workerReadyQueue["+ i++ +"].");
+ log(Thread.currentThread().getName() + " waiting for " + request.threadName
+ + " to be in the workerReadyQueue[" + i++ + "].");
try {
- Thread.sleep((long)10);
+ Thread.sleep((long) 10);
} catch (InterruptedException e) {
e.printStackTrace();
return false;
@@ -172,9 +173,10 @@
log(request.threadName + " is not in the workerReadyQueue");
return false;
}
- log(Thread.currentThread().getName() + " waiting for "+request.threadName+" to be in the workerReadyQueue["+ i++ +"].");
+ log(Thread.currentThread().getName() + " waiting for " + request.threadName
+ + " to be in the workerReadyQueue[" + i++ + "].");
try {
- Thread.sleep((long)10);
+ Thread.sleep((long) 10);
} catch (InterruptedException e) {
e.printStackTrace();
}
@@ -186,7 +188,7 @@
worker.setWait(false);
worker.notify();
}
-
+
try {
Thread.sleep((long) defaultWaitTime);
} catch (InterruptedException e) {
@@ -241,7 +243,7 @@
}
expectedResultList.add(list);
}
- } else if (requestType.equals("DW")) {
+ } else if (requestType.equals("DW")) {
defaultWaitTime = scanner.nextInt();
log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + "," + defaultWaitTime);
continue;
@@ -259,8 +261,8 @@
txnContext = new TransactionContext(new JobId(jobId), txnProvider);
jobMap.put(jobId, txnContext);
}
- log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + ",J" + jobId + ",D" + datasetId + ",E"
- + PKHashVal + "," + lockMode);
+ log("LockRequest[" + i++ + "]:T" + threadId + "," + requestType + ",J" + jobId + ",D" + datasetId
+ + ",E" + PKHashVal + "," + lockMode);
lockRequest = new LockRequest("Thread-" + threadId, getRequestType(requestType), new DatasetId(
datasetId), PKHashVal, getLockMode(lockMode), txnContext);
}
@@ -301,19 +303,19 @@
if (s.equals("RL")) {
return RequestType.RELEASE_LOCKS;
}
-
+
if (s.equals("CSQ")) {
return RequestType.CHECK_SEQUENCE;
}
-
+
if (s.equals("CST")) {
return RequestType.CHECK_SET;
}
-
+
if (s.equals("END")) {
return RequestType.END;
}
-
+
if (s.equals("W")) {
return RequestType.WAIT;
}
@@ -370,7 +372,7 @@
this.lockRequest = null;
needWait = true;
isDone = false;
- isAwaken = false;
+ isAwaken = false;
}
public boolean isAwaken() {
@@ -382,11 +384,10 @@
//initial wait
needWait = true;
isAwaken = false;
-
while (!isDone) {
while (needWait) {
- synchronized(this) {
+ synchronized (this) {
workerReadyQueue.push(this);
try {
this.wait();
@@ -407,14 +408,14 @@
if (lockRequest.txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS) {
if (lockRequest.txnContext.getTxnState() != TransactionState.ABORTED) {
lockRequest.txnContext.setTxnState(TransactionState.ABORTED);
- log("*** "+ lockRequest.txnContext.getJobId()+ " lock request causing deadlock ***");
- log("Abort --> Releasing all locks acquired by "+ lockRequest.txnContext.getJobId());
+ log("*** " + lockRequest.txnContext.getJobId() + " lock request causing deadlock ***");
+ log("Abort --> Releasing all locks acquired by " + lockRequest.txnContext.getJobId());
try {
lockMgr.releaseLocks(lockRequest.txnContext);
} catch (ACIDException e1) {
e1.printStackTrace();
}
- log("Abort --> Released all locks acquired by "+ lockRequest.txnContext.getJobId());
+ log("Abort --> Released all locks acquired by " + lockRequest.txnContext.getJobId());
}
isDone = true;
} else {
@@ -428,7 +429,7 @@
} catch (InterruptedException e) {
e.printStackTrace();
}
-
+
needWait = true;
isAwaken = false;
}
@@ -527,11 +528,11 @@
int listSize = threadIdList.size();
s.append("ExpectedList(Set):\t");
- for (i=0; i < listSize; i++) {
+ for (i = 0; i < listSize; i++) {
s.append(threadIdList.get(i)).append(" ");
}
s.append("\n");
-
+
while (queueSize < listSize) {
//wait until workers finish its task
try {
@@ -544,7 +545,7 @@
}
if (listSize != queueSize) {
- log("listSize:"+listSize +", queueSize:"+queueSize);
+ log("listSize:" + listSize + ", queueSize:" + queueSize);
return false;
}
@@ -574,9 +575,9 @@
LockRequestWorker worker = null;
int queueSize = workerReadyQueue.size();
int listSize = threadIdList.size();
-
+
s.append("ExpectedList(Sequence):\t");
- for (i=0; i < listSize; i++) {
+ for (i = 0; i < listSize; i++) {
s.append(threadIdList.get(i)).append(" ");
}
s.append("\n");
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
index ae1cbb8..89a15ef 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManagerRandomUnitTest.java
@@ -55,7 +55,7 @@
TransactionContext txnContext = generateTxnContext(txnProvider);
for (int i = 0; i < childCount; i++) {
- System.out.println("Creating " + txnContext.getJobId() + "," + i+ "th EntityLockThread..");
+ System.out.println("Creating " + txnContext.getJobId() + "," + i + "th EntityLockThread..");
t = new Thread(new LockRequestProducer(txnProvider.getLockManager(), txnContext, false, false, false));
t.start();
}
@@ -63,16 +63,16 @@
private static void generateDatasetLockThread(TransactionSubsystem txnProvider) {
Thread t;
-// int childCount = rand.nextInt(MAX_NUM_OF_THREAD_IN_A_JOB);
-// if (MAX_NUM_OF_THREAD_IN_A_JOB != 0 && childCount == 0) {
-// childCount = 1;
-// }
+ // int childCount = rand.nextInt(MAX_NUM_OF_THREAD_IN_A_JOB);
+ // if (MAX_NUM_OF_THREAD_IN_A_JOB != 0 && childCount == 0) {
+ // childCount = 1;
+ // }
int childCount = 1;
-
+
TransactionContext txnContext = generateTxnContext(txnProvider);
for (int i = 0; i < childCount; i++) {
- System.out.println("Creating " + txnContext.getJobId() + "," + i + "th DatasetLockThread..");
+ System.out.println("Creating " + txnContext.getJobId() + "," + i + "th DatasetLockThread..");
t = new Thread(new LockRequestProducer(txnProvider.getLockManager(), txnContext, true, false, false));
t.start();
}
@@ -88,7 +88,7 @@
TransactionContext txnContext = generateTxnContext(txnProvider);
for (i = 0; i < childCount - 1; i++) {
- System.out.println("Creating " + txnContext.getJobId() + "," + i + "th EntityLockUpgradeThread(false)..");
+ System.out.println("Creating " + txnContext.getJobId() + "," + i + "th EntityLockUpgradeThread(false)..");
t = new Thread(new LockRequestProducer(txnProvider.getLockManager(), txnContext, false, true, false));
t.start();
}
@@ -110,7 +110,6 @@
class LockRequestProducer implements Runnable {
- private static final long serialVersionUID = -3191274684985609965L;
private static final int MAX_DATASET_NUM = 10;//10
private static final int MAX_ENTITY_NUM = 30;//30
private static final int MAX_LOCK_MODE_NUM = 2;
@@ -161,7 +160,6 @@
return;
} finally {
-
/*
System.out.println("" + Thread.currentThread().getName() + "\n" + requestHistory.toString() + ""
+ Thread.currentThread().getName() + "\n");
@@ -289,8 +287,8 @@
int datasetId = rand.nextInt(MAX_DATASET_NUM);
int entityHashValue = -1;
byte lockMode = (byte) (rand.nextInt(MAX_LOCK_MODE_NUM));
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId),
+ entityHashValue, lockMode, txnContext);
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
sendRequest(request);
@@ -303,8 +301,8 @@
int datasetId = lockRequest.datasetIdObj.getId();
int entityHashValue = -1;
byte lockMode = LockMode.S;//lockMode is not used for unlock() call.
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId),
+ entityHashValue, lockMode, txnContext);
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
sendRequest(request);
@@ -314,8 +312,8 @@
int requestType = rand.nextInt(MAX_LOCK_REQUEST_TYPE_NUM);
int datasetId = rand.nextInt(MAX_DATASET_NUM);
int entityHashValue = rand.nextInt(MAX_ENTITY_NUM);
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId),
+ entityHashValue, lockMode, txnContext);
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
sendRequest(request);
@@ -348,8 +346,8 @@
int datasetId = lockRequest.datasetIdObj.getId();
int entityHashValue = lockRequest.entityHashValue;
byte lockMode = LockMode.X;
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(
+ datasetId), entityHashValue, lockMode, txnContext);
request.isUpgrade = true;
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
@@ -381,8 +379,8 @@
int datasetId = lockRequest.datasetIdObj.getId();
int entityHashValue = lockRequest.entityHashValue;
byte lockMode = lockRequest.lockMode;
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(
+ datasetId), entityHashValue, lockMode, txnContext);
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
sendRequest(request);
@@ -407,14 +405,14 @@
existLockRequest = true;
break;
}
-
+
if (existLockRequest) {
int requestType = RequestType.RELEASE_LOCKS;
int datasetId = lockRequest.datasetIdObj.getId();
int entityHashValue = lockRequest.entityHashValue;
byte lockMode = lockRequest.lockMode;
- LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(datasetId), entityHashValue, lockMode,
- txnContext);
+ LockRequest request = new LockRequest(Thread.currentThread().getName(), requestType, new DatasetId(
+ datasetId), entityHashValue, lockMode, txnContext);
requestQueue.add(request);
requestHistory.append(request.prettyPrint());
sendRequest(request);
@@ -426,19 +424,19 @@
switch (request.requestType) {
case RequestType.LOCK:
try {
- lockMgr.lock(request.datasetIdObj, request.entityHashValue, request.lockMode, request.txnContext);
+ lockMgr.lock(request.datasetIdObj, request.entityHashValue, request.lockMode, request.txnContext);
} catch (ACIDException e) {
if (request.txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS) {
if (request.txnContext.getTxnState() != TransactionState.ABORTED) {
request.txnContext.setTxnState(TransactionState.ABORTED);
- log("*** "+ request.txnContext.getJobId()+ " lock request causing deadlock ***");
- log("Abort --> Releasing all locks acquired by "+ request.txnContext.getJobId());
+ log("*** " + request.txnContext.getJobId() + " lock request causing deadlock ***");
+ log("Abort --> Releasing all locks acquired by " + request.txnContext.getJobId());
try {
lockMgr.releaseLocks(request.txnContext);
} catch (ACIDException e1) {
e1.printStackTrace();
}
- log("Abort --> Released all locks acquired by "+ request.txnContext.getJobId());
+ log("Abort --> Released all locks acquired by " + request.txnContext.getJobId());
}
isDone = true;
} else {
@@ -448,19 +446,20 @@
break;
case RequestType.INSTANT_LOCK:
try {
- lockMgr.instantLock(request.datasetIdObj, request.entityHashValue, request.lockMode, request.txnContext);
+ lockMgr.instantLock(request.datasetIdObj, request.entityHashValue, request.lockMode,
+ request.txnContext);
} catch (ACIDException e) {
if (request.txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS) {
if (request.txnContext.getTxnState() != TransactionState.ABORTED) {
request.txnContext.setTxnState(TransactionState.ABORTED);
- log("*** "+ request.txnContext.getJobId()+ " lock request causing deadlock ***");
- log("Abort --> Releasing all locks acquired by "+ request.txnContext.getJobId());
+ log("*** " + request.txnContext.getJobId() + " lock request causing deadlock ***");
+ log("Abort --> Releasing all locks acquired by " + request.txnContext.getJobId());
try {
lockMgr.releaseLocks(request.txnContext);
} catch (ACIDException e1) {
e1.printStackTrace();
}
- log("Abort --> Released all locks acquired by "+ request.txnContext.getJobId());
+ log("Abort --> Released all locks acquired by " + request.txnContext.getJobId());
}
isDone = true;
} else {
@@ -486,13 +485,13 @@
throw new UnsupportedOperationException("Unsupported lock method");
}
try {
- Thread.sleep((long)0);
+ Thread.sleep((long) 0);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
-
+
private void log(String s) {
System.out.println(s);
}
@@ -508,6 +507,7 @@
public boolean isTryLockFailed;
public long requestTime;
public String threadName;
+
public LockRequest(String threadName, int requestType, DatasetId datasetIdObj, int entityHashValue, byte lockMode,
TransactionContext txnContext) {
this.requestType = requestType;
@@ -520,13 +520,13 @@
isUpgrade = false;
isTryLockFailed = false;//used for TryLock request not to call Unlock when the tryLock failed.
}
-
+
public LockRequest(String threadName, int requestType) {
this.requestType = requestType;
this.requestTime = System.currentTimeMillis();
this.threadName = new String(threadName);
}
-
+
//used for "W" request type
public LockRequest(String threadName, int requestType, int waitTime) {
this.requestType = requestType;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
index 699fd74..6bc8c6b 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/TimeOutDetector.java
@@ -3,7 +3,6 @@
import java.util.LinkedList;
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
-import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
/**
* @author pouria, kisskys
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
index 8817b3f..4319b8b 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/FileBasedBuffer.java
@@ -36,7 +36,7 @@
this.filePath = filePath;
this.nextWritePosition = offset;
buffer = ByteBuffer.allocate(size);
- raf = new RandomAccessFile(new File(filePath), "rws");
+ raf = new RandomAccessFile(new File(filePath), "rw");
raf.seek(offset);
fileChannel = raf.getChannel();
fileChannel.read(buffer);
@@ -75,7 +75,7 @@
buffer.position(0);
buffer.limit(size);
fileChannel.write(buffer);
- fileChannel.force(false);
+ fileChannel.force(true);
erase();
}
@@ -126,9 +126,9 @@
@Override
public void reset(String filePath, long nextWritePosition, int size) throws IOException {
if (!filePath.equals(this.filePath)) {
- raf.close();
+ raf.close();//required?
fileChannel.close();
- raf = new RandomAccessFile(filePath, "rws");
+ raf = new RandomAccessFile(filePath, "rw");
this.filePath = filePath;
}
this.nextWritePosition = nextWritePosition;
@@ -139,6 +139,23 @@
buffer.limit(size);
this.size = size;
}
+
+ @Override
+ public void close() throws IOException {
+ fileChannel.close();
+ }
+
+ @Override
+ public void open(String filePath, long offset, int size) throws IOException {
+ raf = new RandomAccessFile(filePath, "rw");
+ this.nextWritePosition = offset;
+ fileChannel = raf.getChannel();
+ fileChannel.position(offset);
+ erase();
+ buffer.position(0);
+ buffer.limit(size);
+ this.size = size;
+ }
public long getNextWritePosition() {
return nextWritePosition;
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
index a0620f5..e1f9f95 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/IFileBasedBuffer.java
@@ -35,4 +35,8 @@
public void setNextWritePosition(long writePosition);
+ public void close() throws IOException;
+
+ public void open(String filePath, long offset, int size) throws IOException;
+
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
index b4257d5..8d66f6b 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
@@ -103,7 +103,7 @@
public static final int ACTIVE = 1;
}
- private AtomicLong lastFlushedLsn = new AtomicLong(-1);
+ private AtomicLong lastFlushedLSN = new AtomicLong(-1);
/*
* When the transaction eco-system comes to life, the log manager positions
@@ -111,7 +111,7 @@
* lsn value of the next log record to be written after a system (re)start.
* The value is zero when the system is starting for the first time.
*/
- private long startingLsn = 0;
+ private long startingLSN = 0;
/*
* lsn represents the monotonically increasing long value that can be broken
@@ -126,7 +126,7 @@
}
public AtomicLong getLastFlushedLsn() {
- return lastFlushedLsn;
+ return lastFlushedLSN;
}
public AtomicInteger getLogPageStatus(int pageIndex) {
@@ -138,7 +138,7 @@
}
public long incrementLastFlushedLsn(long delta) {
- return lastFlushedLsn.addAndGet(delta);
+ return lastFlushedLSN.addAndGet(delta);
}
public LogManager(TransactionSubsystem provider) throws ACIDException {
@@ -238,7 +238,7 @@
}
public int getLogPageIndex(long lsnValue) {
- return (int) ((lsnValue - startingLsn) / logManagerProperties.getLogPageSize()) % numLogPages;
+ return (int) ((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages;
}
@@ -256,7 +256,7 @@
* record is (to be) placed.
*/
public int getLogPageOffset(long lsnValue) {
- return (int) (lsnValue - startingLsn) % logManagerProperties.getLogPageSize();
+ return (int) (lsnValue - startingLSN) % logManagerProperties.getLogPageSize();
}
/*
@@ -369,7 +369,7 @@
// Before the count is incremented, if the flusher flushed the allocated page,
// then retry to get new LSN. Otherwise, the log with allocated lsn will be lost.
- if (lastFlushedLsn.get() >= retVal) {
+ if (lastFlushedLSN.get() >= retVal) {
logPageOwnerCount[pageIndex].decrementAndGet();
continue;
}
@@ -654,19 +654,44 @@
throw new ACIDException("Failed to delete a file: " + name);
}
}
+ closeLogPages();
initLSN();
+ openLogPages();
}
private PhysicalLogLocator initLSN() throws ACIDException {
PhysicalLogLocator nextPhysicalLsn = LogUtil.initializeLogAnchor(this);
- startingLsn = nextPhysicalLsn.getLsn();
- lastFlushedLsn.set(startingLsn - 1);
+ startingLSN = nextPhysicalLsn.getLsn();
+ lastFlushedLSN.set(startingLSN - 1);
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(" Starting lsn is : " + startingLsn);
+ LOGGER.info(" Starting lsn is : " + startingLSN);
}
- lsn.set(startingLsn);
+ lsn.set(startingLSN);
return nextPhysicalLsn;
}
+
+ private void closeLogPages() throws ACIDException {
+ for (int i=0; i < numLogPages; i++) {
+ try {
+ logPages[i].close();
+ } catch (IOException e) {
+ throw new ACIDException(e);
+ }
+ }
+ }
+
+ private void openLogPages() throws ACIDException {
+ try {
+ String filePath = LogUtil.getLogFilePath(logManagerProperties, LogUtil.getFileId(this, startingLSN));
+ for (int i = 0; i < numLogPages; i++) {
+ logPages[i].open(filePath,
+ LogUtil.getFileOffset(this, startingLSN) + i * logManagerProperties.getLogPageSize(),
+ logManagerProperties.getLogPageSize());
+ }
+ } catch (Exception e) {
+ throw new ACIDException(Thread.currentThread().getName() + " unable to create log buffer", e);
+ }
+ }
@Override
public ILogRecordHelper getLogRecordHelper() {
@@ -684,7 +709,7 @@
if (logicalLogLocator.getLsn() > lsn.get()) {
throw new ACIDException(" invalid lsn " + logicalLogLocator.getLsn());
}
- while (lastFlushedLsn.get() < logicalLogLocator.getLsn());
+ while (lastFlushedLSN.get() < logicalLogLocator.getLsn());
}
/*
diff --git a/pom.xml b/pom.xml
index 62ed003..6c26ae1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,6 +6,11 @@
<version>0.0.4-SNAPSHOT</version>
<packaging>pom</packaging>
+ <properties>
+ <algebricks.version>0.2.3-SNAPSHOT</algebricks.version>
+ <hyracks.version>0.2.3-SNAPSHOT</hyracks.version>
+ </properties>
+
<build>
<plugins>
<plugin>
@@ -78,6 +83,7 @@
<module>asterix-metadata</module>
<module>asterix-dist</module>
<module>asterix-test-framework</module>
+ <module>asterix-maven-plugins</module>
</modules>
<repositories>
@@ -136,4 +142,65 @@
<optional>true</optional>
</dependency>
</dependencies>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>algebricks-compiler</artifactId>
+ <version>${algebricks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-cc</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-nc</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-server</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-cli</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-hadoop</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-btree</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>
+ hyracks-storage-am-invertedindex
+ </artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
</project>