Add Support for Upsert Operation
This change allows users to execute upsert commands, which
couple a delete (if a record with the same primary key is
found) with an insert. The primary keys are locked before the
search is performed to ensure consistency.
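
For illustration only (dataset and field names below are
hypothetical), an upsert statement is expected to look roughly
like:

    upsert into dataset Customers
    ( {"cid": 5, "name": "John", "age": 31} );

If a record with the same primary key already exists, it is
deleted and the new record is inserted; otherwise the record
is simply inserted.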
Change-Id: I8999000331795a5949d621d2dd003903e057a521
Reviewed-on: https://asterix-gerrit.ics.uci.edu/477
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Till Westmann <tillw@apache.org>
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index bf38393..22f494b 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -9,123 +9,121 @@
 ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 ! KIND, either express or implied. See the License for the
 ! specific language governing permissions and limitations
 ! under the License.
 !-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>asterix</artifactId>
- <groupId>org.apache.asterix</groupId>
- <version>0.8.8-SNAPSHOT</version>
- </parent>
- <artifactId>asterix-algebra</artifactId>
-
- <licenses>
- <license>
- <name>Apache License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- <comments>A business-friendly OSS license</comments>
- </license>
- </licenses>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>javacc-maven-plugin</artifactId>
- <version>2.6</version>
- <executions>
- <execution>
- <id>javacc</id>
- <goals>
- <goal>javacc</goal>
- </goals>
- <configuration>
- <isStatic>false</isStatic>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- <pluginManagement>
- <plugins>
- <plugin>
- <groupId>org.eclipse.m2e</groupId>
- <artifactId>lifecycle-mapping</artifactId>
- <version>1.0.0</version>
- <configuration>
- <lifecycleMappingMetadata>
- <pluginExecutions>
- <pluginExecution>
- <pluginExecutionFilter>
- <groupId>
- org.codehaus.mojo
- </groupId>
- <artifactId>
- javacc-maven-plugin
- </artifactId>
- <versionRange>
- [2.6,)
- </versionRange>
- <goals>
- <goal>javacc</goal>
- </goals>
- </pluginExecutionFilter>
- <action>
- <ignore />
- </action>
- </pluginExecution>
- </pluginExecutions>
- </lifecycleMappingMetadata>
- </configuration>
- </plugin>
- </plugins>
- </pluginManagement>
- </build>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-runtime</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-lang-aql</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-lang-sqlpp</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-metadata</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-external-data</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.apache.asterix</groupId>
- <artifactId>asterix-transactions</artifactId>
- <version>0.8.8-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hyracks</groupId>
- <artifactId>algebricks-compiler</artifactId>
- </dependency>
- <dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- </dependency>
- </dependencies>
-</project>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>asterix</artifactId>
+ <groupId>org.apache.asterix</groupId>
+ <version>0.8.8-SNAPSHOT</version>
+ </parent>
+ <artifactId>asterix-algebra</artifactId>
+ <licenses>
+ <license>
+ <name>Apache License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ <comments>A business-friendly OSS license</comments>
+ </license>
+ </licenses>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>javacc-maven-plugin</artifactId>
+ <version>2.6</version>
+ <executions>
+ <execution>
+ <id>javacc</id>
+ <goals>
+ <goal>javacc</goal>
+ </goals>
+ <configuration>
+ <isStatic>false</isStatic>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId> org.codehaus.mojo
+ </groupId>
+ <artifactId> javacc-maven-plugin
+ </artifactId>
+ <versionRange> [2.6,)
+ </versionRange>
+ <goals>
+ <goal>javacc</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore />
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-runtime</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-lang-common</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-lang-aql</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-lang-sqlpp</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-metadata</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-external-data</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.asterix</groupId>
+ <artifactId>asterix-transactions</artifactId>
+ <version>0.8.8-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>algebricks-compiler</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.json</groupId>
+ <artifactId>json</artifactId>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
index 59d17bb..8dd6d33 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
@@ -31,9 +31,11 @@
public class CommitOperator extends AbstractExtensibleLogicalOperator {
private final List<LogicalVariable> primaryKeyLogicalVars;
+ private final LogicalVariable upsertVar;
- public CommitOperator(List<LogicalVariable> primaryKeyLogicalVars) {
+ public CommitOperator(List<LogicalVariable> primaryKeyLogicalVars, LogicalVariable upsertVar) {
this.primaryKeyLogicalVars = primaryKeyLogicalVars;
+ this.upsertVar = upsertVar;
}
@Override
@@ -44,11 +46,12 @@
@Override
public IOperatorExtension newInstance() {
- return new CommitOperator(primaryKeyLogicalVars);
+ return new CommitOperator(primaryKeyLogicalVars, upsertVar);
}
@Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+ throws AlgebricksException {
// TODO Auto-generated method stub
return false;
}
@@ -61,6 +64,9 @@
@Override
public void getUsedVariables(Collection<LogicalVariable> usedVars) {
usedVars.addAll(primaryKeyLogicalVars);
+ if (upsertVar != null) {
+ usedVars.add(upsertVar);
+ }
}
@Override
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
index 11570f8..5112fcf 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
@@ -43,11 +43,14 @@
private final List<LogicalVariable> primaryKeyLogicalVars;
private final JobId jobId;
private final int datasetId;
+ private final LogicalVariable upsertVar;
- public CommitPOperator(JobId jobId, int datasetId, List<LogicalVariable> primaryKeyLogicalVars) {
+ public CommitPOperator(JobId jobId, int datasetId, List<LogicalVariable> primaryKeyLogicalVars,
+ LogicalVariable upsertVar) {
this.jobId = jobId;
this.datasetId = datasetId;
this.primaryKeyLogicalVars = primaryKeyLogicalVars;
+ this.upsertVar = upsertVar;
}
@Override
@@ -76,15 +79,18 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
-
+ throws AlgebricksException {
+ AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
context);
int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]);
-
- AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
- CommitRuntimeFactory runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields,
- metadataProvider.isTemporaryDatasetWriteJob(), metadataProvider.isWriteTransaction());
+ int upsertVarIdx = -1;
+ CommitRuntimeFactory runtime = null;
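+        // If an upsert flag variable is present, locate its column in the input schema so the
+        // commit runtime can tell, per tuple, whether a previous record existed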
+ if (upsertVar != null) {
+ upsertVarIdx = inputSchemas[0].findVariable(upsertVar);
+ }
+ runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields,
+ metadataProvider.isTemporaryDatasetWriteJob(), metadataProvider.isWriteTransaction(), upsertVarIdx);
builder.contributeMicroOperator(op, runtime, recDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
index ce80291..7da4db7 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
@@ -28,6 +28,8 @@
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.common.transactions.LogRecord;
+import org.apache.asterix.common.utils.TransactionUtil;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntime;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -42,19 +44,18 @@
private final static long SEED = 0L;
- private final ITransactionManager transactionManager;
- private final ILogManager logMgr;
- private final JobId jobId;
- private final int datasetId;
- private final int[] primaryKeyFields;
- private final boolean isTemporaryDatasetWriteJob;
- private final boolean isWriteTransaction;
- private final long[] longHashes;
- private final LogRecord logRecord;
-
- private ITransactionContext transactionContext;
- private FrameTupleAccessor frameTupleAccessor;
- private final FrameTupleReference frameTupleReference;
+ protected final ITransactionManager transactionManager;
+ protected final ILogManager logMgr;
+ protected final JobId jobId;
+ protected final int datasetId;
+ protected final int[] primaryKeyFields;
+ protected final boolean isTemporaryDatasetWriteJob;
+ protected final boolean isWriteTransaction;
+ protected final long[] longHashes;
+ protected final LogRecord logRecord;
+ protected final FrameTupleReference frameTupleReference;
+ protected ITransactionContext transactionContext;
+ protected FrameTupleAccessor frameTupleAccessor;
public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction) {
@@ -85,7 +86,6 @@
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- int pkHash = 0;
frameTupleAccessor.reset(buffer);
int nTuple = frameTupleAccessor.getTupleCount();
for (int t = 0; t < nTuple; t++) {
@@ -103,19 +103,23 @@
transactionContext.notifyOptracker(false);
} else {
frameTupleReference.reset(frameTupleAccessor, t);
- pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
- logRecord.formEntityCommitLogRecord(transactionContext, datasetId, pkHash, frameTupleReference,
- primaryKeyFields);
try {
+ formLogRecord(buffer, t);
logMgr.log(logRecord);
- } catch (ACIDException e) {
+ } catch (ACIDException | AlgebricksException e) {
throw new HyracksDataException(e);
}
}
}
}
- private int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
+ protected void formLogRecord(ByteBuffer buffer, int t) throws AlgebricksException {
+ int pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
+ TransactionUtil.formEntityCommitLogRecord(logRecord, transactionContext, datasetId, pkHash, frameTupleReference,
+ primaryKeyFields);
+ }
+
+ protected int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
return Math.abs((int) longHashes[0]);
}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
index 6858db1..7d03796 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
@@ -34,14 +34,16 @@
private final int[] primaryKeyFields;
private final boolean isTemporaryDatasetWriteJob;
private final boolean isWriteTransaction;
+ private final int upsertVarIdx;
public CommitRuntimeFactory(JobId jobId, int datasetId, int[] primaryKeyFields, boolean isTemporaryDatasetWriteJob,
- boolean isWriteTransaction) {
+ boolean isWriteTransaction, int upsertVarIdx) {
this.jobId = jobId;
this.datasetId = datasetId;
this.primaryKeyFields = primaryKeyFields;
this.isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob;
this.isWriteTransaction = isWriteTransaction;
+ this.upsertVarIdx = upsertVarIdx;
}
@Override
@@ -51,7 +53,12 @@
@Override
public IPushRuntime createPushRuntime(IHyracksTaskContext ctx) throws AlgebricksException {
- return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob,
- isWriteTransaction);
+ if (upsertVarIdx >= 0) {
+ return new UpsertCommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob,
+ isWriteTransaction, upsertVarIdx);
+ } else {
+ return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob,
+ isWriteTransaction);
+ }
}
}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/UpsertCommitRuntime.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/UpsertCommitRuntime.java
new file mode 100644
index 0000000..81996d1
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/UpsertCommitRuntime.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.algebra.operators.physical;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.common.transactions.JobId;
+import org.apache.asterix.common.utils.TransactionUtil;
+import org.apache.asterix.dataflow.data.nontagged.serde.ABooleanSerializerDeserializer;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class UpsertCommitRuntime extends CommitRuntime {
+ private final int upsertIdx;
+
+ public UpsertCommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
+ boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction, int upsertIdx) throws AlgebricksException {
+ super(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob, isWriteTransaction);
+ this.upsertIdx = upsertIdx;
+ }
+
+ @Override
+ protected void formLogRecord(ByteBuffer buffer, int t) throws AlgebricksException {
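+        // Note (assumption): the upsert flag field is a serialized ADM boolean; the "+ 1" below
+        // skips its one-byte type tag before reading the boolean value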
+ boolean isNull = ABooleanSerializerDeserializer.getBoolean(buffer.array(),
+ frameTupleAccessor.getFieldSlotsLength() + frameTupleAccessor.getTupleStartOffset(t)
+ + frameTupleAccessor.getFieldStartOffset(t, upsertIdx) + 1);
+ if (isNull) {
+ // Previous record not found (insert)
+ super.formLogRecord(buffer, t);
+ } else {
+ // Previous record found (delete + insert)
+ int pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
+ TransactionUtil.formEntityUpsertCommitLogRecord(logRecord, transactionContext, datasetId, pkHash,
+ frameTupleReference, primaryKeyFields);
+ }
+ }
+}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
index 43b2b11..a7327ff 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
@@ -44,8 +44,8 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
@@ -69,11 +69,11 @@
// produce insert - assign - assign* - datasource scan
AbstractLogicalOperator currentOp = (AbstractLogicalOperator) opRef.getValue();
- if (currentOp.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ if (currentOp.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
return false;
}
- InsertDeleteOperator insertOp = (InsertDeleteOperator) currentOp;
+ InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) currentOp;
if (insertOp.getOperation() != Kind.INSERT) {
return false;
}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index 1f0fbb7..de6eebf 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -46,7 +46,7 @@
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
@@ -97,16 +97,16 @@
*/
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
- if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
- InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
- if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
+ if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
+ InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
+ if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE)
return false;
// Remember this is the operator we need to modify
op = insertDeleteOp;
// Derive the required ARecordType based on the schema of the AqlDataSource
- InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
+ InsertDeleteUpsertOperator insertDeleteOperator = (InsertDeleteUpsertOperator) op2;
AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
IAType[] schemaTypes = dataSource.getSchemaTypes();
requiredRecordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
index adce8ce..916355d 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
@@ -36,7 +36,7 @@
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.MaterializePOperator;
@@ -53,11 +53,11 @@
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
- if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
return false;
}
- InsertDeleteOperator insertOp = (InsertDeleteOperator) op;
+ InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((DatasetDataSource) insertOp.getDataSource())
.getDataset().getDatasetName());
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index 952368f..956e6c5 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -69,8 +69,9 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
@@ -93,13 +94,13 @@
return false;
}
AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
- if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
return false;
}
FunctionIdentifier fid = null;
/** find the record variable */
- InsertDeleteOperator insertOp = (InsertDeleteOperator) op1;
+ InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op1;
ILogicalExpression recordExpr = insertOp.getPayloadExpression().getValue();
List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
/** assume the payload is always a single variable expression */
@@ -136,6 +137,15 @@
AssignOperator assignOp2 = (AssignOperator) op2;
recordVar.addAll(assignOp2.getVariables());
}
+
+ /*
+ * At this point, we have the record variable and the insert/delete/upsert operator
+ * Note: We have two operators:
+         * 1. An InsertDeleteUpsertOperator (primary)
+         * 2. An IndexInsertDeleteUpsertOperator (secondary)
+         * The current insertOp is of the first kind.
+ */
+
AqlDataSource datasetSource = (AqlDataSource) insertOp.getDataSource();
AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
String dataverseName = datasetSource.getId().getDataverseName();
@@ -155,7 +165,11 @@
throw new AlgebricksException("Only record types can be indexed.");
}
ARecordType recType = (ARecordType) itemType;
+ // recType might be replaced with enforced record type and we want to keep a reference to the original record
+ // type
+ ARecordType originalRecType = recType;
List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+        // Set the top operator pointer to the primary InsertDeleteUpsertOperator
ILogicalOperator currentTop = op1;
boolean hasSecondaryIndex = false;
@@ -169,7 +183,7 @@
});
- // Check whether multiple keyword or n-gram indexes exist
+ // Check whether multiple indexes exist
int secondaryIndexTotalCnt = 0;
for (Index index : indexes) {
if (index.isSecondaryIndex()) {
@@ -177,14 +191,16 @@
}
}
- // Initialize inputs to the SINK operator
+        // At this point, we have the data type info and the index info as well
+        // Initialize inputs to the SINK operator; op0 (the SINK) is now without input
if (secondaryIndexTotalCnt > 0) {
op0.getInputs().clear();
}
// Replicate Operator is applied only when doing the bulk-load.
- AbstractLogicalOperator replicateOp = null;
+ ReplicateOperator replicateOp = null;
+        // No need to handle the upsert case for bulk load since upsert with bulk load is not supported as of now
if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload()) {
// Split the logical plan into "each secondary index update branch"
// to replicate each <PK,RECORD> pair.
@@ -195,65 +211,76 @@
currentTop = replicateOp;
}
- // Prepare filtering field information
- List<String> additionalFilteringField = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
- List<LogicalVariable> additionalFilteringVars = null;
- List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
- List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
- AssignOperator additionalFilteringAssign = null;
+        // Prepare filtering field information (this is the filter created using the "filter with" keyword in the
+        // create dataset DDL)
+ List<String> filteringFields = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
+ List<LogicalVariable> filteringVars = null;
+ List<Mutable<ILogicalExpression>> filteringExpressions = null;
- if (additionalFilteringField != null) {
- additionalFilteringVars = new ArrayList<LogicalVariable>();
- additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
- additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
- prepareVarAndExpression(additionalFilteringField, recType.getFieldNames(), recordVar.get(0),
- additionalFilteringAssignExpressions, additionalFilteringVars, context);
- additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
- additionalFilteringAssignExpressions);
- for (LogicalVariable var : additionalFilteringVars) {
- additionalFilteringExpressions
- .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+ if (filteringFields != null) {
+            // The filter field var already exists; we can simply get it from the insert op
+ filteringVars = new ArrayList<LogicalVariable>();
+ filteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+ for (Mutable<ILogicalExpression> filteringExpression : insertOp.getAdditionalFilteringExpressions()) {
+ filteringExpression.getValue().getUsedVariables(filteringVars);
+ for (LogicalVariable var : filteringVars) {
+ filteringExpressions
+ .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+ }
}
}
// Iterate each secondary index and applying Index Update operations.
+        // At first, op1 is the primary insert/delete/upsert operator (insertOp)
for (Index index : indexes) {
List<LogicalVariable> projectVars = new ArrayList<LogicalVariable>();
- VariableUtilities.getUsedVariables(op1, projectVars);
+ // Q. Why do we add these used variables to the projectVars?
+ // A. We want to always keep the primary keys, the record, and the filtering values
+ // In addition to those, we want to extract and keep the secondary key
+ VariableUtilities.getUsedVariables(insertOp, projectVars);
if (!index.isSecondaryIndex()) {
continue;
}
LogicalVariable enforcedRecordVar = recordVar.get(0);
hasSecondaryIndex = true;
- //if the index is enforcing field types
+ /*
+             * If the index is enforcing field types (for open indexes), we add a cast
+             * operator to ensure type safety
+ */
if (index.isEnforcingKeyFileds()) {
try {
DatasetDataSource ds = (DatasetDataSource) (insertOp.getDataSource());
ARecordType insertRecType = (ARecordType) ds.getSchemaTypes()[ds.getSchemaTypes().length - 1];
+ // A new variable which represents the casted record
LogicalVariable castVar = context.newVar();
+ // create the expected record type = the original + the optional open field
ARecordType enforcedType = createEnforcedType(insertRecType, index);
- //introduce casting to enforced type
+ // introduce casting to enforced type
AbstractFunctionCallExpression castFunc = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.CAST_RECORD));
-
+ // The first argument is the record
castFunc.getArguments()
.add(new MutableObject<ILogicalExpression>(insertOp.getPayloadExpression().getValue()));
TypeComputerUtilities.setRequiredAndInputTypes(castFunc, enforcedType, insertRecType);
+                    // The AssignOperator puts the casted record into the cast var
AssignOperator newAssignOperator = new AssignOperator(castVar,
new MutableObject<ILogicalExpression>(castFunc));
+ // Connect the current top of the plan to the cast operator
newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
currentTop = newAssignOperator;
- //project out casted record
projectVars.add(castVar);
enforcedRecordVar = castVar;
context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
context.computeAndSetTypeEnvironmentForOperator(currentTop);
recType = enforcedType;
+                    // We don't need to cast the old record; we just need an assignment function that extracts the SK
+                    // and an expression that references the new variables.
} catch (AsterixException e) {
throw new AlgebricksException(e);
}
}
+            // Get the secondary field names and types
List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
List<IAType> secondaryKeyTypes = index.getKeyFieldTypes();
List<LogicalVariable> secondaryKeyVars = new ArrayList<LogicalVariable>();
@@ -264,17 +291,35 @@
prepareVarAndExpression(secondaryKey, recType.getFieldNames(), enforcedRecordVar, expressions,
secondaryKeyVars, context);
}
-
+            // Used with the upsert operation:
+            // in case of upsert, we need vars and expressions for the old SK as well.
+ List<LogicalVariable> prevSecondaryKeyVars = null;
+ List<Mutable<ILogicalExpression>> prevExpressions = null;
+ List<Mutable<ILogicalExpression>> prevSecondaryExpressions = null;
+ AssignOperator prevSecondaryKeyAssign = null;
+ if (insertOp.getOperation() == Kind.UPSERT) {
+ prevSecondaryKeyVars = new ArrayList<LogicalVariable>();
+ prevExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+ prevSecondaryExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+ for (List<String> secondaryKey : secondaryKeyFields) {
+ prepareVarAndExpression(secondaryKey, originalRecType.getFieldNames(), insertOp.getPrevRecordVar(),
+ prevExpressions, prevSecondaryKeyVars, context);
+ }
+ prevSecondaryKeyAssign = new AssignOperator(prevSecondaryKeyVars, prevExpressions);
+ }
AssignOperator assign = new AssignOperator(secondaryKeyVars, expressions);
ProjectOperator project = new ProjectOperator(projectVars);
-
- if (additionalFilteringAssign != null) {
- additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
- assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
- } else {
- assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+ AssignOperator topAssign = assign;
+ assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+ if (insertOp.getOperation() == Kind.UPSERT) {
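+                // For upsert, also project the previous record so that the old secondary keys can be computed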
+ projectVars.add(insertOp.getPrevRecordVar());
+ if (filteringFields != null) {
+ // project prev filter value
+ projectVars.add(insertOp.getPrevFilterVar());
+ }
+ prevSecondaryKeyAssign.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+ topAssign = prevSecondaryKeyAssign;
}
-
// Only apply replicate operator when doing bulk-load
if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload()) {
project.getInputs().add(new MutableObject<ILogicalOperator>(replicateOp));
@@ -283,31 +328,38 @@
}
context.computeAndSetTypeEnvironmentForOperator(project);
-
- if (additionalFilteringAssign != null) {
- context.computeAndSetTypeEnvironmentForOperator(additionalFilteringAssign);
- }
-
context.computeAndSetTypeEnvironmentForOperator(assign);
- currentTop = assign;
+ if (insertOp.getOperation() == Kind.UPSERT) {
+ context.computeAndSetTypeEnvironmentForOperator(prevSecondaryKeyAssign);
+ }
+ currentTop = topAssign;
- // BTree, Keyword, or n-gram index case
+            // In case of an upsert operation, currentTop is an assign that carries both the old and the new secondary keys
if (index.getIndexType() == IndexType.BTREE || index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
|| index.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
|| index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
|| index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
+ // Create an expression per key
for (LogicalVariable secondaryKeyVar : secondaryKeyVars) {
secondaryExpressions.add(
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
}
- Mutable<ILogicalExpression> filterExpression = createFilterExpression(secondaryKeyVars,
- context.getOutputTypeEnvironment(currentTop), false);
+ Mutable<ILogicalExpression> filterExpression = null;
+ if (insertOp.getOperation() == Kind.UPSERT) {
+ for (LogicalVariable oldSecondaryKeyVar : prevSecondaryKeyVars) {
+ prevSecondaryExpressions.add(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(oldSecondaryKeyVar)));
+ }
+ } else {
+ filterExpression = createFilterExpression(secondaryKeyVars,
+ context.getOutputTypeEnvironment(currentTop), false);
+ }
AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
// Introduce the TokenizeOperator only when doing bulk-load,
// and index type is keyword or n-gram.
if (index.getIndexType() != IndexType.BTREE && insertOp.isBulkload()) {
-
+                    // Note: in the bulk-load case, we don't need to handle the upsert operation
// Check whether the index is length-partitioned or not.
// If partitioned, [input variables to TokenizeOperator,
// token, number of token] pairs will be generated and
@@ -358,10 +410,10 @@
tokenUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assign));
context.computeAndSetTypeEnvironmentForOperator(tokenUpdate);
- IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+ IndexInsertDeleteUpsertOperator indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex,
insertOp.getPrimaryKeyExpressions(), tokenizeKeyExprs, filterExpression,
insertOp.getOperation(), insertOp.isBulkload());
- indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+ indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(tokenUpdate));
context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
@@ -371,10 +423,19 @@
} else {
// When TokenizeOperator is not needed
- IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+ IndexInsertDeleteUpsertOperator indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex,
insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
insertOp.getOperation(), insertOp.isBulkload());
- indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+
+ indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
+ // We add the necessary expressions for upsert
+ if (insertOp.getOperation() == Kind.UPSERT) {
+ indexUpdate.setPrevSecondaryKeyExprs(prevSecondaryExpressions);
+ if (filteringFields != null) {
+ indexUpdate.setPrevAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(insertOp.getPrevFilterVar())));
+ }
+ }
indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
currentTop = indexUpdate;
@@ -387,11 +448,13 @@
}
} else if (index.getIndexType() == IndexType.RTREE) {
+ // Get type, dimensions and number of keys
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
secondaryKeyFields.get(0), recType);
IAType spatialType = keyPairType.first;
int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numKeys = dimension * 2;
+ // Get variables and expressions
List<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
List<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
for (int i = 0; i < numKeys; i++) {
@@ -414,21 +477,63 @@
AssignOperator assignCoordinates = new AssignOperator(keyVarList, keyExprList);
assignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
context.computeAndSetTypeEnvironmentForOperator(assignCoordinates);
- // We must enforce the filter if the originating spatial type is
- // nullable.
- boolean forceFilter = keyPairType.second;
- Mutable<ILogicalExpression> filterExpression = createFilterExpression(keyVarList,
- context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
+ Mutable<ILogicalExpression> filterExpression = null;
+ AssignOperator originalAssignCoordinates = null;
+                // We do something similar for the previous key if the operation is an upsert
+ if (insertOp.getOperation() == Kind.UPSERT) {
+ List<LogicalVariable> originalKeyVarList = new ArrayList<LogicalVariable>();
+ List<Mutable<ILogicalExpression>> originalKeyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+                    // We don't do any filtering since nulls are expected among the previous keys
+ for (int i = 0; i < numKeys; i++) {
+ LogicalVariable keyVar = context.newVar();
+ originalKeyVarList.add(keyVar);
+ AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
+ createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(prevSecondaryKeyVars.get(0))));
+ createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
+ new ConstantExpression(new AsterixConstantValue(new AInt32(dimension)))));
+ createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
+ new ConstantExpression(new AsterixConstantValue(new AInt32(i)))));
+ originalKeyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
+ }
+ for (LogicalVariable secondaryKeyVar : originalKeyVarList) {
+ prevSecondaryExpressions.add(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(secondaryKeyVar)));
+ }
+ originalAssignCoordinates = new AssignOperator(originalKeyVarList, originalKeyExprList);
+ originalAssignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
+ context.computeAndSetTypeEnvironmentForOperator(originalAssignCoordinates);
+ } else {
+ // We must enforce the filter if the originating spatial type is
+ // nullable.
+ boolean forceFilter = keyPairType.second;
+ filterExpression = createFilterExpression(keyVarList,
+ context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
+ }
AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
- IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+ IndexInsertDeleteUpsertOperator indexUpdate = new IndexInsertDeleteUpsertOperator(dataSourceIndex,
insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
insertOp.getOperation(), insertOp.isBulkload());
- indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
- indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
+ indexUpdate.setAdditionalFilteringExpressions(filteringExpressions);
+ if (insertOp.getOperation() == Kind.UPSERT) {
+                    // set the previous filtering expression
+ if (filteringFields != null) {
+ indexUpdate.setPrevAdditionalFilteringExpression(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(insertOp.getPrevFilterVar())));
+ }
+                    // set the previous (old) secondary key expressions
+ indexUpdate.setPrevSecondaryKeyExprs(prevSecondaryExpressions);
+ // assign --> assign previous values --> secondary index upsert
+ indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(originalAssignCoordinates));
+ } else {
+ indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
+ }
currentTop = indexUpdate;
context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
if (insertOp.isBulkload()) {
+                    // For bulk load, we connect all fanned-out insert operators to a single SINK operator
op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
}
@@ -440,7 +545,10 @@
}
if (!insertOp.isBulkload()) {
+            // Remove the current input to the SINK operator
+            // (for an upsert, it has actually already been removed above)
op0.getInputs().clear();
+ // Connect the last index update to the SINK
op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
}
return true;
@@ -457,7 +565,7 @@
boolean openRecords = false;
String bridgeName = nestedFieldType.getTypeName();
int j;
- //Build the stack for the enforced type
+ // Build the stack for the enforced type
for (j = 1; j < splits.size(); j++) {
nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
bridgeName = nestedFieldType.getTypeName();
@@ -468,16 +576,16 @@
}
}
if (openRecords == true) {
- //create the smallest record
+ // create the smallest record
enforcedType = new ARecordType(splits.get(splits.size() - 2),
new String[] { splits.get(splits.size() - 1) },
new IAType[] { AUnionType.createNullableType(index.getKeyFieldTypes().get(i)) }, true);
- //create the open part of the nested field
+ // create the open part of the nested field
for (int k = splits.size() - 3; k > (j - 2); k--) {
enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
new IAType[] { AUnionType.createNullableType(enforcedType) }, true);
}
- //Bridge the gap
+ // Bridge the gap
Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
ARecordType parent = gapPair.first;
@@ -488,8 +596,8 @@
true);
} else {
- //Schema is closed all the way to the field
- //enforced fields are either null or strongly typed
+ // Schema is closed all the way to the field
+ // enforced fields are either null or strongly typed
enforcedType = new ARecordType(nestedFieldType.getTypeName(),
ArrayUtils.addAll(nestedFieldType.getFieldNames(), splits.get(splits.size() - 1)),
ArrayUtils.addAll(nestedFieldType.getFieldTypes(),
@@ -497,7 +605,7 @@
nestedFieldType.isOpen());
}
- //Create the enforcedtype for the nested fields in the schema, from the ground up
+ // Create the enforcedtype for the nested fields in the schema, from the ground up
if (nestedTypeStack.size() > 0) {
while (!nestedTypeStack.isEmpty()) {
Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
@@ -516,53 +624,68 @@
return enforcedType;
}
+    /**
+     * This method takes a list of {fields} (a subset of {recordFields}) and the original record variable,
+     * and populates {expressions} with expressions that evaluate to those fields (using field access
+     * functions) and {vars} with variables that represent them.
+     * @param fields
+     *            desired fields
+     * @param recordFields
+     *            all the record fields
+     * @param recordVar
+     *            the record variable
+     * @param expressions
+     *            output list of field-access expressions, one per desired field
+     * @param vars
+     *            output list of variables representing the desired fields
+     * @param context
+     *            the optimization context, used to create new variables
+     * @throws AlgebricksException
+     */
@SuppressWarnings("unchecked")
- private void prepareVarAndExpression(List<String> field, String[] fieldNames, LogicalVariable recordVar,
+ private void prepareVarAndExpression(List<String> fields, String[] recordFields, LogicalVariable recordVar,
List<Mutable<ILogicalExpression>> expressions, List<LogicalVariable> vars, IOptimizationContext context)
throws AlgebricksException {
+ // Get a reference to the record variable
Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(
new VariableReferenceExpression(recordVar));
+ // Get the desired field position
int pos = -1;
- if (field.size() == 1) {
- for (int j = 0; j < fieldNames.length; j++) {
- if (fieldNames[j].equals(field.get(0))) {
+ if (fields.size() == 1) {
+ for (int j = 0; j < recordFields.length; j++) {
+ if (recordFields[j].equals(fields.get(0))) {
pos = j;
break;
}
}
}
+        // Field not found --> this is either an open field or a nested field; it cannot be accessed by field index
+ AbstractFunctionCallExpression func;
if (pos == -1) {
- AbstractFunctionCallExpression func;
- if (field.size() > 1) {
+ if (fields.size() > 1) {
AOrderedList fieldList = new AOrderedList(new AOrderedListType(BuiltinType.ASTRING, null));
- for (int i = 0; i < field.size(); i++) {
- fieldList.add(new AString(field.get(i)));
+ for (int i = 0; i < fields.size(); i++) {
+ fieldList.add(new AString(fields.get(i)));
}
Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(
new ConstantExpression(new AsterixConstantValue(fieldList)));
- //Create an expression for the nested case
+ // Create an expression for the nested case
func = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED), varRef, fieldRef);
} else {
Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(
- new ConstantExpression(new AsterixConstantValue(new AString(field.get(0)))));
- //Create an expression for the open field case (By name)
+ new ConstantExpression(new AsterixConstantValue(new AString(fields.get(0)))));
+ // Create an expression for the open field case (By name)
func = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME), varRef, fieldRef);
}
- expressions.add(new MutableObject<ILogicalExpression>(func));
- LogicalVariable newVar = context.newVar();
- vars.add(newVar);
} else {
// Assumes the indexed field is in the closed portion of the type.
Mutable<ILogicalExpression> indexRef = new MutableObject<ILogicalExpression>(
new ConstantExpression(new AsterixConstantValue(new AInt32(pos))));
- AbstractFunctionCallExpression func = new ScalarFunctionCallExpression(
+ func = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX), varRef, indexRef);
- expressions.add(new MutableObject<ILogicalExpression>(func));
- LogicalVariable newVar = context.newVar();
- vars.add(newVar);
}
+ expressions.add(new MutableObject<ILogicalExpression>(func));
+ LogicalVariable newVar = context.newVar();
+ vars.add(newVar);
}
@SuppressWarnings("unchecked")
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
index fc9ae85..0c847e6 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
@@ -39,7 +39,7 @@
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
@@ -89,15 +89,15 @@
if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
return false;
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
- if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
+ if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT)
return false;
- InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
- if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
+ InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
+ if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE)
return false;
/**
* get required record type
*/
- InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
+ InsertDeleteUpsertOperator insertDeleteOperator = (InsertDeleteUpsertOperator) op2;
AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
index c7b7bac..b088607 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
@@ -41,7 +41,7 @@
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
- if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+ if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
return false;
}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
index e05c338..f8df183 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
@@ -21,32 +21,38 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.lang3.mutable.Mutable;
-
import org.apache.asterix.algebra.operators.CommitOperator;
import org.apache.asterix.algebra.operators.physical.CommitPOperator;
import org.apache.asterix.common.transactions.JobId;
+import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.AqlMetadataProvider;
import org.apache.asterix.metadata.declared.DatasetDataSource;
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
public class ReplaceSinkOpWithCommitOpRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
- // TODO Auto-generated method stub
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -63,20 +69,43 @@
List<Mutable<ILogicalExpression>> primaryKeyExprs = null;
int datasetId = 0;
AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) sinkOperator.getInputs().get(0).getValue();
+ LogicalVariable upsertVar = null;
+ AssignOperator upsertFlagAssign = null;
while (descendantOp != null) {
- if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE) {
- IndexInsertDeleteOperator indexInsertDeleteOperator = (IndexInsertDeleteOperator) descendantOp;
- if (!indexInsertDeleteOperator.isBulkload()) {
- primaryKeyExprs = indexInsertDeleteOperator.getPrimaryKeyExpressions();
- datasetId = ((DatasetDataSource) indexInsertDeleteOperator.getDataSourceIndex().getDataSource())
- .getDataset().getDatasetId();
+ if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE_UPSERT) {
+ IndexInsertDeleteUpsertOperator indexInsertDeleteUpsertOperator = (IndexInsertDeleteUpsertOperator) descendantOp;
+ if (!indexInsertDeleteUpsertOperator.isBulkload()
+ && indexInsertDeleteUpsertOperator.getPrevSecondaryKeyExprs() == null) {
+ primaryKeyExprs = indexInsertDeleteUpsertOperator.getPrimaryKeyExpressions();
+ datasetId = ((DatasetDataSource) indexInsertDeleteUpsertOperator.getDataSourceIndex()
+ .getDataSource()).getDataset().getDatasetId();
break;
}
- } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
- InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) descendantOp;
- if (!insertDeleteOperator.isBulkload()) {
- primaryKeyExprs = insertDeleteOperator.getPrimaryKeyExpressions();
- datasetId = ((DatasetDataSource) insertDeleteOperator.getDataSource()).getDataset().getDatasetId();
+ } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
+ InsertDeleteUpsertOperator insertDeleteUpsertOperator = (InsertDeleteUpsertOperator) descendantOp;
+ if (!insertDeleteUpsertOperator.isBulkload()) {
+ primaryKeyExprs = insertDeleteUpsertOperator.getPrimaryKeyExpressions();
+ datasetId = ((DatasetDataSource) insertDeleteUpsertOperator.getDataSource()).getDataset()
+ .getDatasetId();
+ if (insertDeleteUpsertOperator.getOperation() == Kind.UPSERT) {
+                        // add a variable that records whether a previous record was found for this key
+ upsertVar = context.newVar();
+                        // build is-null(<previous record>) to detect whether a previous record exists
+ AbstractFunctionCallExpression isNullFunc = new ScalarFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL));
+                        // The argument is the previous-record variable
+ isNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(insertDeleteUpsertOperator.getPrevRecordVar())));
+                        // The AssignOperator assigns the is-null result to the upsert flag variable
+ upsertFlagAssign = new AssignOperator(upsertVar,
+ new MutableObject<ILogicalExpression>(isNullFunc));
+                        // Connect the current top of the plan to the new assign operator
+ upsertFlagAssign.getInputs()
+ .add(new MutableObject<ILogicalOperator>(sinkOperator.getInputs().get(0).getValue()));
+ sinkOperator.getInputs().clear();
+ sinkOperator.getInputs().add(new MutableObject<ILogicalOperator>(upsertFlagAssign));
+ context.computeAndSetTypeEnvironmentForOperator(upsertFlagAssign);
+ }
break;
}
}
@@ -102,8 +131,8 @@
JobId jobId = mp.getJobId();
//create the logical and physical operator
- CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars);
- CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars);
+ CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars, upsertVar);
+ CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars, upsertVar);
commitOperator.setPhysicalOperator(commitPOperator);
//create ExtensionOperator and put the commitOperator in it.
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
index 85c7478..9a82309 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
@@ -39,9 +39,9 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
@@ -282,12 +282,12 @@
}
@Override
- public Void visitInsertDeleteOperator(InsertDeleteOperator op, Void tag) throws AlgebricksException {
+ public Void visitInsertDeleteUpsertOperator(InsertDeleteUpsertOperator op, Void tag) throws AlgebricksException {
return null;
}
@Override
- public Void visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void tag) throws AlgebricksException {
+ public Void visitIndexInsertDeleteUpsertOperator(IndexInsertDeleteUpsertOperator op, Void tag) throws AlgebricksException {
return null;
}
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
index 7221972..5dc9f18 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -327,6 +327,18 @@
}
}
+ public static class CompiledUpsertStatement extends CompiledInsertStatement {
+
+ public CompiledUpsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+ super(dataverseName, datasetName, query, varCounter);
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.UPSERT;
+ }
+ }
+
public static class CompiledConnectFeedStatement implements ICompiledDmlStatement {
private String dataverseName;
private String feedName;
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index cd0d21a..b8fe0e1 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -80,6 +80,7 @@
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
import org.apache.asterix.om.functions.AsterixFunctionInfo;
+import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.AsterixAppContextInfo;
import org.apache.asterix.runtime.formats.FormatUtils;
@@ -117,7 +118,7 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
@@ -238,8 +239,8 @@
additionalFilteringAssignExpressions);
}
- InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, payloadRef, varRefsForLoading,
- InsertDeleteOperator.Kind.INSERT, true);
+ InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, payloadRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, true);
insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
if (additionalFilteringAssign != null) {
@@ -344,17 +345,38 @@
switch (stmt.getKind()) {
case INSERT: {
- InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
- varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
+ InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
leafOperator = new SinkOperator();
leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
break;
}
+ case UPSERT: {
+ InsertDeleteUpsertOperator upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.UPSERT, false);
+ upsertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+ upsertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+ // Create and add a new variable used for representing the original record
+ ARecordType recordType = (ARecordType) targetDatasource.getItemType();
+ upsertOp.setPrevRecordVar(context.newVar());
+ upsertOp.setPrevRecordType(recordType);
+ if (additionalFilteringField != null) {
+ upsertOp.setPrevFilterVar(context.newVar());
+ try {
+ upsertOp.setPrevFilterType(recordType.getFieldType(additionalFilteringField.get(0)));
+ } catch (IOException e) {
+ throw new AlgebricksException("unable to get the type of filter field");
+ }
+ }
+ leafOperator = new SinkOperator();
+ leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(upsertOp));
+ break;
+ }
case DELETE: {
- InsertDeleteOperator deleteOp = new InsertDeleteOperator(targetDatasource, varRef,
- varRefsForLoading, InsertDeleteOperator.Kind.DELETE, false);
+ InsertDeleteUpsertOperator deleteOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.DELETE, false);
deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
deleteOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
leafOperator = new SinkOperator();
@@ -362,8 +384,8 @@
break;
}
case CONNECT_FEED: {
- InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
- varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
+ InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
leafOperator = new SinkOperator();
@@ -371,8 +393,8 @@
break;
}
case SUBSCRIBE_FEED: {
- ILogicalOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef, varRefsForLoading,
- InsertDeleteOperator.Kind.INSERT, false);
+ ILogicalOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
+ varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
leafOperator = new SinkOperator();
leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
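In the translator, the new UPSERT case differs from INSERT mainly in that it asks the context for a previous-record variable (and, for filtered datasets, a previous-filter variable) so that downstream operators can see what the upsert replaced. As a rough sketch only, an AQL statement of the following shape would take this path; the type and dataset names are hypothetical, and the filter clause on the dataset is what triggers the prev-filter branch above:

use dataverse test;

create dataset Msgs(MsgType) primary key message-id with filter on send-time;

upsert into dataset Msgs(
  for $m in dataset StagedMsgs
  return $m
);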
diff --git a/asterix-app/data/dblp-small/more-dblp-small-id.txt b/asterix-app/data/dblp-small/more-dblp-small-id.txt
new file mode 100644
index 0000000..c5cfad6
--- /dev/null
+++ b/asterix-app/data/dblp-small/more-dblp-small-id.txt
@@ -0,0 +1,13 @@
+1:books/acm/kim95/AnnevelinkACFHK95:aaaaaaaaaaaaaaaaa:Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent:2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95
+2:books/acm/kim95/Blakeley95:OQL[C++] Extending C++ with an Object Query Capability.:José A. Blakeley:2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995
+3:books/acm/kim95/BreitbartGS95:SQL bbbbbbbbbbbbbbbbbb:Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz:2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995
+4:books/acm/kim95/ChristodoulakisK95:ccccccc:Stavros Christodoulakis Leonidas Koveos:2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95
+5:books/acm/kim95/DayalHW95:ddddddddd:Umeshwar Dayal Eric N. Hanson Jennifer Widom:2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95
+666:books/acm/kim95/DittrichD95:hhhhhhhhhhhhBMSs SQL Should Do Better A Critique Based on Early Experiences.:Angelika Kotz Dittrich Klaus R. Dittrich:2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95
+7:books/acm/kim95/Garcia-MolinaH95:Distributed Databases.:Hector Garcia-Molina Meichun Hsu:2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95
+101:journals/jacm/GalilHLSW87:An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.:Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber:2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87
+102:conf/focs/GalilHLSW82:An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs:Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber:2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82
+103:journals/jacm/GalilT88:An O(n²(m + n log n)log n) min-cost flow algorithm.:Zvi Galil Éva Tardos:2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88
+104:conf/focs/GalilT86:SQL An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm:Zvi Galil Éva Tardos:2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86
+105:series/synthesis/2009Weintraub:Jordan Canonical Form Theory and Practice:Steven H. Weintraub:2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers
+111:series/synthesis/2009Brozos:The Geometry of Walker Manifolds:Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo:2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers
\ No newline at end of file
diff --git a/asterix-app/data/more-fbm-with-send-time.adm b/asterix-app/data/more-fbm-with-send-time.adm
new file mode 100644
index 0000000..faf8ff0
--- /dev/null
+++ b/asterix-app/data/more-fbm-with-send-time.adm
@@ -0,0 +1,10 @@
+{"message-id":1,"author-id":34,"in-response-to":2,"sender-location":point("47.16,77.75"),"message":" love sprint its shortcut-menu is awesome:)","send-time":datetime("2012-01-20T10:10:00")}
+{"message-id":3,"author-id":645,"in-response-to":4,"sender-location":point("48.09,81.01"),"message":" like samsung the plan is amazing","send-time":datetime("2012-03-20T10:10:00")}
+{"message-id":5,"author-id":23,"in-response-to":2,"sender-location":point("34.7,90.76"),"message":" love sprint the customization is mind-blowing","send-time":datetime("2012-05-20T10:10:00")}
+{"message-id":7,"author-id":897,"in-response-to":15,"sender-location":point("32.91,85.05"),"message":" dislike sprint the speed is horrible","send-time":datetime("2012-07-20T10:10:00")}
+{"message-id":9,"author-id":65,"in-response-to":12,"sender-location":point("34.45,96.48"),"message":" love verizon its wireless is good","send-time":datetime("2012-09-20T10:10:00")}
+{"message-id":11,"author-id":232,"in-response-to":1,"sender-location":point("38.97,77.49"),"message":" can't stand at&t its plan is terrible","send-time":datetime("2012-11-20T10:10:00")}
+{"message-id":17,"author-id":10,"in-response-to":6,"sender-location":point("42.26,77.76"),"message":" can't stand t-mobile its voicemail-service is OMG:(","send-time":datetime("2012-12-20T10:10:00")}
+{"message-id":13,"author-id":10,"in-response-to":4,"sender-location":point("42.77,78.92"),"message":" dislike iphone the voice-command is bad:(","send-time":datetime("2013-08-20T10:10:00")}
+{"message-id":19,"author-id":9,"in-response-to":12,"sender-location":point("41.33,85.28"),"message":" love at&t its 3G is good:)","send-time":datetime("2013-09-20T10:10:00")}
+{"message-id":15,"author-id":7,"in-response-to":11,"sender-location":point("44.47,67.11"),"message":" like iphone the voicemail-service is awesome","send-time":datetime("2014-01-20T10:10:00")}
\ No newline at end of file
diff --git a/asterix-app/data/semistructured/tiny01/more-customer.adm b/asterix-app/data/semistructured/tiny01/more-customer.adm
new file mode 100644
index 0000000..7e7770e
--- /dev/null
+++ b/asterix-app/data/semistructured/tiny01/more-customer.adm
@@ -0,0 +1,4 @@
+{ "cid": 112, "name": "Dorie Love", "age": 12, "address": { "number": 2286, "street": "Lake St.", "city": "Los Angeles" }, "interests": {{ "Coffee" }}, "children": [ { "name": "Grady Lave" }, { "name": "Daysi Lave" } ] }
+{ "cid": 94, "name": "Edgardo Dunnegan", "age": 19,"interests": {{ }}, "children": [ { "name": "Lyndia Dunnegan" } ] }
+{ "cid": 619, "name": "Jacques Gaskill", "interests": {{ "Cigars", "Coffee", "Computers", "Wine" }}, "children": [ { "name": "Angelyn Gaskill" }, { "name": "Jeanett Gaskill", "age": 40 }, { "name": "Emelda Gaskill", "age": 34 } ] }
+{ "cid": 92, "name": "Kenny Laychock", "address": { "number": 4790, "street": "Washington St.", "city": "Portland" }, "interests": {{ "Video Games", "Basketball" }}, "children": [ ] }
diff --git a/asterix-app/data/spatial/moreSpatialData.json b/asterix-app/data/spatial/moreSpatialData.json
new file mode 100644
index 0000000..3795511
--- /dev/null
+++ b/asterix-app/data/spatial/moreSpatialData.json
@@ -0,0 +1,9 @@
+{"id": 1, "kwds": "aaaaaaa", "line1": line("4.0,7.0 9.0,7.0"), "line2": line("5.0,8.0 5.0,1.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.2,7.0"), "circle" : circle("1.0,1.0 10.0"), "point": point("4.1,7.0")}
+{"id": 2, "kwds": "factory hosedan", "line1": line("-4.0,2.0 2.0,2.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("2.0,3.0 2.0"), "point": point("4.1,7.0")}
+{"id": 3, "kwds": "bbbbbbb", "line1": line("3.0,0.0 0.0,4.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("5.5,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 4, "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0 2.0,1.0 1.0,0.0"), "poly2": polygon("2.0,1.0 2.0,2.0 3.0,2.0 3.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("77.0,4.0 30.0"), "point": point("43.5083,-79.3007")}
+{"id": 5, "kwds": "cccccccccc", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("100.0,100.0 100.0,400.0 300.0,400.0 300.0,100.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("88.0,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 67, "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 1.0,7.0"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("3.1,1.0 2.9,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("1.0,1.0 10.0"), "point": point("43.5083,-79.3007")}
+{"id": 77, "kwds": "enterprisecamp torcamp", "line1": line("0.0,5.0 4.0,7.1"), "line2": line("4.0,7.0 2.0,-17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("3.0,1.0 3.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("3.0,6.0 5.0,7.0"), "circle" : circle("13.0,75.0 1.0"), "point": point("43.5083,-79.3007")}
+{"id": 87, "kwds": "enterprisecamp torcamp", "line1": line("4.0,7.0 2.0,17.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("-5.0,-2.0 -4.0,-1.0 -3.0,-1.0 -2.0,-2.0 -4.0,-4.0 -5.0,-3.0"), "poly2": polygon("-3.0,-3.0 -1.0,-3.0 -3.0,-5.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("76.0,87.0 50.0"), "point": point("43.5083,-79.3007")}
+{"id": 12, "kwds": "zzzz", "line1": line("-4.0,2.0 2.0,2.0"), "line2": line("4.0,7.0 2.0,17.0"), "poly1": polygon("1.0,1.0 1.0,4.0 3.0,4.0 3.0,1.0"), "poly2": polygon("5.0,1.0 5.0,4.0 7.0,4.0 7.0,1.0"), "rec": rectangle("0.0,0.0 4.0,4.0"), "circle" : circle("2.0,3.0 2.0"), "point": point("4.1,7.0")}
\ No newline at end of file
diff --git a/asterix-app/data/tpch0.001/other-orders.tbl b/asterix-app/data/tpch0.001/other-orders.tbl
new file mode 100644
index 0000000..933e96a
--- /dev/null
+++ b/asterix-app/data/tpch0.001/other-orders.tbl
@@ -0,0 +1,14 @@
+5954|22|F|167262.34|1992-12-03|1-URGENT|Clerk#000000968|0|requests along the blith|
+5955|99|P|67944.38|1995-03-27|5-LOW|Clerk#000000340|0|deas integrate. fluffily regular pa|
+5956|22|O|118036.54|1998-05-18|1-URGENT|Clerk#000000587|0|le even, express platelets.|
+5957|88|F|230949.45|1993-12-27|2-HIGH|Clerk#000000020|0| dependencies are slyly. bold accounts according to the carefully regular r|
+5958|115|O|145060.41|1995-09-16|3-MEDIUM|Clerk#000000787|0|e final requests detect alongside of the qu|
+5959|23|F|195515.26|1992-05-15|3-MEDIUM|Clerk#000000913|0|into beans use ironic, unusual foxes. carefully regular excuses boost caref|
+5984|70|F|83413.30|1994-06-18|5-LOW|Clerk#000000023|0|ickly final pains haggle along the furiously ironic pinto bea|
+5985|143|F|3942.73|1995-01-12|3-MEDIUM|Clerk#000000417|0|as nag fluffily slyly permanent accounts. regular depo|
+10986|115|F|92187.80|1992-04-22|2-HIGH|Clerk#000000674|0|iously unusual notornis are |
+5986|55|F|92187.80|1992-04-22|2-HIGH|Clerk#000000674|0|iously unusual notornis are |
+10987|64|O|98956.82|1996-08-03|1-URGENT|Clerk#000000464|0| ideas. quietly final accounts haggle blithely pending escapade|
+5987|44|O|98956.82|1996-08-03|1-URGENT|Clerk#000000464|0| ideas. quietly final accounts haggle blithely pending escapade|
+10988|31|F|41655.51|1993-11-22|4-NOT SPECIFIED|Clerk#000000867|0|fully express accounts. final pi|
+5988|11|F|41655.51|1993-11-22|4-NOT SPECIFIED|Clerk#000000867|0|fully express accounts. final pi|
diff --git a/asterix-app/data/upsert/raw-data/more-data.txt b/asterix-app/data/upsert/raw-data/more-data.txt
new file mode 100644
index 0000000..d77c9c9
--- /dev/null
+++ b/asterix-app/data/upsert/raw-data/more-data.txt
@@ -0,0 +1,4 @@
+2,2,Stephen,155
+4,4,Angela,333
+6,6,Tracy,123
+8,8,George,555
diff --git a/asterix-app/data/upsert/raw-data/overlapping.data b/asterix-app/data/upsert/raw-data/overlapping.data
new file mode 100644
index 0000000..a013ac1
--- /dev/null
+++ b/asterix-app/data/upsert/raw-data/overlapping.data
@@ -0,0 +1,5 @@
+2,10,Nadia,155
+4,10,Igor,333
+6,10,Silvester,123
+1,11,Cloud,777
+12,44,Smith,987
diff --git a/asterix-app/data/upsert/raw-data/test-data.txt b/asterix-app/data/upsert/raw-data/test-data.txt
new file mode 100644
index 0000000..e1e4c47
--- /dev/null
+++ b/asterix-app/data/upsert/raw-data/test-data.txt
@@ -0,0 +1,4 @@
+1,2,Mohammed,155
+3,4,Kate,333
+5,6,William,123
+7,8,Stanly,555
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
index 3852020..5c76c40 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
@@ -41,8 +41,8 @@
@Override
protected List<Statement.Kind> getAllowedStatements() {
- Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPDATE, Kind.DML_CMD_LIST,
- Kind.LOAD, Kind.CONNECT_FEED, Kind.DISCONNECT_FEED, Kind.SET, Kind.COMPACT,
+ Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPSERT, Kind.UPDATE,
+ Kind.DML_CMD_LIST, Kind.LOAD, Kind.CONNECT_FEED, Kind.DISCONNECT_FEED, Kind.SET, Kind.COMPACT,
Kind.EXTERNAL_DATASET_REFRESH, Kind.RUN };
return Arrays.asList(statementsArray);
}
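With UPSERT added to the allowed statement kinds, the update servlet now accepts upsert requests alongside inserts and deletes. A minimal request body might look like the sketch below; the dataverse, dataset, and field names are hypothetical, and this assumes an upsert body may be a constant record in the same way an insert body may be:

use dataverse test;

upsert into dataset UpsertTo(
  {"id": 1, "age": 10, "name": "Alice", "salary": 100.0}
);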
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
index 3b6af6d..4adadf9 100644
--- a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
@@ -162,6 +162,7 @@
import org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
+import org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement;
import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import org.apache.asterix.translator.TypeTranslator;
import org.apache.asterix.translator.util.ValidateUtil;
@@ -330,8 +331,9 @@
handleLoadStatement(metadataProvider, stmt, hcc);
break;
}
- case INSERT: {
- handleInsertStatement(metadataProvider, stmt, hcc);
+ case INSERT:
+ case UPSERT: {
+ handleInsertUpsertStatement(metadataProvider, stmt, hcc);
break;
}
case DELETE: {
@@ -853,7 +855,8 @@
fieldType = typeMap.get(typeSignature);
}
if (fieldType == null) {
- throw new AlgebricksException("Unknown type " + fieldExpr.second);
+ throw new AlgebricksException(
+ "Unknown type " + (fieldExpr.second == null ? fieldExpr.first : fieldExpr.second));
}
indexFields.add(fieldExpr.first);
@@ -1817,22 +1820,33 @@
}
}
- private void handleInsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+ private void handleInsertUpsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
- InsertStatement stmtInsert = (InsertStatement) stmt;
- String dataverseName = getActiveDataverse(stmtInsert.getDataverseName());
- Query query = stmtInsert.getQuery();
+ InsertStatement stmtInsertUpsert = (InsertStatement) stmt;
+ String dataverseName = getActiveDataverse(stmtInsertUpsert.getDataverseName());
+ Query query = stmtInsertUpsert.getQuery();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockManager.INSTANCE.insertDeleteBegin(dataverseName, dataverseName + "." + stmtInsert.getDatasetName(),
- query.getDataverses(), query.getDatasets());
+ MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
+ dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(), query.getDatasets());
try {
metadataProvider.setWriteTransaction(true);
- CompiledInsertStatement clfrqs = new CompiledInsertStatement(dataverseName,
- stmtInsert.getDatasetName().getValue(), query, stmtInsert.getVarCounter());
+ CompiledInsertStatement clfrqs = null;
+ switch (stmtInsertUpsert.getKind()) {
+ case INSERT:
+ clfrqs = new CompiledInsertStatement(dataverseName, stmtInsertUpsert.getDatasetName().getValue(),
+ query, stmtInsertUpsert.getVarCounter());
+ break;
+ case UPSERT:
+ clfrqs = new CompiledUpsertStatement(dataverseName, stmtInsertUpsert.getDatasetName().getValue(),
+ query, stmtInsertUpsert.getVarCounter());
+ break;
+ default:
+ throw new AlgebricksException("Unsupported statement type " + stmtInsertUpsert.getKind());
+ }
JobSpecification compiled = rewriteCompileQuery(metadataProvider, query, clfrqs);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1848,8 +1862,9 @@
}
throw e;
} finally {
- MetadataLockManager.INSTANCE.insertDeleteEnd(dataverseName,
- dataverseName + "." + stmtInsert.getDatasetName(), query.getDataverses(), query.getDatasets());
+ MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
+ dataverseName + "." + stmtInsertUpsert.getDatasetName(), query.getDataverses(),
+ query.getDatasets());
}
}
@@ -1861,8 +1876,9 @@
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockManager.INSTANCE.insertDeleteBegin(dataverseName, dataverseName + "." + stmtDelete.getDatasetName(),
- stmtDelete.getDataverses(), stmtDelete.getDatasets());
+ MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName,
+ dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
+ stmtDelete.getDatasets());
try {
metadataProvider.setWriteTransaction(true);
@@ -1884,7 +1900,7 @@
}
throw e;
} finally {
- MetadataLockManager.INSTANCE.insertDeleteEnd(dataverseName,
+ MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName,
dataverseName + "." + stmtDelete.getDatasetName(), stmtDelete.getDataverses(),
stmtDelete.getDatasets());
}
@@ -2822,7 +2838,8 @@
readDataverses.add(dataverseNameFrom);
List<String> readDatasets = new ArrayList<String>();
readDatasets.add(datasetNameFrom);
- MetadataLockManager.INSTANCE.insertDeleteBegin(dataverseNameTo, datasetNameTo, readDataverses, readDatasets);
+ MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseNameTo, datasetNameTo, readDataverses,
+ readDatasets);
try {
prepareRunExternalRuntime(metadataProvider, hcc, pregelixStmt, dataverseNameFrom, dataverseNameTo,
datasetNameFrom, datasetNameTo, mdTxnCtx);
@@ -2864,7 +2881,8 @@
}
throw e;
} finally {
- MetadataLockManager.INSTANCE.insertDeleteEnd(dataverseNameTo, datasetNameTo, readDataverses, readDatasets);
+ MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseNameTo, datasetNameTo, readDataverses,
+ readDatasets);
}
}
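Lock acquisition now goes through insertDeleteUpsertBegin/End, which cover the target dataset as well as the dataverses and datasets read by the source query. An upsert whose body scans another dataset, as in the sketch below (names borrowed from the new tests), is therefore guarded on both the read and the write side:

use dataverse test;

upsert into dataset UpsertTo(
  for $x in dataset UpsertFrom
  return $x
);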
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
index 199ae39..4c79965 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
@@ -56,9 +56,14 @@
@BeforeClass
public static void setUp() throws Exception {
- File outdir = new File(PATH_ACTUAL);
- outdir.mkdirs();
- ExecutionTestUtil.setUp();
+ try {
+ File outdir = new File(PATH_ACTUAL);
+ outdir.mkdirs();
+ ExecutionTestUtil.setUp();
+ } catch (Throwable th) {
+ th.printStackTrace();
+ throw th;
+ }
}
@AfterClass
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
index eefd40e..4c10eb2 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -27,10 +27,11 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.mapred.JobConf;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
/**
* Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
- *
* @author ramangrover29
*/
public class HDFSCluster {
@@ -68,8 +69,7 @@
conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
cleanupLocal();
- //this constructor is deprecated in hadoop 2x
- //dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
+ setLoggingLevel(Level.WARN);
MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
build.nameNodePort(nameNodePort);
build.numDataNodes(numDataNodes);
@@ -79,6 +79,11 @@
loadData(basePath);
}
+ private void setLoggingLevel(Level level) {
+ Logger rootLogger = Logger.getRootLogger();
+ rootLogger.setLevel(level);
+ }
+
private void loadData(String localDataRoot) throws IOException {
Path destDir = new Path(HDFS_PATH);
dfs.mkdirs(destDir);
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.1.ddl.aql
new file mode 100644
index 0000000..5eea00b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.1.ddl.aql
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Test filters with upsert pipeline
+ * Expected Res : Success
+ * Date : 13th Jan 2016
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type FacebookMessageType as closed {
+ message-id: int64,
+ author-id: int64,
+ in-response-to: int64?,
+ sender-location: point?,
+ message: string,
+ send-time: datetime
+}
+
+create dataset FacebookMessages(FacebookMessageType)
+primary key message-id;
+
+create dataset FilteredFacebookMessages(FacebookMessageType)
+primary key message-id with filter on send-time;
+
+create index AutherIdx on FilteredFacebookMessages(author-id);
+create index MessageIdx on FilteredFacebookMessages(message);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.2.update.aql
new file mode 100644
index 0000000..eecf79d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.2.update.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset FilteredFacebookMessages using localfs
+(("path"="asterix_nc1://data/fbm-with-send-time.adm"),("format"="adm"));
+
+load dataset FacebookMessages using localfs
+(("path"="asterix_nc1://data/more-fbm-with-send-time.adm"),("format"="adm"));
+
+upsert into dataset FilteredFacebookMessages(
+ for $x in dataset FacebookMessages
+ return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.3.query.aql
new file mode 100644
index 0000000..d1b3925
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/filtered-dataset/filtered-dataset.3.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $m in dataset('FilteredFacebookMessages')
+where $m.send-time > datetime("2012-08-20T10:10:00")
+return $m;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.1.ddl.aql
new file mode 100644
index 0000000..70960ed
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.1.ddl.aql
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type MyRecord as closed {
+ id: int64,
+ point: point,
+ kwds: string,
+ line1: line,
+ line2: line,
+ poly1: polygon,
+ poly2: polygon,
+ rec: rectangle,
+ circle: circle
+}
+
+
+create dataset UpsertTo(MyRecord)
+ primary key id;
+
+ create dataset UpsertFrom(MyRecord)
+ primary key id;
+
+create index btree_index on UpsertTo(kwds);
+create index rtree_index on UpsertTo(point) type rtree;
+create index inverted_index on UpsertTo(kwds) type keyword;
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.2.update.aql
new file mode 100644
index 0000000..9e6086d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.2.update.aql
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset UpsertTo
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/spatial/spatialData.json"),("format"="adm"));
+
+load dataset UpsertFrom
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/spatial/moreSpatialData.json"),("format"="adm"));
+
+
+upsert into dataset UpsertTo(
+for $x in dataset UpsertFrom
+return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.3.query.aql
new file mode 100644
index 0000000..a785237
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/multiple-secondaries/multiple-secondaries.3.query.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $o in dataset('UpsertTo')
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
+order by $o.id
+return {"id":$o.id}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.1.ddl.aql
new file mode 100644
index 0000000..41cbd5a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.1.ddl.aql
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type OrderTypetmp as closed {
+ o_orderkey: int64,
+ o_custkey: int64,
+ o_orderstatus: string,
+ o_totalprice: double,
+ o_orderdate: string,
+ o_orderpriority: string,
+ o_clerk: string,
+ o_shippriority: int64,
+ o_comment: string
+}
+
+create type OrderType as closed {
+nested : OrderTypetmp
+}
+
+create dataset UpsertTo(OrderTypetmp)
+ primary key o_orderkey;
+
+create dataset UpsertFrom(OrderTypetmp)
+ primary key o_orderkey;
+
+ create dataset Orders(OrderType)
+ primary key nested.o_orderkey;
+
+create index idx_Orders_Custkey on Orders(nested.o_custkey);
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
new file mode 100644
index 0000000..7a46315
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset UpsertTo
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset UpsertFrom
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/tpch0.001/other-orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+insert into dataset Orders
+(
+ for $c in dataset('UpsertTo')
+ return {
+ "nested" : $c
+ }
+);
+
+upsert into dataset Orders
+(
+ for $c in dataset('UpsertFrom')
+ return {
+ "nested" : $c
+ }
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.3.query.aql
new file mode 100644
index 0000000..6af352c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.3.query.aql
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $o in dataset('Orders')
+where
+ $o.nested.o_custkey < 60
+order by $o.nested.o_orderkey
+return {
+ "o_orderkey": $o.nested.o_orderkey,
+ "o_custkey": $o.nested.o_custkey
+}
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.1.ddl.aql
new file mode 100644
index 0000000..c281fe1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.1.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type AddressType as open {
+ number: int64,
+ street: string,
+ city: string
+}
+
+create type CustomerType as open {
+ cid: int64,
+ name: string,
+ age: int64?,
+ address: AddressType?,
+ interests: {{string}},
+ children: [ { name: string, age: int64? } ]
+}
+
+create dataset Customers(CustomerType) primary key cid;
+create dataset MoreCustomers(CustomerType) primary key cid;
+create index age_index on Customers(age);
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.2.update.aql
new file mode 100644
index 0000000..589eb3f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.2.update.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset Customers
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/semistructured/tiny01/customer.adm"),("format"="adm"));
+
+load dataset MoreCustomers
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/semistructured/tiny01/more-customer.adm"),("format"="adm"));
+
+upsert into dataset Customers(
+for $x in dataset MoreCustomers
+return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.3.query.aql
new file mode 100644
index 0000000..747e1ed
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nullable-index/nullable-index.3.query.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $c in dataset('Customers')
+where $c.age < 20
+order by $c.cid
+return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.1.ddl.aql
new file mode 100644
index 0000000..a1755ce
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.1.ddl.aql
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type OrderType as closed {
+ o_orderkey: int64,
+ o_custkey: int64,
+ o_orderstatus: string,
+ o_totalprice: double,
+ o_orderdate: string,
+ o_orderpriority: string,
+ o_clerk: string,
+ o_shippriority: int64,
+ o_comment: string
+}
+
+create type OrderOpenType as open {
+ o_orderkey: int64,
+ o_orderstatus: string,
+ o_totalprice: double,
+ o_orderdate: string,
+ o_orderpriority: string,
+ o_clerk: string,
+ o_shippriority: int64,
+ o_comment: string
+}
+
+create dataset Orders(OrderType)
+ primary key o_orderkey;
+
+ create dataset OtherOrders(OrderType)
+ primary key o_orderkey;
+
+create dataset OrdersOpen(OrderOpenType)
+primary key o_orderkey;
+
+create index idx_Orders_Custkey on OrdersOpen(o_custkey:int32) enforced;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.2.update.aql
new file mode 100644
index 0000000..5951e05
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.2.update.aql
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset Orders
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset OtherOrders
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/tpch0.001/other-orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+insert into dataset OrdersOpen (
+ for $x in dataset OtherOrders
+ return $x
+);
+
+upsert into dataset OrdersOpen (
+ for $x in dataset Orders
+ return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.3.query.aql
new file mode 100644
index 0000000..3b12c24
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/open-index/open-index.3.query.aql
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
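+// Range scan over the upserted records; results are ordered by the primary key
+// so the expected output is stable.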
+for $o in dataset('OrdersOpen')
+where
+ $o.o_custkey > 40
+order by $o.o_orderkey
+return {
+ "o_orderkey": $o.o_orderkey,
+ "o_custkey": $o.o_custkey
+}
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.1.ddl.aql
new file mode 100644
index 0000000..79c6fa7
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.1.ddl.aql
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which doesn't have any secondary indexes
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as closed {
+  id: int32,
+  age: int32,
+  name: string,
+  salary: double
+};
+
+create dataset UpsertTo("TestType") primary key id;
+create dataset UpsertFrom("TestType") primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.2.update.aql
new file mode 100644
index 0000000..4446525
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.2.update.aql
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which doesn't have any secondary indexes
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+use dataverse test;
+// load first dataset
+load dataset UpsertTo using
+localfs(("format"="delimited-text"),
+ ("path"="asterix_nc1://data/upsert/raw-data/overlapping.data"),
+ ("delimiter"=","));
+// load second dataset
+load dataset UpsertFrom using
+localfs(("format"="delimited-text"),
+ ("path"="asterix_nc1://data/upsert/raw-data/test-data.txt,asterix_nc1://data/upsert/raw-data/more-data.txt"),
+ ("delimiter"=","));
+
+// upsert UpsertFrom into UpsertTo
+use dataverse test;
+upsert into dataset UpsertTo(
+ for $x in dataset UpsertFrom
+ return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.3.query.aql
new file mode 100644
index 0000000..d614ed9
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-index/primary-index.3.query.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which doesn't have any secondary indexes
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+use dataverse test;
+for $x in dataset UpsertTo
+return $x;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.1.ddl.aql
new file mode 100644
index 0000000..258230b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.1.ddl.aql
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which has a b-tree secondary index
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as closed {
+  id: int32,
+  age: int32,
+  name: string,
+  salary: double
+};
+
+create dataset UpsertTo("TestType") primary key id;
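+// Secondary b-tree index on age: when an upsert replaces a record, its old index entry
+// must be removed and an entry for the new value inserted.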
+create index ageindex on UpsertTo('age');
+create dataset UpsertFrom("TestType") primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.2.update.aql
new file mode 100644
index 0000000..9207acb
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.2.update.aql
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which has a b-tree secondary index
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+use dataverse test;
+// load first dataset
+load dataset UpsertTo using
+localfs(("format"="delimited-text"),
+ ("path"="asterix_nc1://data/upsert/raw-data/overlapping.data"),
+ ("delimiter"=","));
+// load second dataset
+load dataset UpsertFrom using
+localfs(("format"="delimited-text"),
+ ("path"="asterix_nc1://data/upsert/raw-data/test-data.txt,asterix_nc1://data/upsert/raw-data/more-data.txt"),
+ ("delimiter"=","));
+
+// upsert UpsertFrom into UpsertTo
+use dataverse test;
+upsert into dataset UpsertTo(
+ for $x in dataset UpsertFrom
+ return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.3.query.aql
new file mode 100644
index 0000000..d52feb8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-btree/primary-secondary-btree.3.query.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset which has a b-tree secondary index
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+// Note: this query does not yet exercise the b-tree index on age; a different predicate is needed for that.
+use dataverse test;
+for $x in dataset UpsertTo
+where $x.age > 5
+return $x;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.1.ddl.aql
new file mode 100644
index 0000000..143511e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.1.ddl.aql
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int64,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset UpsertToDBLP(DBLPType)
+ primary key id;
+
+create dataset UpsertFromDBLP(DBLPType)
+ primary key id;
+
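+// Keyword (inverted) index on title; the upsert path has to maintain this index as well.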
+create index keyword_index on UpsertToDBLP(title) type keyword;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.2.update.aql
new file mode 100644
index 0000000..054b26e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.2.update.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset UpsertToDBLP
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/dblp-small/dblp-small-id.txt"),("format"="delimited-text"),("delimiter"=":"));
+
+load dataset UpsertFromDBLP
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/dblp-small/more-dblp-small-id.txt"),("format"="delimited-text"),("delimiter"=":"));
+
+upsert into dataset UpsertToDBLP(
+ for $x in dataset UpsertFromDBLP
+ return $x
+);
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.3.query.aql
new file mode 100644
index 0000000..112f1f9
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-inverted/primary-secondary-inverted.3.query.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $o in dataset('UpsertToDBLP')
+where contains($o.title, "SQL")
+order by $o.id
+return $o
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.1.ddl.aql
new file mode 100644
index 0000000..d147852
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.1.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type MyRecord as closed {
+ id: int64,
+ point: point,
+ kwds: string,
+ line1: line,
+ line2: line,
+ poly1: polygon,
+ poly2: polygon,
+ rec: rectangle,
+ circle: circle
+}
+
+create dataset UpsertTo(MyRecord)
+ primary key id;
+
+create dataset UpsertFrom(MyRecord)
+ primary key id;
+
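+// R-tree index on the point field; replaced records must also be updated in this spatial index.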
+create index rtree_index_point on UpsertTo(point) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.2.update.aql
new file mode 100644
index 0000000..12b554f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.2.update.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+load dataset UpsertTo
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/spatial/spatialData.json"),("format"="adm"));
+
+load dataset UpsertFrom
+using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="asterix_nc1://data/spatial/moreSpatialData.json"),("format"="adm"));
+
+upsert into dataset UpsertTo(
+for $x in dataset UpsertFrom
+return $x
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.3.query.aql
new file mode 100644
index 0000000..a785237
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/primary-secondary-rtree/primary-secondary-rtree.3.query.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse test;
+
+for $o in dataset('UpsertTo')
+where spatial-intersect($o.point, create-polygon([4.0,1.0,4.0,4.0,12.0,4.0,12.0,1.0]))
+order by $o.id
+return {"id":$o.id}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.1.ddl.aql
new file mode 100644
index 0000000..3dd1bcc
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.1.ddl.aql
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset with self read
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as closed {
+  id: int32,
+  age: int32,
+  name: string,
+  salary: double
+};
+
+create dataset UpsertTo("TestType") primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.2.update.aql
new file mode 100644
index 0000000..e391384
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.2.update.aql
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset with self read
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+use dataverse test;
+// load first dataset
+load dataset UpsertTo using
+localfs(("format"="delimited-text"),
+ ("path"="asterix_nc1://data/upsert/raw-data/overlapping.data"),
+ ("delimiter"=","));
+
+// upsert UpsertTo into itself (self read): each record is re-upserted with an incremented age and a 10% salary raise
+upsert into dataset UpsertTo(
+ for $x in dataset UpsertTo
+ return {
+ "id":$x.id,
+ "age":$x.age+1,
+ "name":$x.name,
+ "salary":$x.salary*1.1
+ }
+);
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.3.query.aql
new file mode 100644
index 0000000..1344c3c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/upsert-with-self-read/upsert-with-self-read.3.query.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description : Upsert into a dataset with self read
+ * Expected Res : Success
+ * Date : Sep 15th 2015
+ */
+
+use dataverse test;
+for $x in dataset UpsertTo
+return $x;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/filtered-dataset/filtered-dataset.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/filtered-dataset/filtered-dataset.1.adm
new file mode 100644
index 0000000..2e4795e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/filtered-dataset/filtered-dataset.1.adm
@@ -0,0 +1,9 @@
+{ "message-id": 11, "author-id": 232, "in-response-to": 1, "sender-location": point("38.97,77.49"), "message": " can't stand at&t its plan is terrible", "send-time": datetime("2012-11-20T10:10:00.000Z") }
+{ "message-id": 12, "author-id": 10, "in-response-to": 6, "sender-location": point("42.26,77.76"), "message": " can't stand t-mobile its voicemail-service is OMG:(", "send-time": datetime("2012-12-20T10:10:00.000Z") }
+{ "message-id": 14, "author-id": 9, "in-response-to": 12, "sender-location": point("41.33,85.28"), "message": " love at&t its 3G is good:)", "send-time": datetime("2013-09-20T10:10:00.000Z") }
+{ "message-id": 13, "author-id": 10, "in-response-to": 4, "sender-location": point("42.77,78.92"), "message": " dislike iphone the voice-command is bad:(", "send-time": datetime("2013-08-20T10:10:00.000Z") }
+{ "message-id": 15, "author-id": 7, "in-response-to": 11, "sender-location": point("44.47,67.11"), "message": " like iphone the voicemail-service is awesome", "send-time": datetime("2014-01-20T10:10:00.000Z") }
+{ "message-id": 19, "author-id": 9, "in-response-to": 12, "sender-location": point("41.33,85.28"), "message": " love at&t its 3G is good:)", "send-time": datetime("2013-09-20T10:10:00.000Z") }
+{ "message-id": 9, "author-id": 65, "in-response-to": 12, "sender-location": point("34.45,96.48"), "message": " love verizon its wireless is good", "send-time": datetime("2012-09-20T10:10:00.000Z") }
+{ "message-id": 10, "author-id": 1, "in-response-to": 12, "sender-location": point("42.5,70.01"), "message": " can't stand motorola the touch-screen is terrible", "send-time": datetime("2012-10-20T10:10:00.000Z") }
+{ "message-id": 17, "author-id": 10, "in-response-to": 6, "sender-location": point("42.26,77.76"), "message": " can't stand t-mobile its voicemail-service is OMG:(", "send-time": datetime("2012-12-20T10:10:00.000Z") }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/multiple-secondaries/multiple-secondaries.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/multiple-secondaries/multiple-secondaries.1.adm
new file mode 100644
index 0000000..9c85166
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/multiple-secondaries/multiple-secondaries.1.adm
@@ -0,0 +1 @@
+{ "id": 20 }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/nested-index/nested-index.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/nested-index/nested-index.1.adm
new file mode 100644
index 0000000..351f3c6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/nested-index/nested-index.1.adm
@@ -0,0 +1,601 @@
+{ "o_orderkey": 1, "o_custkey": 37 }
+{ "o_orderkey": 5, "o_custkey": 46 }
+{ "o_orderkey": 6, "o_custkey": 56 }
+{ "o_orderkey": 7, "o_custkey": 40 }
+{ "o_orderkey": 64, "o_custkey": 34 }
+{ "o_orderkey": 65, "o_custkey": 17 }
+{ "o_orderkey": 67, "o_custkey": 58 }
+{ "o_orderkey": 68, "o_custkey": 29 }
+{ "o_orderkey": 71, "o_custkey": 4 }
+{ "o_orderkey": 97, "o_custkey": 22 }
+{ "o_orderkey": 101, "o_custkey": 28 }
+{ "o_orderkey": 102, "o_custkey": 1 }
+{ "o_orderkey": 103, "o_custkey": 31 }
+{ "o_orderkey": 130, "o_custkey": 37 }
+{ "o_orderkey": 132, "o_custkey": 28 }
+{ "o_orderkey": 133, "o_custkey": 44 }
+{ "o_orderkey": 134, "o_custkey": 7 }
+{ "o_orderkey": 161, "o_custkey": 17 }
+{ "o_orderkey": 162, "o_custkey": 16 }
+{ "o_orderkey": 164, "o_custkey": 1 }
+{ "o_orderkey": 165, "o_custkey": 28 }
+{ "o_orderkey": 197, "o_custkey": 34 }
+{ "o_orderkey": 199, "o_custkey": 53 }
+{ "o_orderkey": 224, "o_custkey": 4 }
+{ "o_orderkey": 225, "o_custkey": 34 }
+{ "o_orderkey": 227, "o_custkey": 10 }
+{ "o_orderkey": 228, "o_custkey": 46 }
+{ "o_orderkey": 258, "o_custkey": 43 }
+{ "o_orderkey": 259, "o_custkey": 44 }
+{ "o_orderkey": 261, "o_custkey": 47 }
+{ "o_orderkey": 262, "o_custkey": 31 }
+{ "o_orderkey": 288, "o_custkey": 8 }
+{ "o_orderkey": 292, "o_custkey": 23 }
+{ "o_orderkey": 293, "o_custkey": 31 }
+{ "o_orderkey": 294, "o_custkey": 52 }
+{ "o_orderkey": 295, "o_custkey": 19 }
+{ "o_orderkey": 320, "o_custkey": 1 }
+{ "o_orderkey": 323, "o_custkey": 40 }
+{ "o_orderkey": 325, "o_custkey": 41 }
+{ "o_orderkey": 353, "o_custkey": 2 }
+{ "o_orderkey": 358, "o_custkey": 4 }
+{ "o_orderkey": 385, "o_custkey": 34 }
+{ "o_orderkey": 387, "o_custkey": 4 }
+{ "o_orderkey": 388, "o_custkey": 46 }
+{ "o_orderkey": 416, "o_custkey": 41 }
+{ "o_orderkey": 417, "o_custkey": 55 }
+{ "o_orderkey": 421, "o_custkey": 40 }
+{ "o_orderkey": 450, "o_custkey": 49 }
+{ "o_orderkey": 453, "o_custkey": 46 }
+{ "o_orderkey": 454, "o_custkey": 49 }
+{ "o_orderkey": 455, "o_custkey": 13 }
+{ "o_orderkey": 481, "o_custkey": 31 }
+{ "o_orderkey": 483, "o_custkey": 35 }
+{ "o_orderkey": 484, "o_custkey": 55 }
+{ "o_orderkey": 486, "o_custkey": 52 }
+{ "o_orderkey": 516, "o_custkey": 44 }
+{ "o_orderkey": 517, "o_custkey": 10 }
+{ "o_orderkey": 550, "o_custkey": 25 }
+{ "o_orderkey": 576, "o_custkey": 31 }
+{ "o_orderkey": 577, "o_custkey": 56 }
+{ "o_orderkey": 582, "o_custkey": 50 }
+{ "o_orderkey": 583, "o_custkey": 49 }
+{ "o_orderkey": 608, "o_custkey": 26 }
+{ "o_orderkey": 610, "o_custkey": 52 }
+{ "o_orderkey": 642, "o_custkey": 40 }
+{ "o_orderkey": 643, "o_custkey": 58 }
+{ "o_orderkey": 644, "o_custkey": 8 }
+{ "o_orderkey": 646, "o_custkey": 52 }
+{ "o_orderkey": 674, "o_custkey": 34 }
+{ "o_orderkey": 675, "o_custkey": 13 }
+{ "o_orderkey": 676, "o_custkey": 38 }
+{ "o_orderkey": 679, "o_custkey": 49 }
+{ "o_orderkey": 705, "o_custkey": 43 }
+{ "o_orderkey": 708, "o_custkey": 32 }
+{ "o_orderkey": 709, "o_custkey": 37 }
+{ "o_orderkey": 736, "o_custkey": 47 }
+{ "o_orderkey": 738, "o_custkey": 22 }
+{ "o_orderkey": 739, "o_custkey": 1 }
+{ "o_orderkey": 740, "o_custkey": 44 }
+{ "o_orderkey": 770, "o_custkey": 32 }
+{ "o_orderkey": 771, "o_custkey": 46 }
+{ "o_orderkey": 800, "o_custkey": 56 }
+{ "o_orderkey": 803, "o_custkey": 16 }
+{ "o_orderkey": 804, "o_custkey": 50 }
+{ "o_orderkey": 832, "o_custkey": 29 }
+{ "o_orderkey": 833, "o_custkey": 56 }
+{ "o_orderkey": 834, "o_custkey": 43 }
+{ "o_orderkey": 838, "o_custkey": 17 }
+{ "o_orderkey": 839, "o_custkey": 28 }
+{ "o_orderkey": 865, "o_custkey": 4 }
+{ "o_orderkey": 866, "o_custkey": 40 }
+{ "o_orderkey": 867, "o_custkey": 26 }
+{ "o_orderkey": 870, "o_custkey": 34 }
+{ "o_orderkey": 871, "o_custkey": 16 }
+{ "o_orderkey": 896, "o_custkey": 2 }
+{ "o_orderkey": 897, "o_custkey": 49 }
+{ "o_orderkey": 898, "o_custkey": 55 }
+{ "o_orderkey": 900, "o_custkey": 46 }
+{ "o_orderkey": 901, "o_custkey": 13 }
+{ "o_orderkey": 902, "o_custkey": 10 }
+{ "o_orderkey": 903, "o_custkey": 11 }
+{ "o_orderkey": 932, "o_custkey": 41 }
+{ "o_orderkey": 934, "o_custkey": 52 }
+{ "o_orderkey": 935, "o_custkey": 50 }
+{ "o_orderkey": 960, "o_custkey": 35 }
+{ "o_orderkey": 961, "o_custkey": 56 }
+{ "o_orderkey": 962, "o_custkey": 37 }
+{ "o_orderkey": 963, "o_custkey": 26 }
+{ "o_orderkey": 966, "o_custkey": 14 }
+{ "o_orderkey": 992, "o_custkey": 55 }
+{ "o_orderkey": 994, "o_custkey": 2 }
+{ "o_orderkey": 998, "o_custkey": 32 }
+{ "o_orderkey": 1024, "o_custkey": 4 }
+{ "o_orderkey": 1031, "o_custkey": 4 }
+{ "o_orderkey": 1056, "o_custkey": 28 }
+{ "o_orderkey": 1058, "o_custkey": 53 }
+{ "o_orderkey": 1063, "o_custkey": 37 }
+{ "o_orderkey": 1089, "o_custkey": 49 }
+{ "o_orderkey": 1090, "o_custkey": 19 }
+{ "o_orderkey": 1121, "o_custkey": 29 }
+{ "o_orderkey": 1125, "o_custkey": 25 }
+{ "o_orderkey": 1127, "o_custkey": 58 }
+{ "o_orderkey": 1152, "o_custkey": 49 }
+{ "o_orderkey": 1154, "o_custkey": 37 }
+{ "o_orderkey": 1186, "o_custkey": 59 }
+{ "o_orderkey": 1188, "o_custkey": 20 }
+{ "o_orderkey": 1189, "o_custkey": 46 }
+{ "o_orderkey": 1190, "o_custkey": 13 }
+{ "o_orderkey": 1217, "o_custkey": 7 }
+{ "o_orderkey": 1218, "o_custkey": 10 }
+{ "o_orderkey": 1219, "o_custkey": 28 }
+{ "o_orderkey": 1220, "o_custkey": 49 }
+{ "o_orderkey": 1221, "o_custkey": 14 }
+{ "o_orderkey": 1222, "o_custkey": 10 }
+{ "o_orderkey": 1223, "o_custkey": 10 }
+{ "o_orderkey": 1248, "o_custkey": 49 }
+{ "o_orderkey": 1250, "o_custkey": 37 }
+{ "o_orderkey": 1251, "o_custkey": 38 }
+{ "o_orderkey": 1285, "o_custkey": 11 }
+{ "o_orderkey": 1287, "o_custkey": 19 }
+{ "o_orderkey": 1315, "o_custkey": 22 }
+{ "o_orderkey": 1316, "o_custkey": 16 }
+{ "o_orderkey": 1319, "o_custkey": 32 }
+{ "o_orderkey": 1344, "o_custkey": 17 }
+{ "o_orderkey": 1347, "o_custkey": 41 }
+{ "o_orderkey": 1348, "o_custkey": 19 }
+{ "o_orderkey": 1350, "o_custkey": 52 }
+{ "o_orderkey": 1376, "o_custkey": 47 }
+{ "o_orderkey": 1377, "o_custkey": 20 }
+{ "o_orderkey": 1378, "o_custkey": 20 }
+{ "o_orderkey": 1408, "o_custkey": 55 }
+{ "o_orderkey": 1412, "o_custkey": 53 }
+{ "o_orderkey": 1443, "o_custkey": 44 }
+{ "o_orderkey": 1446, "o_custkey": 41 }
+{ "o_orderkey": 1475, "o_custkey": 5 }
+{ "o_orderkey": 1478, "o_custkey": 50 }
+{ "o_orderkey": 1479, "o_custkey": 16 }
+{ "o_orderkey": 1504, "o_custkey": 2 }
+{ "o_orderkey": 1505, "o_custkey": 37 }
+{ "o_orderkey": 1510, "o_custkey": 53 }
+{ "o_orderkey": 1538, "o_custkey": 29 }
+{ "o_orderkey": 1540, "o_custkey": 16 }
+{ "o_orderkey": 1543, "o_custkey": 52 }
+{ "o_orderkey": 1568, "o_custkey": 17 }
+{ "o_orderkey": 1572, "o_custkey": 11 }
+{ "o_orderkey": 1601, "o_custkey": 53 }
+{ "o_orderkey": 1602, "o_custkey": 1 }
+{ "o_orderkey": 1603, "o_custkey": 2 }
+{ "o_orderkey": 1605, "o_custkey": 58 }
+{ "o_orderkey": 1606, "o_custkey": 53 }
+{ "o_orderkey": 1633, "o_custkey": 16 }
+{ "o_orderkey": 1635, "o_custkey": 4 }
+{ "o_orderkey": 1639, "o_custkey": 5 }
+{ "o_orderkey": 1667, "o_custkey": 5 }
+{ "o_orderkey": 1669, "o_custkey": 2 }
+{ "o_orderkey": 1670, "o_custkey": 25 }
+{ "o_orderkey": 1671, "o_custkey": 35 }
+{ "o_orderkey": 1696, "o_custkey": 4 }
+{ "o_orderkey": 1698, "o_custkey": 40 }
+{ "o_orderkey": 1734, "o_custkey": 7 }
+{ "o_orderkey": 1735, "o_custkey": 22 }
+{ "o_orderkey": 1764, "o_custkey": 29 }
+{ "o_orderkey": 1767, "o_custkey": 25 }
+{ "o_orderkey": 1792, "o_custkey": 49 }
+{ "o_orderkey": 1793, "o_custkey": 19 }
+{ "o_orderkey": 1796, "o_custkey": 47 }
+{ "o_orderkey": 1798, "o_custkey": 52 }
+{ "o_orderkey": 1824, "o_custkey": 49 }
+{ "o_orderkey": 1828, "o_custkey": 32 }
+{ "o_orderkey": 1860, "o_custkey": 10 }
+{ "o_orderkey": 1862, "o_custkey": 34 }
+{ "o_orderkey": 1889, "o_custkey": 25 }
+{ "o_orderkey": 1890, "o_custkey": 10 }
+{ "o_orderkey": 1892, "o_custkey": 25 }
+{ "o_orderkey": 1895, "o_custkey": 7 }
+{ "o_orderkey": 1922, "o_custkey": 56 }
+{ "o_orderkey": 1925, "o_custkey": 17 }
+{ "o_orderkey": 1954, "o_custkey": 56 }
+{ "o_orderkey": 1955, "o_custkey": 13 }
+{ "o_orderkey": 1957, "o_custkey": 31 }
+{ "o_orderkey": 1958, "o_custkey": 53 }
+{ "o_orderkey": 1959, "o_custkey": 43 }
+{ "o_orderkey": 1984, "o_custkey": 52 }
+{ "o_orderkey": 1985, "o_custkey": 7 }
+{ "o_orderkey": 1991, "o_custkey": 19 }
+{ "o_orderkey": 2016, "o_custkey": 8 }
+{ "o_orderkey": 2018, "o_custkey": 19 }
+{ "o_orderkey": 2048, "o_custkey": 17 }
+{ "o_orderkey": 2049, "o_custkey": 31 }
+{ "o_orderkey": 2050, "o_custkey": 28 }
+{ "o_orderkey": 2051, "o_custkey": 40 }
+{ "o_orderkey": 2054, "o_custkey": 41 }
+{ "o_orderkey": 2082, "o_custkey": 49 }
+{ "o_orderkey": 2085, "o_custkey": 49 }
+{ "o_orderkey": 2087, "o_custkey": 50 }
+{ "o_orderkey": 2113, "o_custkey": 32 }
+{ "o_orderkey": 2116, "o_custkey": 23 }
+{ "o_orderkey": 2117, "o_custkey": 22 }
+{ "o_orderkey": 2151, "o_custkey": 58 }
+{ "o_orderkey": 2178, "o_custkey": 8 }
+{ "o_orderkey": 2179, "o_custkey": 41 }
+{ "o_orderkey": 2182, "o_custkey": 23 }
+{ "o_orderkey": 2210, "o_custkey": 32 }
+{ "o_orderkey": 2215, "o_custkey": 40 }
+{ "o_orderkey": 2240, "o_custkey": 56 }
+{ "o_orderkey": 2243, "o_custkey": 49 }
+{ "o_orderkey": 2245, "o_custkey": 58 }
+{ "o_orderkey": 2276, "o_custkey": 43 }
+{ "o_orderkey": 2304, "o_custkey": 46 }
+{ "o_orderkey": 2305, "o_custkey": 43 }
+{ "o_orderkey": 2306, "o_custkey": 28 }
+{ "o_orderkey": 2308, "o_custkey": 25 }
+{ "o_orderkey": 2310, "o_custkey": 31 }
+{ "o_orderkey": 2342, "o_custkey": 37 }
+{ "o_orderkey": 2368, "o_custkey": 13 }
+{ "o_orderkey": 2371, "o_custkey": 19 }
+{ "o_orderkey": 2372, "o_custkey": 31 }
+{ "o_orderkey": 2373, "o_custkey": 28 }
+{ "o_orderkey": 2374, "o_custkey": 4 }
+{ "o_orderkey": 2375, "o_custkey": 5 }
+{ "o_orderkey": 2400, "o_custkey": 37 }
+{ "o_orderkey": 2403, "o_custkey": 55 }
+{ "o_orderkey": 2406, "o_custkey": 7 }
+{ "o_orderkey": 2407, "o_custkey": 55 }
+{ "o_orderkey": 2433, "o_custkey": 31 }
+{ "o_orderkey": 2434, "o_custkey": 25 }
+{ "o_orderkey": 2438, "o_custkey": 13 }
+{ "o_orderkey": 2439, "o_custkey": 55 }
+{ "o_orderkey": 2465, "o_custkey": 34 }
+{ "o_orderkey": 2466, "o_custkey": 19 }
+{ "o_orderkey": 2467, "o_custkey": 35 }
+{ "o_orderkey": 2470, "o_custkey": 58 }
+{ "o_orderkey": 2497, "o_custkey": 47 }
+{ "o_orderkey": 2503, "o_custkey": 7 }
+{ "o_orderkey": 2528, "o_custkey": 55 }
+{ "o_orderkey": 2531, "o_custkey": 44 }
+{ "o_orderkey": 2533, "o_custkey": 50 }
+{ "o_orderkey": 2561, "o_custkey": 58 }
+{ "o_orderkey": 2562, "o_custkey": 10 }
+{ "o_orderkey": 2565, "o_custkey": 56 }
+{ "o_orderkey": 2596, "o_custkey": 43 }
+{ "o_orderkey": 2624, "o_custkey": 52 }
+{ "o_orderkey": 2625, "o_custkey": 40 }
+{ "o_orderkey": 2628, "o_custkey": 56 }
+{ "o_orderkey": 2631, "o_custkey": 37 }
+{ "o_orderkey": 2657, "o_custkey": 25 }
+{ "o_orderkey": 2658, "o_custkey": 14 }
+{ "o_orderkey": 2662, "o_custkey": 37 }
+{ "o_orderkey": 2691, "o_custkey": 7 }
+{ "o_orderkey": 2693, "o_custkey": 19 }
+{ "o_orderkey": 2695, "o_custkey": 58 }
+{ "o_orderkey": 2720, "o_custkey": 31 }
+{ "o_orderkey": 2722, "o_custkey": 35 }
+{ "o_orderkey": 2726, "o_custkey": 7 }
+{ "o_orderkey": 2752, "o_custkey": 59 }
+{ "o_orderkey": 2753, "o_custkey": 16 }
+{ "o_orderkey": 2758, "o_custkey": 43 }
+{ "o_orderkey": 2789, "o_custkey": 37 }
+{ "o_orderkey": 2790, "o_custkey": 25 }
+{ "o_orderkey": 2816, "o_custkey": 58 }
+{ "o_orderkey": 2817, "o_custkey": 40 }
+{ "o_orderkey": 2818, "o_custkey": 49 }
+{ "o_orderkey": 2820, "o_custkey": 19 }
+{ "o_orderkey": 2849, "o_custkey": 46 }
+{ "o_orderkey": 2855, "o_custkey": 49 }
+{ "o_orderkey": 2880, "o_custkey": 8 }
+{ "o_orderkey": 2885, "o_custkey": 7 }
+{ "o_orderkey": 2913, "o_custkey": 43 }
+{ "o_orderkey": 2916, "o_custkey": 8 }
+{ "o_orderkey": 2919, "o_custkey": 53 }
+{ "o_orderkey": 2944, "o_custkey": 14 }
+{ "o_orderkey": 2945, "o_custkey": 29 }
+{ "o_orderkey": 2948, "o_custkey": 44 }
+{ "o_orderkey": 2976, "o_custkey": 29 }
+{ "o_orderkey": 2978, "o_custkey": 44 }
+{ "o_orderkey": 2980, "o_custkey": 4 }
+{ "o_orderkey": 2981, "o_custkey": 49 }
+{ "o_orderkey": 3008, "o_custkey": 40 }
+{ "o_orderkey": 3009, "o_custkey": 55 }
+{ "o_orderkey": 3010, "o_custkey": 8 }
+{ "o_orderkey": 3012, "o_custkey": 32 }
+{ "o_orderkey": 3014, "o_custkey": 29 }
+{ "o_orderkey": 3042, "o_custkey": 20 }
+{ "o_orderkey": 3043, "o_custkey": 44 }
+{ "o_orderkey": 3044, "o_custkey": 53 }
+{ "o_orderkey": 3045, "o_custkey": 50 }
+{ "o_orderkey": 3046, "o_custkey": 32 }
+{ "o_orderkey": 3047, "o_custkey": 25 }
+{ "o_orderkey": 3072, "o_custkey": 23 }
+{ "o_orderkey": 3078, "o_custkey": 49 }
+{ "o_orderkey": 3107, "o_custkey": 26 }
+{ "o_orderkey": 3136, "o_custkey": 23 }
+{ "o_orderkey": 3139, "o_custkey": 17 }
+{ "o_orderkey": 3141, "o_custkey": 26 }
+{ "o_orderkey": 3142, "o_custkey": 8 }
+{ "o_orderkey": 3169, "o_custkey": 19 }
+{ "o_orderkey": 3170, "o_custkey": 5 }
+{ "o_orderkey": 3171, "o_custkey": 47 }
+{ "o_orderkey": 3175, "o_custkey": 44 }
+{ "o_orderkey": 3200, "o_custkey": 13 }
+{ "o_orderkey": 3204, "o_custkey": 10 }
+{ "o_orderkey": 3207, "o_custkey": 22 }
+{ "o_orderkey": 3234, "o_custkey": 14 }
+{ "o_orderkey": 3235, "o_custkey": 46 }
+{ "o_orderkey": 3237, "o_custkey": 19 }
+{ "o_orderkey": 3239, "o_custkey": 35 }
+{ "o_orderkey": 3265, "o_custkey": 53 }
+{ "o_orderkey": 3266, "o_custkey": 4 }
+{ "o_orderkey": 3269, "o_custkey": 17 }
+{ "o_orderkey": 3270, "o_custkey": 38 }
+{ "o_orderkey": 3271, "o_custkey": 34 }
+{ "o_orderkey": 3302, "o_custkey": 34 }
+{ "o_orderkey": 3328, "o_custkey": 7 }
+{ "o_orderkey": 3329, "o_custkey": 4 }
+{ "o_orderkey": 3330, "o_custkey": 7 }
+{ "o_orderkey": 3335, "o_custkey": 49 }
+{ "o_orderkey": 3361, "o_custkey": 49 }
+{ "o_orderkey": 3363, "o_custkey": 52 }
+{ "o_orderkey": 3364, "o_custkey": 46 }
+{ "o_orderkey": 3366, "o_custkey": 52 }
+{ "o_orderkey": 3426, "o_custkey": 53 }
+{ "o_orderkey": 3427, "o_custkey": 4 }
+{ "o_orderkey": 3428, "o_custkey": 10 }
+{ "o_orderkey": 3431, "o_custkey": 47 }
+{ "o_orderkey": 3456, "o_custkey": 46 }
+{ "o_orderkey": 3457, "o_custkey": 25 }
+{ "o_orderkey": 3494, "o_custkey": 49 }
+{ "o_orderkey": 3495, "o_custkey": 31 }
+{ "o_orderkey": 3521, "o_custkey": 7 }
+{ "o_orderkey": 3522, "o_custkey": 26 }
+{ "o_orderkey": 3526, "o_custkey": 56 }
+{ "o_orderkey": 3527, "o_custkey": 56 }
+{ "o_orderkey": 3552, "o_custkey": 35 }
+{ "o_orderkey": 3554, "o_custkey": 44 }
+{ "o_orderkey": 3555, "o_custkey": 46 }
+{ "o_orderkey": 3556, "o_custkey": 16 }
+{ "o_orderkey": 3558, "o_custkey": 28 }
+{ "o_orderkey": 3584, "o_custkey": 13 }
+{ "o_orderkey": 3589, "o_custkey": 31 }
+{ "o_orderkey": 3617, "o_custkey": 40 }
+{ "o_orderkey": 3618, "o_custkey": 10 }
+{ "o_orderkey": 3620, "o_custkey": 44 }
+{ "o_orderkey": 3623, "o_custkey": 4 }
+{ "o_orderkey": 3649, "o_custkey": 40 }
+{ "o_orderkey": 3650, "o_custkey": 46 }
+{ "o_orderkey": 3653, "o_custkey": 40 }
+{ "o_orderkey": 3654, "o_custkey": 7 }
+{ "o_orderkey": 3655, "o_custkey": 49 }
+{ "o_orderkey": 3681, "o_custkey": 52 }
+{ "o_orderkey": 3682, "o_custkey": 32 }
+{ "o_orderkey": 3684, "o_custkey": 23 }
+{ "o_orderkey": 3685, "o_custkey": 16 }
+{ "o_orderkey": 3686, "o_custkey": 40 }
+{ "o_orderkey": 3687, "o_custkey": 43 }
+{ "o_orderkey": 3714, "o_custkey": 40 }
+{ "o_orderkey": 3716, "o_custkey": 43 }
+{ "o_orderkey": 3717, "o_custkey": 28 }
+{ "o_orderkey": 3718, "o_custkey": 31 }
+{ "o_orderkey": 3748, "o_custkey": 53 }
+{ "o_orderkey": 3749, "o_custkey": 38 }
+{ "o_orderkey": 3751, "o_custkey": 10 }
+{ "o_orderkey": 3777, "o_custkey": 28 }
+{ "o_orderkey": 3780, "o_custkey": 41 }
+{ "o_orderkey": 3783, "o_custkey": 44 }
+{ "o_orderkey": 3812, "o_custkey": 41 }
+{ "o_orderkey": 3841, "o_custkey": 58 }
+{ "o_orderkey": 3842, "o_custkey": 28 }
+{ "o_orderkey": 3843, "o_custkey": 10 }
+{ "o_orderkey": 3846, "o_custkey": 49 }
+{ "o_orderkey": 3847, "o_custkey": 34 }
+{ "o_orderkey": 3873, "o_custkey": 55 }
+{ "o_orderkey": 3876, "o_custkey": 29 }
+{ "o_orderkey": 3877, "o_custkey": 17 }
+{ "o_orderkey": 3905, "o_custkey": 22 }
+{ "o_orderkey": 3906, "o_custkey": 46 }
+{ "o_orderkey": 3908, "o_custkey": 43 }
+{ "o_orderkey": 3909, "o_custkey": 22 }
+{ "o_orderkey": 3911, "o_custkey": 10 }
+{ "o_orderkey": 3936, "o_custkey": 32 }
+{ "o_orderkey": 3938, "o_custkey": 31 }
+{ "o_orderkey": 3943, "o_custkey": 40 }
+{ "o_orderkey": 3968, "o_custkey": 25 }
+{ "o_orderkey": 3969, "o_custkey": 52 }
+{ "o_orderkey": 4006, "o_custkey": 35 }
+{ "o_orderkey": 4007, "o_custkey": 8 }
+{ "o_orderkey": 4032, "o_custkey": 10 }
+{ "o_orderkey": 4036, "o_custkey": 47 }
+{ "o_orderkey": 4039, "o_custkey": 29 }
+{ "o_orderkey": 4066, "o_custkey": 32 }
+{ "o_orderkey": 4067, "o_custkey": 16 }
+{ "o_orderkey": 4070, "o_custkey": 29 }
+{ "o_orderkey": 4097, "o_custkey": 10 }
+{ "o_orderkey": 4098, "o_custkey": 23 }
+{ "o_orderkey": 4099, "o_custkey": 17 }
+{ "o_orderkey": 4100, "o_custkey": 4 }
+{ "o_orderkey": 4102, "o_custkey": 22 }
+{ "o_orderkey": 4129, "o_custkey": 32 }
+{ "o_orderkey": 4131, "o_custkey": 44 }
+{ "o_orderkey": 4132, "o_custkey": 19 }
+{ "o_orderkey": 4135, "o_custkey": 37 }
+{ "o_orderkey": 4160, "o_custkey": 55 }
+{ "o_orderkey": 4162, "o_custkey": 22 }
+{ "o_orderkey": 4165, "o_custkey": 4 }
+{ "o_orderkey": 4166, "o_custkey": 43 }
+{ "o_orderkey": 4167, "o_custkey": 28 }
+{ "o_orderkey": 4193, "o_custkey": 4 }
+{ "o_orderkey": 4199, "o_custkey": 5 }
+{ "o_orderkey": 4229, "o_custkey": 14 }
+{ "o_orderkey": 4257, "o_custkey": 17 }
+{ "o_orderkey": 4263, "o_custkey": 4 }
+{ "o_orderkey": 4288, "o_custkey": 34 }
+{ "o_orderkey": 4290, "o_custkey": 41 }
+{ "o_orderkey": 4292, "o_custkey": 25 }
+{ "o_orderkey": 4294, "o_custkey": 49 }
+{ "o_orderkey": 4295, "o_custkey": 5 }
+{ "o_orderkey": 4321, "o_custkey": 16 }
+{ "o_orderkey": 4326, "o_custkey": 29 }
+{ "o_orderkey": 4352, "o_custkey": 14 }
+{ "o_orderkey": 4355, "o_custkey": 4 }
+{ "o_orderkey": 4357, "o_custkey": 47 }
+{ "o_orderkey": 4358, "o_custkey": 25 }
+{ "o_orderkey": 4359, "o_custkey": 16 }
+{ "o_orderkey": 4384, "o_custkey": 25 }
+{ "o_orderkey": 4388, "o_custkey": 10 }
+{ "o_orderkey": 4389, "o_custkey": 55 }
+{ "o_orderkey": 4390, "o_custkey": 7 }
+{ "o_orderkey": 4391, "o_custkey": 38 }
+{ "o_orderkey": 4421, "o_custkey": 10 }
+{ "o_orderkey": 4449, "o_custkey": 10 }
+{ "o_orderkey": 4451, "o_custkey": 4 }
+{ "o_orderkey": 4452, "o_custkey": 13 }
+{ "o_orderkey": 4455, "o_custkey": 19 }
+{ "o_orderkey": 4483, "o_custkey": 52 }
+{ "o_orderkey": 4485, "o_custkey": 53 }
+{ "o_orderkey": 4486, "o_custkey": 37 }
+{ "o_orderkey": 4487, "o_custkey": 46 }
+{ "o_orderkey": 4545, "o_custkey": 59 }
+{ "o_orderkey": 4546, "o_custkey": 43 }
+{ "o_orderkey": 4582, "o_custkey": 19 }
+{ "o_orderkey": 4583, "o_custkey": 22 }
+{ "o_orderkey": 4610, "o_custkey": 26 }
+{ "o_orderkey": 4611, "o_custkey": 29 }
+{ "o_orderkey": 4615, "o_custkey": 29 }
+{ "o_orderkey": 4645, "o_custkey": 44 }
+{ "o_orderkey": 4647, "o_custkey": 28 }
+{ "o_orderkey": 4674, "o_custkey": 37 }
+{ "o_orderkey": 4676, "o_custkey": 14 }
+{ "o_orderkey": 4677, "o_custkey": 40 }
+{ "o_orderkey": 4704, "o_custkey": 2 }
+{ "o_orderkey": 4706, "o_custkey": 25 }
+{ "o_orderkey": 4709, "o_custkey": 26 }
+{ "o_orderkey": 4738, "o_custkey": 5 }
+{ "o_orderkey": 4770, "o_custkey": 59 }
+{ "o_orderkey": 4772, "o_custkey": 28 }
+{ "o_orderkey": 4774, "o_custkey": 52 }
+{ "o_orderkey": 4800, "o_custkey": 37 }
+{ "o_orderkey": 4804, "o_custkey": 37 }
+{ "o_orderkey": 4805, "o_custkey": 16 }
+{ "o_orderkey": 4806, "o_custkey": 7 }
+{ "o_orderkey": 4807, "o_custkey": 53 }
+{ "o_orderkey": 4832, "o_custkey": 34 }
+{ "o_orderkey": 4834, "o_custkey": 19 }
+{ "o_orderkey": 4838, "o_custkey": 44 }
+{ "o_orderkey": 4839, "o_custkey": 25 }
+{ "o_orderkey": 4866, "o_custkey": 53 }
+{ "o_orderkey": 4867, "o_custkey": 10 }
+{ "o_orderkey": 4869, "o_custkey": 58 }
+{ "o_orderkey": 4871, "o_custkey": 46 }
+{ "o_orderkey": 4898, "o_custkey": 14 }
+{ "o_orderkey": 4928, "o_custkey": 4 }
+{ "o_orderkey": 4931, "o_custkey": 50 }
+{ "o_orderkey": 4934, "o_custkey": 40 }
+{ "o_orderkey": 4935, "o_custkey": 40 }
+{ "o_orderkey": 4961, "o_custkey": 58 }
+{ "o_orderkey": 4963, "o_custkey": 34 }
+{ "o_orderkey": 4965, "o_custkey": 52 }
+{ "o_orderkey": 4993, "o_custkey": 13 }
+{ "o_orderkey": 4994, "o_custkey": 43 }
+{ "o_orderkey": 4995, "o_custkey": 40 }
+{ "o_orderkey": 4997, "o_custkey": 47 }
+{ "o_orderkey": 4998, "o_custkey": 32 }
+{ "o_orderkey": 5026, "o_custkey": 28 }
+{ "o_orderkey": 5028, "o_custkey": 13 }
+{ "o_orderkey": 5029, "o_custkey": 11 }
+{ "o_orderkey": 5056, "o_custkey": 52 }
+{ "o_orderkey": 5059, "o_custkey": 43 }
+{ "o_orderkey": 5063, "o_custkey": 23 }
+{ "o_orderkey": 5092, "o_custkey": 22 }
+{ "o_orderkey": 5120, "o_custkey": 16 }
+{ "o_orderkey": 5123, "o_custkey": 10 }
+{ "o_orderkey": 5124, "o_custkey": 25 }
+{ "o_orderkey": 5125, "o_custkey": 28 }
+{ "o_orderkey": 5152, "o_custkey": 44 }
+{ "o_orderkey": 5154, "o_custkey": 8 }
+{ "o_orderkey": 5186, "o_custkey": 52 }
+{ "o_orderkey": 5187, "o_custkey": 55 }
+{ "o_orderkey": 5190, "o_custkey": 58 }
+{ "o_orderkey": 5216, "o_custkey": 59 }
+{ "o_orderkey": 5217, "o_custkey": 35 }
+{ "o_orderkey": 5220, "o_custkey": 10 }
+{ "o_orderkey": 5221, "o_custkey": 13 }
+{ "o_orderkey": 5251, "o_custkey": 34 }
+{ "o_orderkey": 5280, "o_custkey": 34 }
+{ "o_orderkey": 5282, "o_custkey": 50 }
+{ "o_orderkey": 5287, "o_custkey": 25 }
+{ "o_orderkey": 5313, "o_custkey": 13 }
+{ "o_orderkey": 5314, "o_custkey": 34 }
+{ "o_orderkey": 5317, "o_custkey": 37 }
+{ "o_orderkey": 5318, "o_custkey": 59 }
+{ "o_orderkey": 5345, "o_custkey": 31 }
+{ "o_orderkey": 5346, "o_custkey": 37 }
+{ "o_orderkey": 5347, "o_custkey": 49 }
+{ "o_orderkey": 5348, "o_custkey": 53 }
+{ "o_orderkey": 5378, "o_custkey": 43 }
+{ "o_orderkey": 5381, "o_custkey": 32 }
+{ "o_orderkey": 5382, "o_custkey": 35 }
+{ "o_orderkey": 5383, "o_custkey": 31 }
+{ "o_orderkey": 5408, "o_custkey": 23 }
+{ "o_orderkey": 5409, "o_custkey": 13 }
+{ "o_orderkey": 5410, "o_custkey": 22 }
+{ "o_orderkey": 5415, "o_custkey": 23 }
+{ "o_orderkey": 5441, "o_custkey": 41 }
+{ "o_orderkey": 5442, "o_custkey": 43 }
+{ "o_orderkey": 5446, "o_custkey": 7 }
+{ "o_orderkey": 5447, "o_custkey": 13 }
+{ "o_orderkey": 5474, "o_custkey": 55 }
+{ "o_orderkey": 5504, "o_custkey": 19 }
+{ "o_orderkey": 5507, "o_custkey": 2 }
+{ "o_orderkey": 5508, "o_custkey": 56 }
+{ "o_orderkey": 5510, "o_custkey": 37 }
+{ "o_orderkey": 5542, "o_custkey": 49 }
+{ "o_orderkey": 5568, "o_custkey": 31 }
+{ "o_orderkey": 5572, "o_custkey": 8 }
+{ "o_orderkey": 5573, "o_custkey": 37 }
+{ "o_orderkey": 5574, "o_custkey": 28 }
+{ "o_orderkey": 5601, "o_custkey": 11 }
+{ "o_orderkey": 5604, "o_custkey": 46 }
+{ "o_orderkey": 5605, "o_custkey": 35 }
+{ "o_orderkey": 5666, "o_custkey": 14 }
+{ "o_orderkey": 5667, "o_custkey": 44 }
+{ "o_orderkey": 5670, "o_custkey": 7 }
+{ "o_orderkey": 5671, "o_custkey": 43 }
+{ "o_orderkey": 5697, "o_custkey": 55 }
+{ "o_orderkey": 5701, "o_custkey": 43 }
+{ "o_orderkey": 5729, "o_custkey": 44 }
+{ "o_orderkey": 5730, "o_custkey": 11 }
+{ "o_orderkey": 5731, "o_custkey": 8 }
+{ "o_orderkey": 5732, "o_custkey": 37 }
+{ "o_orderkey": 5735, "o_custkey": 40 }
+{ "o_orderkey": 5760, "o_custkey": 25 }
+{ "o_orderkey": 5761, "o_custkey": 16 }
+{ "o_orderkey": 5762, "o_custkey": 49 }
+{ "o_orderkey": 5763, "o_custkey": 8 }
+{ "o_orderkey": 5765, "o_custkey": 52 }
+{ "o_orderkey": 5766, "o_custkey": 49 }
+{ "o_orderkey": 5792, "o_custkey": 26 }
+{ "o_orderkey": 5793, "o_custkey": 37 }
+{ "o_orderkey": 5794, "o_custkey": 8 }
+{ "o_orderkey": 5795, "o_custkey": 37 }
+{ "o_orderkey": 5799, "o_custkey": 26 }
+{ "o_orderkey": 5824, "o_custkey": 56 }
+{ "o_orderkey": 5826, "o_custkey": 22 }
+{ "o_orderkey": 5827, "o_custkey": 31 }
+{ "o_orderkey": 5856, "o_custkey": 37 }
+{ "o_orderkey": 5859, "o_custkey": 5 }
+{ "o_orderkey": 5860, "o_custkey": 13 }
+{ "o_orderkey": 5888, "o_custkey": 46 }
+{ "o_orderkey": 5889, "o_custkey": 22 }
+{ "o_orderkey": 5890, "o_custkey": 49 }
+{ "o_orderkey": 5891, "o_custkey": 46 }
+{ "o_orderkey": 5893, "o_custkey": 2 }
+{ "o_orderkey": 5921, "o_custkey": 58 }
+{ "o_orderkey": 5924, "o_custkey": 31 }
+{ "o_orderkey": 5953, "o_custkey": 7 }
+{ "o_orderkey": 5954, "o_custkey": 22 }
+{ "o_orderkey": 5956, "o_custkey": 22 }
+{ "o_orderkey": 5959, "o_custkey": 23 }
+{ "o_orderkey": 5986, "o_custkey": 55 }
+{ "o_orderkey": 5987, "o_custkey": 44 }
+{ "o_orderkey": 5988, "o_custkey": 11 }
+{ "o_orderkey": 10988, "o_custkey": 31 }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/nullable-index/nullable-index.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/nullable-index/nullable-index.1.adm
new file mode 100644
index 0000000..adc9610
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/nullable-index/nullable-index.1.adm
@@ -0,0 +1,2 @@
+{ "cid": 94, "name": "Edgardo Dunnegan", "age": 19, "address": null, "interests": {{ }}, "children": [ { "name": "Lyndia Dunnegan", "age": null } ] }
+{ "cid": 112, "name": "Dorie Love", "age": 12, "address": { "number": 2286, "street": "Lake St.", "city": "Los Angeles" }, "interests": {{ "Coffee" }}, "children": [ { "name": "Grady Lave", "age": null }, { "name": "Daysi Lave", "age": null } ] }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/open-index/open-index.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/open-index/open-index.1.adm
new file mode 100644
index 0000000..a7539e8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/open-index/open-index.1.adm
@@ -0,0 +1,1109 @@
+{ "o_orderkey": 2, "o_custkey": 79 }
+{ "o_orderkey": 3, "o_custkey": 124 }
+{ "o_orderkey": 4, "o_custkey": 137 }
+{ "o_orderkey": 5, "o_custkey": 46 }
+{ "o_orderkey": 6, "o_custkey": 56 }
+{ "o_orderkey": 32, "o_custkey": 131 }
+{ "o_orderkey": 33, "o_custkey": 67 }
+{ "o_orderkey": 34, "o_custkey": 62 }
+{ "o_orderkey": 35, "o_custkey": 128 }
+{ "o_orderkey": 36, "o_custkey": 116 }
+{ "o_orderkey": 37, "o_custkey": 88 }
+{ "o_orderkey": 38, "o_custkey": 125 }
+{ "o_orderkey": 39, "o_custkey": 82 }
+{ "o_orderkey": 66, "o_custkey": 130 }
+{ "o_orderkey": 67, "o_custkey": 58 }
+{ "o_orderkey": 69, "o_custkey": 85 }
+{ "o_orderkey": 70, "o_custkey": 65 }
+{ "o_orderkey": 96, "o_custkey": 109 }
+{ "o_orderkey": 98, "o_custkey": 106 }
+{ "o_orderkey": 99, "o_custkey": 89 }
+{ "o_orderkey": 100, "o_custkey": 148 }
+{ "o_orderkey": 128, "o_custkey": 74 }
+{ "o_orderkey": 129, "o_custkey": 73 }
+{ "o_orderkey": 131, "o_custkey": 94 }
+{ "o_orderkey": 133, "o_custkey": 44 }
+{ "o_orderkey": 135, "o_custkey": 61 }
+{ "o_orderkey": 160, "o_custkey": 83 }
+{ "o_orderkey": 163, "o_custkey": 88 }
+{ "o_orderkey": 166, "o_custkey": 109 }
+{ "o_orderkey": 167, "o_custkey": 121 }
+{ "o_orderkey": 192, "o_custkey": 83 }
+{ "o_orderkey": 193, "o_custkey": 80 }
+{ "o_orderkey": 194, "o_custkey": 62 }
+{ "o_orderkey": 195, "o_custkey": 136 }
+{ "o_orderkey": 196, "o_custkey": 65 }
+{ "o_orderkey": 198, "o_custkey": 112 }
+{ "o_orderkey": 199, "o_custkey": 53 }
+{ "o_orderkey": 226, "o_custkey": 128 }
+{ "o_orderkey": 228, "o_custkey": 46 }
+{ "o_orderkey": 229, "o_custkey": 112 }
+{ "o_orderkey": 230, "o_custkey": 103 }
+{ "o_orderkey": 231, "o_custkey": 91 }
+{ "o_orderkey": 256, "o_custkey": 125 }
+{ "o_orderkey": 257, "o_custkey": 124 }
+{ "o_orderkey": 258, "o_custkey": 43 }
+{ "o_orderkey": 259, "o_custkey": 44 }
+{ "o_orderkey": 260, "o_custkey": 106 }
+{ "o_orderkey": 261, "o_custkey": 47 }
+{ "o_orderkey": 263, "o_custkey": 118 }
+{ "o_orderkey": 289, "o_custkey": 104 }
+{ "o_orderkey": 290, "o_custkey": 118 }
+{ "o_orderkey": 291, "o_custkey": 142 }
+{ "o_orderkey": 294, "o_custkey": 52 }
+{ "o_orderkey": 321, "o_custkey": 124 }
+{ "o_orderkey": 322, "o_custkey": 134 }
+{ "o_orderkey": 324, "o_custkey": 106 }
+{ "o_orderkey": 325, "o_custkey": 41 }
+{ "o_orderkey": 326, "o_custkey": 76 }
+{ "o_orderkey": 327, "o_custkey": 145 }
+{ "o_orderkey": 352, "o_custkey": 107 }
+{ "o_orderkey": 354, "o_custkey": 139 }
+{ "o_orderkey": 355, "o_custkey": 71 }
+{ "o_orderkey": 356, "o_custkey": 148 }
+{ "o_orderkey": 357, "o_custkey": 61 }
+{ "o_orderkey": 359, "o_custkey": 79 }
+{ "o_orderkey": 384, "o_custkey": 115 }
+{ "o_orderkey": 386, "o_custkey": 61 }
+{ "o_orderkey": 388, "o_custkey": 46 }
+{ "o_orderkey": 389, "o_custkey": 127 }
+{ "o_orderkey": 390, "o_custkey": 103 }
+{ "o_orderkey": 391, "o_custkey": 112 }
+{ "o_orderkey": 416, "o_custkey": 41 }
+{ "o_orderkey": 417, "o_custkey": 55 }
+{ "o_orderkey": 418, "o_custkey": 95 }
+{ "o_orderkey": 419, "o_custkey": 118 }
+{ "o_orderkey": 420, "o_custkey": 91 }
+{ "o_orderkey": 422, "o_custkey": 74 }
+{ "o_orderkey": 423, "o_custkey": 104 }
+{ "o_orderkey": 448, "o_custkey": 149 }
+{ "o_orderkey": 449, "o_custkey": 97 }
+{ "o_orderkey": 450, "o_custkey": 49 }
+{ "o_orderkey": 451, "o_custkey": 100 }
+{ "o_orderkey": 452, "o_custkey": 61 }
+{ "o_orderkey": 453, "o_custkey": 46 }
+{ "o_orderkey": 454, "o_custkey": 49 }
+{ "o_orderkey": 480, "o_custkey": 73 }
+{ "o_orderkey": 482, "o_custkey": 127 }
+{ "o_orderkey": 484, "o_custkey": 55 }
+{ "o_orderkey": 485, "o_custkey": 101 }
+{ "o_orderkey": 486, "o_custkey": 52 }
+{ "o_orderkey": 487, "o_custkey": 109 }
+{ "o_orderkey": 512, "o_custkey": 64 }
+{ "o_orderkey": 513, "o_custkey": 61 }
+{ "o_orderkey": 514, "o_custkey": 76 }
+{ "o_orderkey": 515, "o_custkey": 142 }
+{ "o_orderkey": 516, "o_custkey": 44 }
+{ "o_orderkey": 518, "o_custkey": 145 }
+{ "o_orderkey": 519, "o_custkey": 64 }
+{ "o_orderkey": 544, "o_custkey": 94 }
+{ "o_orderkey": 545, "o_custkey": 64 }
+{ "o_orderkey": 546, "o_custkey": 145 }
+{ "o_orderkey": 547, "o_custkey": 100 }
+{ "o_orderkey": 548, "o_custkey": 124 }
+{ "o_orderkey": 549, "o_custkey": 110 }
+{ "o_orderkey": 551, "o_custkey": 91 }
+{ "o_orderkey": 577, "o_custkey": 56 }
+{ "o_orderkey": 578, "o_custkey": 94 }
+{ "o_orderkey": 579, "o_custkey": 68 }
+{ "o_orderkey": 580, "o_custkey": 61 }
+{ "o_orderkey": 581, "o_custkey": 70 }
+{ "o_orderkey": 582, "o_custkey": 50 }
+{ "o_orderkey": 583, "o_custkey": 49 }
+{ "o_orderkey": 609, "o_custkey": 127 }
+{ "o_orderkey": 610, "o_custkey": 52 }
+{ "o_orderkey": 611, "o_custkey": 106 }
+{ "o_orderkey": 612, "o_custkey": 82 }
+{ "o_orderkey": 613, "o_custkey": 139 }
+{ "o_orderkey": 614, "o_custkey": 134 }
+{ "o_orderkey": 615, "o_custkey": 67 }
+{ "o_orderkey": 640, "o_custkey": 97 }
+{ "o_orderkey": 641, "o_custkey": 133 }
+{ "o_orderkey": 643, "o_custkey": 58 }
+{ "o_orderkey": 645, "o_custkey": 115 }
+{ "o_orderkey": 646, "o_custkey": 52 }
+{ "o_orderkey": 647, "o_custkey": 143 }
+{ "o_orderkey": 672, "o_custkey": 109 }
+{ "o_orderkey": 673, "o_custkey": 80 }
+{ "o_orderkey": 677, "o_custkey": 124 }
+{ "o_orderkey": 678, "o_custkey": 131 }
+{ "o_orderkey": 679, "o_custkey": 49 }
+{ "o_orderkey": 704, "o_custkey": 85 }
+{ "o_orderkey": 705, "o_custkey": 43 }
+{ "o_orderkey": 706, "o_custkey": 148 }
+{ "o_orderkey": 707, "o_custkey": 118 }
+{ "o_orderkey": 710, "o_custkey": 133 }
+{ "o_orderkey": 711, "o_custkey": 64 }
+{ "o_orderkey": 736, "o_custkey": 47 }
+{ "o_orderkey": 737, "o_custkey": 121 }
+{ "o_orderkey": 740, "o_custkey": 44 }
+{ "o_orderkey": 741, "o_custkey": 106 }
+{ "o_orderkey": 742, "o_custkey": 103 }
+{ "o_orderkey": 743, "o_custkey": 79 }
+{ "o_orderkey": 768, "o_custkey": 98 }
+{ "o_orderkey": 769, "o_custkey": 80 }
+{ "o_orderkey": 771, "o_custkey": 46 }
+{ "o_orderkey": 772, "o_custkey": 97 }
+{ "o_orderkey": 773, "o_custkey": 133 }
+{ "o_orderkey": 774, "o_custkey": 80 }
+{ "o_orderkey": 775, "o_custkey": 134 }
+{ "o_orderkey": 800, "o_custkey": 56 }
+{ "o_orderkey": 801, "o_custkey": 118 }
+{ "o_orderkey": 802, "o_custkey": 137 }
+{ "o_orderkey": 804, "o_custkey": 50 }
+{ "o_orderkey": 805, "o_custkey": 127 }
+{ "o_orderkey": 806, "o_custkey": 131 }
+{ "o_orderkey": 807, "o_custkey": 145 }
+{ "o_orderkey": 833, "o_custkey": 56 }
+{ "o_orderkey": 834, "o_custkey": 43 }
+{ "o_orderkey": 835, "o_custkey": 65 }
+{ "o_orderkey": 836, "o_custkey": 70 }
+{ "o_orderkey": 837, "o_custkey": 116 }
+{ "o_orderkey": 864, "o_custkey": 139 }
+{ "o_orderkey": 868, "o_custkey": 104 }
+{ "o_orderkey": 869, "o_custkey": 136 }
+{ "o_orderkey": 897, "o_custkey": 49 }
+{ "o_orderkey": 898, "o_custkey": 55 }
+{ "o_orderkey": 899, "o_custkey": 109 }
+{ "o_orderkey": 900, "o_custkey": 46 }
+{ "o_orderkey": 928, "o_custkey": 67 }
+{ "o_orderkey": 929, "o_custkey": 83 }
+{ "o_orderkey": 930, "o_custkey": 131 }
+{ "o_orderkey": 931, "o_custkey": 103 }
+{ "o_orderkey": 932, "o_custkey": 41 }
+{ "o_orderkey": 933, "o_custkey": 97 }
+{ "o_orderkey": 934, "o_custkey": 52 }
+{ "o_orderkey": 935, "o_custkey": 50 }
+{ "o_orderkey": 961, "o_custkey": 56 }
+{ "o_orderkey": 964, "o_custkey": 76 }
+{ "o_orderkey": 965, "o_custkey": 70 }
+{ "o_orderkey": 967, "o_custkey": 110 }
+{ "o_orderkey": 992, "o_custkey": 55 }
+{ "o_orderkey": 993, "o_custkey": 80 }
+{ "o_orderkey": 995, "o_custkey": 116 }
+{ "o_orderkey": 996, "o_custkey": 71 }
+{ "o_orderkey": 997, "o_custkey": 109 }
+{ "o_orderkey": 999, "o_custkey": 61 }
+{ "o_orderkey": 1025, "o_custkey": 103 }
+{ "o_orderkey": 1026, "o_custkey": 73 }
+{ "o_orderkey": 1027, "o_custkey": 128 }
+{ "o_orderkey": 1028, "o_custkey": 70 }
+{ "o_orderkey": 1029, "o_custkey": 130 }
+{ "o_orderkey": 1030, "o_custkey": 134 }
+{ "o_orderkey": 1057, "o_custkey": 76 }
+{ "o_orderkey": 1058, "o_custkey": 53 }
+{ "o_orderkey": 1059, "o_custkey": 127 }
+{ "o_orderkey": 1060, "o_custkey": 140 }
+{ "o_orderkey": 1061, "o_custkey": 103 }
+{ "o_orderkey": 1062, "o_custkey": 106 }
+{ "o_orderkey": 1088, "o_custkey": 148 }
+{ "o_orderkey": 1089, "o_custkey": 49 }
+{ "o_orderkey": 1091, "o_custkey": 83 }
+{ "o_orderkey": 1092, "o_custkey": 124 }
+{ "o_orderkey": 1093, "o_custkey": 101 }
+{ "o_orderkey": 1094, "o_custkey": 145 }
+{ "o_orderkey": 1095, "o_custkey": 145 }
+{ "o_orderkey": 1120, "o_custkey": 140 }
+{ "o_orderkey": 1122, "o_custkey": 121 }
+{ "o_orderkey": 1123, "o_custkey": 73 }
+{ "o_orderkey": 1124, "o_custkey": 80 }
+{ "o_orderkey": 1126, "o_custkey": 145 }
+{ "o_orderkey": 1127, "o_custkey": 58 }
+{ "o_orderkey": 1152, "o_custkey": 49 }
+{ "o_orderkey": 1153, "o_custkey": 121 }
+{ "o_orderkey": 1155, "o_custkey": 149 }
+{ "o_orderkey": 1156, "o_custkey": 133 }
+{ "o_orderkey": 1157, "o_custkey": 97 }
+{ "o_orderkey": 1158, "o_custkey": 142 }
+{ "o_orderkey": 1159, "o_custkey": 70 }
+{ "o_orderkey": 1184, "o_custkey": 89 }
+{ "o_orderkey": 1185, "o_custkey": 74 }
+{ "o_orderkey": 1186, "o_custkey": 59 }
+{ "o_orderkey": 1187, "o_custkey": 134 }
+{ "o_orderkey": 1189, "o_custkey": 46 }
+{ "o_orderkey": 1191, "o_custkey": 112 }
+{ "o_orderkey": 1216, "o_custkey": 122 }
+{ "o_orderkey": 1220, "o_custkey": 49 }
+{ "o_orderkey": 1248, "o_custkey": 49 }
+{ "o_orderkey": 1249, "o_custkey": 149 }
+{ "o_orderkey": 1252, "o_custkey": 149 }
+{ "o_orderkey": 1253, "o_custkey": 115 }
+{ "o_orderkey": 1254, "o_custkey": 70 }
+{ "o_orderkey": 1255, "o_custkey": 122 }
+{ "o_orderkey": 1280, "o_custkey": 97 }
+{ "o_orderkey": 1281, "o_custkey": 62 }
+{ "o_orderkey": 1282, "o_custkey": 116 }
+{ "o_orderkey": 1283, "o_custkey": 118 }
+{ "o_orderkey": 1284, "o_custkey": 134 }
+{ "o_orderkey": 1286, "o_custkey": 109 }
+{ "o_orderkey": 1312, "o_custkey": 112 }
+{ "o_orderkey": 1313, "o_custkey": 148 }
+{ "o_orderkey": 1314, "o_custkey": 143 }
+{ "o_orderkey": 1317, "o_custkey": 100 }
+{ "o_orderkey": 1318, "o_custkey": 128 }
+{ "o_orderkey": 1345, "o_custkey": 95 }
+{ "o_orderkey": 1346, "o_custkey": 76 }
+{ "o_orderkey": 1347, "o_custkey": 41 }
+{ "o_orderkey": 1349, "o_custkey": 64 }
+{ "o_orderkey": 1350, "o_custkey": 52 }
+{ "o_orderkey": 1351, "o_custkey": 106 }
+{ "o_orderkey": 1376, "o_custkey": 47 }
+{ "o_orderkey": 1379, "o_custkey": 65 }
+{ "o_orderkey": 1380, "o_custkey": 137 }
+{ "o_orderkey": 1381, "o_custkey": 127 }
+{ "o_orderkey": 1382, "o_custkey": 133 }
+{ "o_orderkey": 1383, "o_custkey": 121 }
+{ "o_orderkey": 1408, "o_custkey": 55 }
+{ "o_orderkey": 1409, "o_custkey": 143 }
+{ "o_orderkey": 1410, "o_custkey": 113 }
+{ "o_orderkey": 1411, "o_custkey": 95 }
+{ "o_orderkey": 1412, "o_custkey": 53 }
+{ "o_orderkey": 1413, "o_custkey": 91 }
+{ "o_orderkey": 1414, "o_custkey": 77 }
+{ "o_orderkey": 1415, "o_custkey": 79 }
+{ "o_orderkey": 1440, "o_custkey": 98 }
+{ "o_orderkey": 1441, "o_custkey": 122 }
+{ "o_orderkey": 1442, "o_custkey": 112 }
+{ "o_orderkey": 1443, "o_custkey": 44 }
+{ "o_orderkey": 1444, "o_custkey": 134 }
+{ "o_orderkey": 1445, "o_custkey": 115 }
+{ "o_orderkey": 1446, "o_custkey": 41 }
+{ "o_orderkey": 1447, "o_custkey": 91 }
+{ "o_orderkey": 1472, "o_custkey": 149 }
+{ "o_orderkey": 1473, "o_custkey": 94 }
+{ "o_orderkey": 1474, "o_custkey": 70 }
+{ "o_orderkey": 1476, "o_custkey": 145 }
+{ "o_orderkey": 1477, "o_custkey": 76 }
+{ "o_orderkey": 1478, "o_custkey": 50 }
+{ "o_orderkey": 1506, "o_custkey": 148 }
+{ "o_orderkey": 1507, "o_custkey": 121 }
+{ "o_orderkey": 1508, "o_custkey": 103 }
+{ "o_orderkey": 1509, "o_custkey": 64 }
+{ "o_orderkey": 1510, "o_custkey": 53 }
+{ "o_orderkey": 1511, "o_custkey": 79 }
+{ "o_orderkey": 1536, "o_custkey": 94 }
+{ "o_orderkey": 1537, "o_custkey": 109 }
+{ "o_orderkey": 1539, "o_custkey": 112 }
+{ "o_orderkey": 1541, "o_custkey": 94 }
+{ "o_orderkey": 1542, "o_custkey": 143 }
+{ "o_orderkey": 1543, "o_custkey": 52 }
+{ "o_orderkey": 1569, "o_custkey": 104 }
+{ "o_orderkey": 1570, "o_custkey": 124 }
+{ "o_orderkey": 1571, "o_custkey": 103 }
+{ "o_orderkey": 1573, "o_custkey": 148 }
+{ "o_orderkey": 1574, "o_custkey": 134 }
+{ "o_orderkey": 1575, "o_custkey": 145 }
+{ "o_orderkey": 1600, "o_custkey": 94 }
+{ "o_orderkey": 1601, "o_custkey": 53 }
+{ "o_orderkey": 1604, "o_custkey": 113 }
+{ "o_orderkey": 1605, "o_custkey": 58 }
+{ "o_orderkey": 1606, "o_custkey": 53 }
+{ "o_orderkey": 1607, "o_custkey": 149 }
+{ "o_orderkey": 1632, "o_custkey": 67 }
+{ "o_orderkey": 1634, "o_custkey": 70 }
+{ "o_orderkey": 1636, "o_custkey": 79 }
+{ "o_orderkey": 1637, "o_custkey": 73 }
+{ "o_orderkey": 1638, "o_custkey": 139 }
+{ "o_orderkey": 1664, "o_custkey": 64 }
+{ "o_orderkey": 1665, "o_custkey": 76 }
+{ "o_orderkey": 1666, "o_custkey": 95 }
+{ "o_orderkey": 1668, "o_custkey": 142 }
+{ "o_orderkey": 1697, "o_custkey": 76 }
+{ "o_orderkey": 1699, "o_custkey": 85 }
+{ "o_orderkey": 1700, "o_custkey": 65 }
+{ "o_orderkey": 1701, "o_custkey": 130 }
+{ "o_orderkey": 1702, "o_custkey": 67 }
+{ "o_orderkey": 1703, "o_custkey": 134 }
+{ "o_orderkey": 1728, "o_custkey": 64 }
+{ "o_orderkey": 1729, "o_custkey": 133 }
+{ "o_orderkey": 1730, "o_custkey": 124 }
+{ "o_orderkey": 1731, "o_custkey": 128 }
+{ "o_orderkey": 1732, "o_custkey": 146 }
+{ "o_orderkey": 1733, "o_custkey": 148 }
+{ "o_orderkey": 1760, "o_custkey": 115 }
+{ "o_orderkey": 1761, "o_custkey": 106 }
+{ "o_orderkey": 1762, "o_custkey": 77 }
+{ "o_orderkey": 1763, "o_custkey": 121 }
+{ "o_orderkey": 1765, "o_custkey": 73 }
+{ "o_orderkey": 1766, "o_custkey": 139 }
+{ "o_orderkey": 1792, "o_custkey": 49 }
+{ "o_orderkey": 1794, "o_custkey": 140 }
+{ "o_orderkey": 1795, "o_custkey": 94 }
+{ "o_orderkey": 1796, "o_custkey": 47 }
+{ "o_orderkey": 1797, "o_custkey": 125 }
+{ "o_orderkey": 1798, "o_custkey": 52 }
+{ "o_orderkey": 1799, "o_custkey": 61 }
+{ "o_orderkey": 1824, "o_custkey": 49 }
+{ "o_orderkey": 1825, "o_custkey": 148 }
+{ "o_orderkey": 1826, "o_custkey": 82 }
+{ "o_orderkey": 1827, "o_custkey": 106 }
+{ "o_orderkey": 1829, "o_custkey": 112 }
+{ "o_orderkey": 1830, "o_custkey": 133 }
+{ "o_orderkey": 1831, "o_custkey": 71 }
+{ "o_orderkey": 1856, "o_custkey": 106 }
+{ "o_orderkey": 1857, "o_custkey": 133 }
+{ "o_orderkey": 1858, "o_custkey": 143 }
+{ "o_orderkey": 1859, "o_custkey": 61 }
+{ "o_orderkey": 1861, "o_custkey": 70 }
+{ "o_orderkey": 1863, "o_custkey": 74 }
+{ "o_orderkey": 1888, "o_custkey": 121 }
+{ "o_orderkey": 1891, "o_custkey": 61 }
+{ "o_orderkey": 1893, "o_custkey": 125 }
+{ "o_orderkey": 1894, "o_custkey": 76 }
+{ "o_orderkey": 1920, "o_custkey": 110 }
+{ "o_orderkey": 1921, "o_custkey": 88 }
+{ "o_orderkey": 1922, "o_custkey": 56 }
+{ "o_orderkey": 1923, "o_custkey": 136 }
+{ "o_orderkey": 1924, "o_custkey": 76 }
+{ "o_orderkey": 1926, "o_custkey": 94 }
+{ "o_orderkey": 1927, "o_custkey": 140 }
+{ "o_orderkey": 1952, "o_custkey": 67 }
+{ "o_orderkey": 1953, "o_custkey": 149 }
+{ "o_orderkey": 1954, "o_custkey": 56 }
+{ "o_orderkey": 1956, "o_custkey": 127 }
+{ "o_orderkey": 1958, "o_custkey": 53 }
+{ "o_orderkey": 1959, "o_custkey": 43 }
+{ "o_orderkey": 1984, "o_custkey": 52 }
+{ "o_orderkey": 1986, "o_custkey": 149 }
+{ "o_orderkey": 1987, "o_custkey": 100 }
+{ "o_orderkey": 1988, "o_custkey": 109 }
+{ "o_orderkey": 1989, "o_custkey": 118 }
+{ "o_orderkey": 1990, "o_custkey": 119 }
+{ "o_orderkey": 2017, "o_custkey": 101 }
+{ "o_orderkey": 2019, "o_custkey": 136 }
+{ "o_orderkey": 2020, "o_custkey": 73 }
+{ "o_orderkey": 2021, "o_custkey": 70 }
+{ "o_orderkey": 2022, "o_custkey": 62 }
+{ "o_orderkey": 2023, "o_custkey": 118 }
+{ "o_orderkey": 2052, "o_custkey": 91 }
+{ "o_orderkey": 2053, "o_custkey": 142 }
+{ "o_orderkey": 2054, "o_custkey": 41 }
+{ "o_orderkey": 2055, "o_custkey": 97 }
+{ "o_orderkey": 2080, "o_custkey": 95 }
+{ "o_orderkey": 2081, "o_custkey": 121 }
+{ "o_orderkey": 2082, "o_custkey": 49 }
+{ "o_orderkey": 2083, "o_custkey": 101 }
+{ "o_orderkey": 2084, "o_custkey": 80 }
+{ "o_orderkey": 2085, "o_custkey": 49 }
+{ "o_orderkey": 2086, "o_custkey": 142 }
+{ "o_orderkey": 2087, "o_custkey": 50 }
+{ "o_orderkey": 2112, "o_custkey": 64 }
+{ "o_orderkey": 2114, "o_custkey": 79 }
+{ "o_orderkey": 2115, "o_custkey": 106 }
+{ "o_orderkey": 2118, "o_custkey": 134 }
+{ "o_orderkey": 2119, "o_custkey": 64 }
+{ "o_orderkey": 2144, "o_custkey": 136 }
+{ "o_orderkey": 2145, "o_custkey": 134 }
+{ "o_orderkey": 2146, "o_custkey": 118 }
+{ "o_orderkey": 2147, "o_custkey": 100 }
+{ "o_orderkey": 2148, "o_custkey": 130 }
+{ "o_orderkey": 2149, "o_custkey": 101 }
+{ "o_orderkey": 2150, "o_custkey": 82 }
+{ "o_orderkey": 2151, "o_custkey": 58 }
+{ "o_orderkey": 2176, "o_custkey": 104 }
+{ "o_orderkey": 2177, "o_custkey": 136 }
+{ "o_orderkey": 2179, "o_custkey": 41 }
+{ "o_orderkey": 2180, "o_custkey": 76 }
+{ "o_orderkey": 2181, "o_custkey": 76 }
+{ "o_orderkey": 2183, "o_custkey": 113 }
+{ "o_orderkey": 2208, "o_custkey": 68 }
+{ "o_orderkey": 2209, "o_custkey": 91 }
+{ "o_orderkey": 2211, "o_custkey": 92 }
+{ "o_orderkey": 2212, "o_custkey": 118 }
+{ "o_orderkey": 2213, "o_custkey": 122 }
+{ "o_orderkey": 2214, "o_custkey": 115 }
+{ "o_orderkey": 2240, "o_custkey": 56 }
+{ "o_orderkey": 2241, "o_custkey": 103 }
+{ "o_orderkey": 2242, "o_custkey": 82 }
+{ "o_orderkey": 2243, "o_custkey": 49 }
+{ "o_orderkey": 2244, "o_custkey": 127 }
+{ "o_orderkey": 2245, "o_custkey": 58 }
+{ "o_orderkey": 2246, "o_custkey": 113 }
+{ "o_orderkey": 2247, "o_custkey": 95 }
+{ "o_orderkey": 2272, "o_custkey": 139 }
+{ "o_orderkey": 2273, "o_custkey": 136 }
+{ "o_orderkey": 2274, "o_custkey": 104 }
+{ "o_orderkey": 2275, "o_custkey": 149 }
+{ "o_orderkey": 2276, "o_custkey": 43 }
+{ "o_orderkey": 2277, "o_custkey": 89 }
+{ "o_orderkey": 2278, "o_custkey": 142 }
+{ "o_orderkey": 2279, "o_custkey": 80 }
+{ "o_orderkey": 2304, "o_custkey": 46 }
+{ "o_orderkey": 2305, "o_custkey": 43 }
+{ "o_orderkey": 2307, "o_custkey": 106 }
+{ "o_orderkey": 2309, "o_custkey": 100 }
+{ "o_orderkey": 2311, "o_custkey": 73 }
+{ "o_orderkey": 2336, "o_custkey": 142 }
+{ "o_orderkey": 2337, "o_custkey": 142 }
+{ "o_orderkey": 2338, "o_custkey": 140 }
+{ "o_orderkey": 2339, "o_custkey": 109 }
+{ "o_orderkey": 2340, "o_custkey": 65 }
+{ "o_orderkey": 2341, "o_custkey": 82 }
+{ "o_orderkey": 2343, "o_custkey": 73 }
+{ "o_orderkey": 2369, "o_custkey": 110 }
+{ "o_orderkey": 2370, "o_custkey": 142 }
+{ "o_orderkey": 2401, "o_custkey": 148 }
+{ "o_orderkey": 2402, "o_custkey": 67 }
+{ "o_orderkey": 2403, "o_custkey": 55 }
+{ "o_orderkey": 2404, "o_custkey": 77 }
+{ "o_orderkey": 2405, "o_custkey": 73 }
+{ "o_orderkey": 2407, "o_custkey": 55 }
+{ "o_orderkey": 2432, "o_custkey": 103 }
+{ "o_orderkey": 2435, "o_custkey": 73 }
+{ "o_orderkey": 2436, "o_custkey": 125 }
+{ "o_orderkey": 2437, "o_custkey": 85 }
+{ "o_orderkey": 2439, "o_custkey": 55 }
+{ "o_orderkey": 2464, "o_custkey": 145 }
+{ "o_orderkey": 2468, "o_custkey": 112 }
+{ "o_orderkey": 2469, "o_custkey": 124 }
+{ "o_orderkey": 2470, "o_custkey": 58 }
+{ "o_orderkey": 2471, "o_custkey": 89 }
+{ "o_orderkey": 2496, "o_custkey": 136 }
+{ "o_orderkey": 2497, "o_custkey": 47 }
+{ "o_orderkey": 2498, "o_custkey": 97 }
+{ "o_orderkey": 2499, "o_custkey": 121 }
+{ "o_orderkey": 2500, "o_custkey": 133 }
+{ "o_orderkey": 2501, "o_custkey": 67 }
+{ "o_orderkey": 2502, "o_custkey": 70 }
+{ "o_orderkey": 2528, "o_custkey": 55 }
+{ "o_orderkey": 2529, "o_custkey": 136 }
+{ "o_orderkey": 2530, "o_custkey": 128 }
+{ "o_orderkey": 2531, "o_custkey": 44 }
+{ "o_orderkey": 2532, "o_custkey": 94 }
+{ "o_orderkey": 2533, "o_custkey": 50 }
+{ "o_orderkey": 2534, "o_custkey": 76 }
+{ "o_orderkey": 2535, "o_custkey": 121 }
+{ "o_orderkey": 2560, "o_custkey": 131 }
+{ "o_orderkey": 2561, "o_custkey": 58 }
+{ "o_orderkey": 2563, "o_custkey": 62 }
+{ "o_orderkey": 2564, "o_custkey": 77 }
+{ "o_orderkey": 2565, "o_custkey": 56 }
+{ "o_orderkey": 2566, "o_custkey": 86 }
+{ "o_orderkey": 2567, "o_custkey": 70 }
+{ "o_orderkey": 2592, "o_custkey": 101 }
+{ "o_orderkey": 2593, "o_custkey": 92 }
+{ "o_orderkey": 2594, "o_custkey": 79 }
+{ "o_orderkey": 2595, "o_custkey": 74 }
+{ "o_orderkey": 2596, "o_custkey": 43 }
+{ "o_orderkey": 2597, "o_custkey": 104 }
+{ "o_orderkey": 2598, "o_custkey": 112 }
+{ "o_orderkey": 2599, "o_custkey": 149 }
+{ "o_orderkey": 2624, "o_custkey": 52 }
+{ "o_orderkey": 2626, "o_custkey": 139 }
+{ "o_orderkey": 2627, "o_custkey": 149 }
+{ "o_orderkey": 2628, "o_custkey": 56 }
+{ "o_orderkey": 2629, "o_custkey": 139 }
+{ "o_orderkey": 2630, "o_custkey": 85 }
+{ "o_orderkey": 2656, "o_custkey": 77 }
+{ "o_orderkey": 2659, "o_custkey": 83 }
+{ "o_orderkey": 2660, "o_custkey": 127 }
+{ "o_orderkey": 2661, "o_custkey": 74 }
+{ "o_orderkey": 2663, "o_custkey": 95 }
+{ "o_orderkey": 2688, "o_custkey": 98 }
+{ "o_orderkey": 2689, "o_custkey": 103 }
+{ "o_orderkey": 2690, "o_custkey": 94 }
+{ "o_orderkey": 2692, "o_custkey": 62 }
+{ "o_orderkey": 2694, "o_custkey": 121 }
+{ "o_orderkey": 2695, "o_custkey": 58 }
+{ "o_orderkey": 2721, "o_custkey": 79 }
+{ "o_orderkey": 2723, "o_custkey": 61 }
+{ "o_orderkey": 2724, "o_custkey": 137 }
+{ "o_orderkey": 2725, "o_custkey": 89 }
+{ "o_orderkey": 2727, "o_custkey": 74 }
+{ "o_orderkey": 2752, "o_custkey": 59 }
+{ "o_orderkey": 2754, "o_custkey": 145 }
+{ "o_orderkey": 2755, "o_custkey": 118 }
+{ "o_orderkey": 2756, "o_custkey": 118 }
+{ "o_orderkey": 2757, "o_custkey": 76 }
+{ "o_orderkey": 2758, "o_custkey": 43 }
+{ "o_orderkey": 2759, "o_custkey": 116 }
+{ "o_orderkey": 2784, "o_custkey": 95 }
+{ "o_orderkey": 2785, "o_custkey": 148 }
+{ "o_orderkey": 2786, "o_custkey": 79 }
+{ "o_orderkey": 2787, "o_custkey": 103 }
+{ "o_orderkey": 2788, "o_custkey": 124 }
+{ "o_orderkey": 2791, "o_custkey": 121 }
+{ "o_orderkey": 2816, "o_custkey": 58 }
+{ "o_orderkey": 2818, "o_custkey": 49 }
+{ "o_orderkey": 2819, "o_custkey": 103 }
+{ "o_orderkey": 2821, "o_custkey": 118 }
+{ "o_orderkey": 2822, "o_custkey": 79 }
+{ "o_orderkey": 2823, "o_custkey": 79 }
+{ "o_orderkey": 2848, "o_custkey": 70 }
+{ "o_orderkey": 2849, "o_custkey": 46 }
+{ "o_orderkey": 2850, "o_custkey": 100 }
+{ "o_orderkey": 2851, "o_custkey": 145 }
+{ "o_orderkey": 2852, "o_custkey": 91 }
+{ "o_orderkey": 2853, "o_custkey": 94 }
+{ "o_orderkey": 2854, "o_custkey": 139 }
+{ "o_orderkey": 2855, "o_custkey": 49 }
+{ "o_orderkey": 2881, "o_custkey": 100 }
+{ "o_orderkey": 2882, "o_custkey": 121 }
+{ "o_orderkey": 2883, "o_custkey": 121 }
+{ "o_orderkey": 2884, "o_custkey": 92 }
+{ "o_orderkey": 2886, "o_custkey": 109 }
+{ "o_orderkey": 2887, "o_custkey": 109 }
+{ "o_orderkey": 2912, "o_custkey": 94 }
+{ "o_orderkey": 2913, "o_custkey": 43 }
+{ "o_orderkey": 2914, "o_custkey": 109 }
+{ "o_orderkey": 2915, "o_custkey": 94 }
+{ "o_orderkey": 2917, "o_custkey": 91 }
+{ "o_orderkey": 2918, "o_custkey": 118 }
+{ "o_orderkey": 2919, "o_custkey": 53 }
+{ "o_orderkey": 2946, "o_custkey": 125 }
+{ "o_orderkey": 2947, "o_custkey": 70 }
+{ "o_orderkey": 2948, "o_custkey": 44 }
+{ "o_orderkey": 2949, "o_custkey": 137 }
+{ "o_orderkey": 2950, "o_custkey": 136 }
+{ "o_orderkey": 2951, "o_custkey": 74 }
+{ "o_orderkey": 2977, "o_custkey": 73 }
+{ "o_orderkey": 2978, "o_custkey": 44 }
+{ "o_orderkey": 2979, "o_custkey": 133 }
+{ "o_orderkey": 2981, "o_custkey": 49 }
+{ "o_orderkey": 2982, "o_custkey": 85 }
+{ "o_orderkey": 2983, "o_custkey": 62 }
+{ "o_orderkey": 3009, "o_custkey": 55 }
+{ "o_orderkey": 3011, "o_custkey": 91 }
+{ "o_orderkey": 3013, "o_custkey": 143 }
+{ "o_orderkey": 3015, "o_custkey": 103 }
+{ "o_orderkey": 3040, "o_custkey": 112 }
+{ "o_orderkey": 3041, "o_custkey": 113 }
+{ "o_orderkey": 3043, "o_custkey": 44 }
+{ "o_orderkey": 3044, "o_custkey": 53 }
+{ "o_orderkey": 3045, "o_custkey": 50 }
+{ "o_orderkey": 3073, "o_custkey": 136 }
+{ "o_orderkey": 3074, "o_custkey": 67 }
+{ "o_orderkey": 3075, "o_custkey": 127 }
+{ "o_orderkey": 3076, "o_custkey": 92 }
+{ "o_orderkey": 3077, "o_custkey": 121 }
+{ "o_orderkey": 3078, "o_custkey": 49 }
+{ "o_orderkey": 3079, "o_custkey": 100 }
+{ "o_orderkey": 3104, "o_custkey": 70 }
+{ "o_orderkey": 3105, "o_custkey": 137 }
+{ "o_orderkey": 3106, "o_custkey": 145 }
+{ "o_orderkey": 3108, "o_custkey": 85 }
+{ "o_orderkey": 3109, "o_custkey": 124 }
+{ "o_orderkey": 3110, "o_custkey": 88 }
+{ "o_orderkey": 3111, "o_custkey": 133 }
+{ "o_orderkey": 3137, "o_custkey": 136 }
+{ "o_orderkey": 3138, "o_custkey": 139 }
+{ "o_orderkey": 3140, "o_custkey": 145 }
+{ "o_orderkey": 3143, "o_custkey": 107 }
+{ "o_orderkey": 3168, "o_custkey": 136 }
+{ "o_orderkey": 3171, "o_custkey": 47 }
+{ "o_orderkey": 3172, "o_custkey": 89 }
+{ "o_orderkey": 3173, "o_custkey": 148 }
+{ "o_orderkey": 3174, "o_custkey": 127 }
+{ "o_orderkey": 3175, "o_custkey": 44 }
+{ "o_orderkey": 3201, "o_custkey": 97 }
+{ "o_orderkey": 3202, "o_custkey": 88 }
+{ "o_orderkey": 3203, "o_custkey": 127 }
+{ "o_orderkey": 3205, "o_custkey": 148 }
+{ "o_orderkey": 3206, "o_custkey": 122 }
+{ "o_orderkey": 3232, "o_custkey": 82 }
+{ "o_orderkey": 3233, "o_custkey": 140 }
+{ "o_orderkey": 3235, "o_custkey": 46 }
+{ "o_orderkey": 3236, "o_custkey": 142 }
+{ "o_orderkey": 3238, "o_custkey": 61 }
+{ "o_orderkey": 3264, "o_custkey": 94 }
+{ "o_orderkey": 3265, "o_custkey": 53 }
+{ "o_orderkey": 3267, "o_custkey": 112 }
+{ "o_orderkey": 3268, "o_custkey": 142 }
+{ "o_orderkey": 3296, "o_custkey": 148 }
+{ "o_orderkey": 3297, "o_custkey": 139 }
+{ "o_orderkey": 3298, "o_custkey": 116 }
+{ "o_orderkey": 3299, "o_custkey": 91 }
+{ "o_orderkey": 3300, "o_custkey": 118 }
+{ "o_orderkey": 3301, "o_custkey": 133 }
+{ "o_orderkey": 3303, "o_custkey": 145 }
+{ "o_orderkey": 3331, "o_custkey": 91 }
+{ "o_orderkey": 3332, "o_custkey": 143 }
+{ "o_orderkey": 3333, "o_custkey": 92 }
+{ "o_orderkey": 3334, "o_custkey": 76 }
+{ "o_orderkey": 3335, "o_custkey": 49 }
+{ "o_orderkey": 3360, "o_custkey": 103 }
+{ "o_orderkey": 3361, "o_custkey": 49 }
+{ "o_orderkey": 3362, "o_custkey": 140 }
+{ "o_orderkey": 3363, "o_custkey": 52 }
+{ "o_orderkey": 3364, "o_custkey": 46 }
+{ "o_orderkey": 3365, "o_custkey": 82 }
+{ "o_orderkey": 3366, "o_custkey": 52 }
+{ "o_orderkey": 3367, "o_custkey": 73 }
+{ "o_orderkey": 3392, "o_custkey": 74 }
+{ "o_orderkey": 3393, "o_custkey": 98 }
+{ "o_orderkey": 3394, "o_custkey": 149 }
+{ "o_orderkey": 3395, "o_custkey": 149 }
+{ "o_orderkey": 3396, "o_custkey": 149 }
+{ "o_orderkey": 3397, "o_custkey": 130 }
+{ "o_orderkey": 3398, "o_custkey": 67 }
+{ "o_orderkey": 3399, "o_custkey": 122 }
+{ "o_orderkey": 3424, "o_custkey": 103 }
+{ "o_orderkey": 3425, "o_custkey": 115 }
+{ "o_orderkey": 3426, "o_custkey": 53 }
+{ "o_orderkey": 3429, "o_custkey": 146 }
+{ "o_orderkey": 3430, "o_custkey": 113 }
+{ "o_orderkey": 3431, "o_custkey": 47 }
+{ "o_orderkey": 3456, "o_custkey": 46 }
+{ "o_orderkey": 3458, "o_custkey": 95 }
+{ "o_orderkey": 3459, "o_custkey": 119 }
+{ "o_orderkey": 3460, "o_custkey": 82 }
+{ "o_orderkey": 3461, "o_custkey": 100 }
+{ "o_orderkey": 3462, "o_custkey": 133 }
+{ "o_orderkey": 3463, "o_custkey": 89 }
+{ "o_orderkey": 3488, "o_custkey": 148 }
+{ "o_orderkey": 3489, "o_custkey": 109 }
+{ "o_orderkey": 3490, "o_custkey": 91 }
+{ "o_orderkey": 3491, "o_custkey": 83 }
+{ "o_orderkey": 3492, "o_custkey": 103 }
+{ "o_orderkey": 3493, "o_custkey": 82 }
+{ "o_orderkey": 3494, "o_custkey": 49 }
+{ "o_orderkey": 3520, "o_custkey": 125 }
+{ "o_orderkey": 3523, "o_custkey": 149 }
+{ "o_orderkey": 3524, "o_custkey": 94 }
+{ "o_orderkey": 3525, "o_custkey": 109 }
+{ "o_orderkey": 3526, "o_custkey": 56 }
+{ "o_orderkey": 3527, "o_custkey": 56 }
+{ "o_orderkey": 3553, "o_custkey": 91 }
+{ "o_orderkey": 3554, "o_custkey": 44 }
+{ "o_orderkey": 3555, "o_custkey": 46 }
+{ "o_orderkey": 3557, "o_custkey": 121 }
+{ "o_orderkey": 3559, "o_custkey": 106 }
+{ "o_orderkey": 3585, "o_custkey": 139 }
+{ "o_orderkey": 3586, "o_custkey": 121 }
+{ "o_orderkey": 3587, "o_custkey": 79 }
+{ "o_orderkey": 3588, "o_custkey": 119 }
+{ "o_orderkey": 3590, "o_custkey": 149 }
+{ "o_orderkey": 3591, "o_custkey": 136 }
+{ "o_orderkey": 3616, "o_custkey": 128 }
+{ "o_orderkey": 3619, "o_custkey": 149 }
+{ "o_orderkey": 3620, "o_custkey": 44 }
+{ "o_orderkey": 3621, "o_custkey": 142 }
+{ "o_orderkey": 3622, "o_custkey": 91 }
+{ "o_orderkey": 3648, "o_custkey": 125 }
+{ "o_orderkey": 3650, "o_custkey": 46 }
+{ "o_orderkey": 3651, "o_custkey": 100 }
+{ "o_orderkey": 3652, "o_custkey": 107 }
+{ "o_orderkey": 3655, "o_custkey": 49 }
+{ "o_orderkey": 3680, "o_custkey": 127 }
+{ "o_orderkey": 3681, "o_custkey": 52 }
+{ "o_orderkey": 3683, "o_custkey": 88 }
+{ "o_orderkey": 3687, "o_custkey": 43 }
+{ "o_orderkey": 3712, "o_custkey": 64 }
+{ "o_orderkey": 3713, "o_custkey": 149 }
+{ "o_orderkey": 3715, "o_custkey": 65 }
+{ "o_orderkey": 3716, "o_custkey": 43 }
+{ "o_orderkey": 3719, "o_custkey": 118 }
+{ "o_orderkey": 3744, "o_custkey": 65 }
+{ "o_orderkey": 3745, "o_custkey": 112 }
+{ "o_orderkey": 3746, "o_custkey": 74 }
+{ "o_orderkey": 3747, "o_custkey": 149 }
+{ "o_orderkey": 3748, "o_custkey": 53 }
+{ "o_orderkey": 3750, "o_custkey": 97 }
+{ "o_orderkey": 3776, "o_custkey": 85 }
+{ "o_orderkey": 3778, "o_custkey": 106 }
+{ "o_orderkey": 3779, "o_custkey": 74 }
+{ "o_orderkey": 3780, "o_custkey": 41 }
+{ "o_orderkey": 3781, "o_custkey": 139 }
+{ "o_orderkey": 3782, "o_custkey": 65 }
+{ "o_orderkey": 3783, "o_custkey": 44 }
+{ "o_orderkey": 3808, "o_custkey": 79 }
+{ "o_orderkey": 3809, "o_custkey": 148 }
+{ "o_orderkey": 3810, "o_custkey": 100 }
+{ "o_orderkey": 3811, "o_custkey": 80 }
+{ "o_orderkey": 3812, "o_custkey": 41 }
+{ "o_orderkey": 3813, "o_custkey": 146 }
+{ "o_orderkey": 3814, "o_custkey": 118 }
+{ "o_orderkey": 3815, "o_custkey": 104 }
+{ "o_orderkey": 3840, "o_custkey": 100 }
+{ "o_orderkey": 3841, "o_custkey": 58 }
+{ "o_orderkey": 3844, "o_custkey": 79 }
+{ "o_orderkey": 3845, "o_custkey": 89 }
+{ "o_orderkey": 3846, "o_custkey": 49 }
+{ "o_orderkey": 3872, "o_custkey": 134 }
+{ "o_orderkey": 3873, "o_custkey": 55 }
+{ "o_orderkey": 3874, "o_custkey": 119 }
+{ "o_orderkey": 3875, "o_custkey": 118 }
+{ "o_orderkey": 3878, "o_custkey": 88 }
+{ "o_orderkey": 3879, "o_custkey": 142 }
+{ "o_orderkey": 3904, "o_custkey": 149 }
+{ "o_orderkey": 3906, "o_custkey": 46 }
+{ "o_orderkey": 3907, "o_custkey": 67 }
+{ "o_orderkey": 3908, "o_custkey": 43 }
+{ "o_orderkey": 3910, "o_custkey": 64 }
+{ "o_orderkey": 3937, "o_custkey": 94 }
+{ "o_orderkey": 3939, "o_custkey": 70 }
+{ "o_orderkey": 3940, "o_custkey": 149 }
+{ "o_orderkey": 3941, "o_custkey": 136 }
+{ "o_orderkey": 3942, "o_custkey": 76 }
+{ "o_orderkey": 3969, "o_custkey": 52 }
+{ "o_orderkey": 3970, "o_custkey": 76 }
+{ "o_orderkey": 3971, "o_custkey": 104 }
+{ "o_orderkey": 3972, "o_custkey": 124 }
+{ "o_orderkey": 3973, "o_custkey": 103 }
+{ "o_orderkey": 3974, "o_custkey": 94 }
+{ "o_orderkey": 3975, "o_custkey": 118 }
+{ "o_orderkey": 4000, "o_custkey": 70 }
+{ "o_orderkey": 4001, "o_custkey": 115 }
+{ "o_orderkey": 4002, "o_custkey": 104 }
+{ "o_orderkey": 4003, "o_custkey": 112 }
+{ "o_orderkey": 4004, "o_custkey": 70 }
+{ "o_orderkey": 4005, "o_custkey": 140 }
+{ "o_orderkey": 4033, "o_custkey": 83 }
+{ "o_orderkey": 4034, "o_custkey": 94 }
+{ "o_orderkey": 4035, "o_custkey": 118 }
+{ "o_orderkey": 4036, "o_custkey": 47 }
+{ "o_orderkey": 4037, "o_custkey": 121 }
+{ "o_orderkey": 4038, "o_custkey": 94 }
+{ "o_orderkey": 4064, "o_custkey": 130 }
+{ "o_orderkey": 4065, "o_custkey": 80 }
+{ "o_orderkey": 4068, "o_custkey": 125 }
+{ "o_orderkey": 4069, "o_custkey": 73 }
+{ "o_orderkey": 4071, "o_custkey": 148 }
+{ "o_orderkey": 4096, "o_custkey": 139 }
+{ "o_orderkey": 4101, "o_custkey": 142 }
+{ "o_orderkey": 4103, "o_custkey": 106 }
+{ "o_orderkey": 4128, "o_custkey": 139 }
+{ "o_orderkey": 4130, "o_custkey": 104 }
+{ "o_orderkey": 4131, "o_custkey": 44 }
+{ "o_orderkey": 4133, "o_custkey": 101 }
+{ "o_orderkey": 4134, "o_custkey": 97 }
+{ "o_orderkey": 4160, "o_custkey": 55 }
+{ "o_orderkey": 4161, "o_custkey": 118 }
+{ "o_orderkey": 4163, "o_custkey": 64 }
+{ "o_orderkey": 4164, "o_custkey": 94 }
+{ "o_orderkey": 4166, "o_custkey": 43 }
+{ "o_orderkey": 4192, "o_custkey": 146 }
+{ "o_orderkey": 4194, "o_custkey": 106 }
+{ "o_orderkey": 4195, "o_custkey": 104 }
+{ "o_orderkey": 4196, "o_custkey": 106 }
+{ "o_orderkey": 4197, "o_custkey": 92 }
+{ "o_orderkey": 4198, "o_custkey": 143 }
+{ "o_orderkey": 4224, "o_custkey": 70 }
+{ "o_orderkey": 4225, "o_custkey": 128 }
+{ "o_orderkey": 4226, "o_custkey": 92 }
+{ "o_orderkey": 4227, "o_custkey": 133 }
+{ "o_orderkey": 4228, "o_custkey": 110 }
+{ "o_orderkey": 4230, "o_custkey": 140 }
+{ "o_orderkey": 4231, "o_custkey": 86 }
+{ "o_orderkey": 4256, "o_custkey": 118 }
+{ "o_orderkey": 4258, "o_custkey": 92 }
+{ "o_orderkey": 4259, "o_custkey": 104 }
+{ "o_orderkey": 4260, "o_custkey": 142 }
+{ "o_orderkey": 4261, "o_custkey": 118 }
+{ "o_orderkey": 4262, "o_custkey": 88 }
+{ "o_orderkey": 4289, "o_custkey": 125 }
+{ "o_orderkey": 4290, "o_custkey": 41 }
+{ "o_orderkey": 4291, "o_custkey": 89 }
+{ "o_orderkey": 4293, "o_custkey": 103 }
+{ "o_orderkey": 4294, "o_custkey": 49 }
+{ "o_orderkey": 4320, "o_custkey": 115 }
+{ "o_orderkey": 4322, "o_custkey": 142 }
+{ "o_orderkey": 4323, "o_custkey": 104 }
+{ "o_orderkey": 4324, "o_custkey": 73 }
+{ "o_orderkey": 4325, "o_custkey": 130 }
+{ "o_orderkey": 4327, "o_custkey": 146 }
+{ "o_orderkey": 4353, "o_custkey": 73 }
+{ "o_orderkey": 4354, "o_custkey": 145 }
+{ "o_orderkey": 4356, "o_custkey": 97 }
+{ "o_orderkey": 4357, "o_custkey": 47 }
+{ "o_orderkey": 4385, "o_custkey": 122 }
+{ "o_orderkey": 4386, "o_custkey": 61 }
+{ "o_orderkey": 4387, "o_custkey": 110 }
+{ "o_orderkey": 4389, "o_custkey": 55 }
+{ "o_orderkey": 4416, "o_custkey": 149 }
+{ "o_orderkey": 4417, "o_custkey": 67 }
+{ "o_orderkey": 4418, "o_custkey": 61 }
+{ "o_orderkey": 4419, "o_custkey": 104 }
+{ "o_orderkey": 4420, "o_custkey": 109 }
+{ "o_orderkey": 4422, "o_custkey": 70 }
+{ "o_orderkey": 4423, "o_custkey": 64 }
+{ "o_orderkey": 4448, "o_custkey": 70 }
+{ "o_orderkey": 4450, "o_custkey": 106 }
+{ "o_orderkey": 4453, "o_custkey": 65 }
+{ "o_orderkey": 4454, "o_custkey": 142 }
+{ "o_orderkey": 4480, "o_custkey": 85 }
+{ "o_orderkey": 4481, "o_custkey": 148 }
+{ "o_orderkey": 4482, "o_custkey": 82 }
+{ "o_orderkey": 4483, "o_custkey": 52 }
+{ "o_orderkey": 4484, "o_custkey": 131 }
+{ "o_orderkey": 4485, "o_custkey": 53 }
+{ "o_orderkey": 4487, "o_custkey": 46 }
+{ "o_orderkey": 4512, "o_custkey": 70 }
+{ "o_orderkey": 4513, "o_custkey": 85 }
+{ "o_orderkey": 4514, "o_custkey": 97 }
+{ "o_orderkey": 4515, "o_custkey": 140 }
+{ "o_orderkey": 4516, "o_custkey": 130 }
+{ "o_orderkey": 4517, "o_custkey": 113 }
+{ "o_orderkey": 4518, "o_custkey": 125 }
+{ "o_orderkey": 4519, "o_custkey": 136 }
+{ "o_orderkey": 4544, "o_custkey": 112 }
+{ "o_orderkey": 4545, "o_custkey": 59 }
+{ "o_orderkey": 4546, "o_custkey": 43 }
+{ "o_orderkey": 4547, "o_custkey": 109 }
+{ "o_orderkey": 4548, "o_custkey": 127 }
+{ "o_orderkey": 4549, "o_custkey": 64 }
+{ "o_orderkey": 4550, "o_custkey": 118 }
+{ "o_orderkey": 4551, "o_custkey": 109 }
+{ "o_orderkey": 4576, "o_custkey": 139 }
+{ "o_orderkey": 4577, "o_custkey": 79 }
+{ "o_orderkey": 4578, "o_custkey": 91 }
+{ "o_orderkey": 4579, "o_custkey": 106 }
+{ "o_orderkey": 4580, "o_custkey": 82 }
+{ "o_orderkey": 4581, "o_custkey": 79 }
+{ "o_orderkey": 4608, "o_custkey": 80 }
+{ "o_orderkey": 4609, "o_custkey": 133 }
+{ "o_orderkey": 4612, "o_custkey": 61 }
+{ "o_orderkey": 4613, "o_custkey": 133 }
+{ "o_orderkey": 4614, "o_custkey": 61 }
+{ "o_orderkey": 4640, "o_custkey": 97 }
+{ "o_orderkey": 4641, "o_custkey": 134 }
+{ "o_orderkey": 4642, "o_custkey": 148 }
+{ "o_orderkey": 4643, "o_custkey": 67 }
+{ "o_orderkey": 4644, "o_custkey": 94 }
+{ "o_orderkey": 4645, "o_custkey": 44 }
+{ "o_orderkey": 4646, "o_custkey": 83 }
+{ "o_orderkey": 4672, "o_custkey": 79 }
+{ "o_orderkey": 4673, "o_custkey": 82 }
+{ "o_orderkey": 4675, "o_custkey": 86 }
+{ "o_orderkey": 4678, "o_custkey": 88 }
+{ "o_orderkey": 4679, "o_custkey": 88 }
+{ "o_orderkey": 4705, "o_custkey": 98 }
+{ "o_orderkey": 4707, "o_custkey": 91 }
+{ "o_orderkey": 4708, "o_custkey": 85 }
+{ "o_orderkey": 4710, "o_custkey": 100 }
+{ "o_orderkey": 4711, "o_custkey": 142 }
+{ "o_orderkey": 4736, "o_custkey": 139 }
+{ "o_orderkey": 4737, "o_custkey": 79 }
+{ "o_orderkey": 4739, "o_custkey": 148 }
+{ "o_orderkey": 4740, "o_custkey": 68 }
+{ "o_orderkey": 4741, "o_custkey": 127 }
+{ "o_orderkey": 4742, "o_custkey": 64 }
+{ "o_orderkey": 4743, "o_custkey": 97 }
+{ "o_orderkey": 4768, "o_custkey": 136 }
+{ "o_orderkey": 4769, "o_custkey": 121 }
+{ "o_orderkey": 4770, "o_custkey": 59 }
+{ "o_orderkey": 4771, "o_custkey": 95 }
+{ "o_orderkey": 4773, "o_custkey": 122 }
+{ "o_orderkey": 4774, "o_custkey": 52 }
+{ "o_orderkey": 4775, "o_custkey": 128 }
+{ "o_orderkey": 4801, "o_custkey": 88 }
+{ "o_orderkey": 4802, "o_custkey": 130 }
+{ "o_orderkey": 4803, "o_custkey": 124 }
+{ "o_orderkey": 4807, "o_custkey": 53 }
+{ "o_orderkey": 4833, "o_custkey": 133 }
+{ "o_orderkey": 4835, "o_custkey": 146 }
+{ "o_orderkey": 4836, "o_custkey": 65 }
+{ "o_orderkey": 4837, "o_custkey": 130 }
+{ "o_orderkey": 4838, "o_custkey": 44 }
+{ "o_orderkey": 4864, "o_custkey": 88 }
+{ "o_orderkey": 4865, "o_custkey": 85 }
+{ "o_orderkey": 4866, "o_custkey": 53 }
+{ "o_orderkey": 4868, "o_custkey": 76 }
+{ "o_orderkey": 4869, "o_custkey": 58 }
+{ "o_orderkey": 4870, "o_custkey": 103 }
+{ "o_orderkey": 4871, "o_custkey": 46 }
+{ "o_orderkey": 4896, "o_custkey": 85 }
+{ "o_orderkey": 4897, "o_custkey": 80 }
+{ "o_orderkey": 4899, "o_custkey": 61 }
+{ "o_orderkey": 4900, "o_custkey": 137 }
+{ "o_orderkey": 4901, "o_custkey": 79 }
+{ "o_orderkey": 4902, "o_custkey": 139 }
+{ "o_orderkey": 4903, "o_custkey": 92 }
+{ "o_orderkey": 4929, "o_custkey": 149 }
+{ "o_orderkey": 4930, "o_custkey": 149 }
+{ "o_orderkey": 4931, "o_custkey": 50 }
+{ "o_orderkey": 4932, "o_custkey": 122 }
+{ "o_orderkey": 4933, "o_custkey": 94 }
+{ "o_orderkey": 4960, "o_custkey": 124 }
+{ "o_orderkey": 4961, "o_custkey": 58 }
+{ "o_orderkey": 4962, "o_custkey": 104 }
+{ "o_orderkey": 4964, "o_custkey": 101 }
+{ "o_orderkey": 4965, "o_custkey": 52 }
+{ "o_orderkey": 4966, "o_custkey": 70 }
+{ "o_orderkey": 4967, "o_custkey": 98 }
+{ "o_orderkey": 4992, "o_custkey": 62 }
+{ "o_orderkey": 4994, "o_custkey": 43 }
+{ "o_orderkey": 4996, "o_custkey": 133 }
+{ "o_orderkey": 4997, "o_custkey": 47 }
+{ "o_orderkey": 4999, "o_custkey": 85 }
+{ "o_orderkey": 5024, "o_custkey": 124 }
+{ "o_orderkey": 5025, "o_custkey": 121 }
+{ "o_orderkey": 5027, "o_custkey": 148 }
+{ "o_orderkey": 5030, "o_custkey": 106 }
+{ "o_orderkey": 5031, "o_custkey": 139 }
+{ "o_orderkey": 5056, "o_custkey": 52 }
+{ "o_orderkey": 5057, "o_custkey": 64 }
+{ "o_orderkey": 5058, "o_custkey": 119 }
+{ "o_orderkey": 5059, "o_custkey": 43 }
+{ "o_orderkey": 5060, "o_custkey": 112 }
+{ "o_orderkey": 5061, "o_custkey": 101 }
+{ "o_orderkey": 5062, "o_custkey": 61 }
+{ "o_orderkey": 5088, "o_custkey": 130 }
+{ "o_orderkey": 5089, "o_custkey": 130 }
+{ "o_orderkey": 5090, "o_custkey": 89 }
+{ "o_orderkey": 5091, "o_custkey": 148 }
+{ "o_orderkey": 5093, "o_custkey": 79 }
+{ "o_orderkey": 5094, "o_custkey": 106 }
+{ "o_orderkey": 5095, "o_custkey": 97 }
+{ "o_orderkey": 5121, "o_custkey": 133 }
+{ "o_orderkey": 5122, "o_custkey": 70 }
+{ "o_orderkey": 5126, "o_custkey": 112 }
+{ "o_orderkey": 5127, "o_custkey": 73 }
+{ "o_orderkey": 5152, "o_custkey": 44 }
+{ "o_orderkey": 5153, "o_custkey": 113 }
+{ "o_orderkey": 5155, "o_custkey": 77 }
+{ "o_orderkey": 5156, "o_custkey": 125 }
+{ "o_orderkey": 5157, "o_custkey": 142 }
+{ "o_orderkey": 5158, "o_custkey": 76 }
+{ "o_orderkey": 5159, "o_custkey": 106 }
+{ "o_orderkey": 5184, "o_custkey": 85 }
+{ "o_orderkey": 5185, "o_custkey": 148 }
+{ "o_orderkey": 5186, "o_custkey": 52 }
+{ "o_orderkey": 5187, "o_custkey": 55 }
+{ "o_orderkey": 5188, "o_custkey": 140 }
+{ "o_orderkey": 5189, "o_custkey": 71 }
+{ "o_orderkey": 5190, "o_custkey": 58 }
+{ "o_orderkey": 5191, "o_custkey": 77 }
+{ "o_orderkey": 5216, "o_custkey": 59 }
+{ "o_orderkey": 5218, "o_custkey": 82 }
+{ "o_orderkey": 5219, "o_custkey": 88 }
+{ "o_orderkey": 5222, "o_custkey": 80 }
+{ "o_orderkey": 5223, "o_custkey": 149 }
+{ "o_orderkey": 5248, "o_custkey": 70 }
+{ "o_orderkey": 5249, "o_custkey": 103 }
+{ "o_orderkey": 5250, "o_custkey": 97 }
+{ "o_orderkey": 5252, "o_custkey": 91 }
+{ "o_orderkey": 5253, "o_custkey": 148 }
+{ "o_orderkey": 5254, "o_custkey": 112 }
+{ "o_orderkey": 5255, "o_custkey": 64 }
+{ "o_orderkey": 5281, "o_custkey": 124 }
+{ "o_orderkey": 5282, "o_custkey": 50 }
+{ "o_orderkey": 5283, "o_custkey": 131 }
+{ "o_orderkey": 5284, "o_custkey": 61 }
+{ "o_orderkey": 5285, "o_custkey": 70 }
+{ "o_orderkey": 5286, "o_custkey": 116 }
+{ "o_orderkey": 5312, "o_custkey": 65 }
+{ "o_orderkey": 5315, "o_custkey": 139 }
+{ "o_orderkey": 5316, "o_custkey": 100 }
+{ "o_orderkey": 5318, "o_custkey": 59 }
+{ "o_orderkey": 5319, "o_custkey": 98 }
+{ "o_orderkey": 5344, "o_custkey": 109 }
+{ "o_orderkey": 5347, "o_custkey": 49 }
+{ "o_orderkey": 5348, "o_custkey": 53 }
+{ "o_orderkey": 5349, "o_custkey": 67 }
+{ "o_orderkey": 5350, "o_custkey": 76 }
+{ "o_orderkey": 5351, "o_custkey": 122 }
+{ "o_orderkey": 5376, "o_custkey": 149 }
+{ "o_orderkey": 5377, "o_custkey": 64 }
+{ "o_orderkey": 5378, "o_custkey": 43 }
+{ "o_orderkey": 5379, "o_custkey": 89 }
+{ "o_orderkey": 5380, "o_custkey": 148 }
+{ "o_orderkey": 5411, "o_custkey": 61 }
+{ "o_orderkey": 5412, "o_custkey": 142 }
+{ "o_orderkey": 5413, "o_custkey": 94 }
+{ "o_orderkey": 5414, "o_custkey": 100 }
+{ "o_orderkey": 5440, "o_custkey": 130 }
+{ "o_orderkey": 5441, "o_custkey": 41 }
+{ "o_orderkey": 5442, "o_custkey": 43 }
+{ "o_orderkey": 5443, "o_custkey": 131 }
+{ "o_orderkey": 5444, "o_custkey": 130 }
+{ "o_orderkey": 5445, "o_custkey": 115 }
+{ "o_orderkey": 5472, "o_custkey": 70 }
+{ "o_orderkey": 5473, "o_custkey": 65 }
+{ "o_orderkey": 5474, "o_custkey": 55 }
+{ "o_orderkey": 5475, "o_custkey": 139 }
+{ "o_orderkey": 5476, "o_custkey": 91 }
+{ "o_orderkey": 5477, "o_custkey": 107 }
+{ "o_orderkey": 5478, "o_custkey": 116 }
+{ "o_orderkey": 5479, "o_custkey": 70 }
+{ "o_orderkey": 5505, "o_custkey": 95 }
+{ "o_orderkey": 5506, "o_custkey": 91 }
+{ "o_orderkey": 5508, "o_custkey": 56 }
+{ "o_orderkey": 5509, "o_custkey": 80 }
+{ "o_orderkey": 5511, "o_custkey": 79 }
+{ "o_orderkey": 5536, "o_custkey": 116 }
+{ "o_orderkey": 5537, "o_custkey": 118 }
+{ "o_orderkey": 5538, "o_custkey": 139 }
+{ "o_orderkey": 5539, "o_custkey": 119 }
+{ "o_orderkey": 5540, "o_custkey": 130 }
+{ "o_orderkey": 5541, "o_custkey": 143 }
+{ "o_orderkey": 5542, "o_custkey": 49 }
+{ "o_orderkey": 5543, "o_custkey": 115 }
+{ "o_orderkey": 5569, "o_custkey": 109 }
+{ "o_orderkey": 5570, "o_custkey": 112 }
+{ "o_orderkey": 5571, "o_custkey": 103 }
+{ "o_orderkey": 5575, "o_custkey": 103 }
+{ "o_orderkey": 5600, "o_custkey": 95 }
+{ "o_orderkey": 5602, "o_custkey": 130 }
+{ "o_orderkey": 5603, "o_custkey": 71 }
+{ "o_orderkey": 5604, "o_custkey": 46 }
+{ "o_orderkey": 5606, "o_custkey": 149 }
+{ "o_orderkey": 5607, "o_custkey": 92 }
+{ "o_orderkey": 5632, "o_custkey": 79 }
+{ "o_orderkey": 5633, "o_custkey": 79 }
+{ "o_orderkey": 5634, "o_custkey": 68 }
+{ "o_orderkey": 5635, "o_custkey": 70 }
+{ "o_orderkey": 5636, "o_custkey": 122 }
+{ "o_orderkey": 5637, "o_custkey": 103 }
+{ "o_orderkey": 5638, "o_custkey": 109 }
+{ "o_orderkey": 5639, "o_custkey": 145 }
+{ "o_orderkey": 5664, "o_custkey": 119 }
+{ "o_orderkey": 5665, "o_custkey": 100 }
+{ "o_orderkey": 5667, "o_custkey": 44 }
+{ "o_orderkey": 5668, "o_custkey": 109 }
+{ "o_orderkey": 5669, "o_custkey": 74 }
+{ "o_orderkey": 5671, "o_custkey": 43 }
+{ "o_orderkey": 5696, "o_custkey": 142 }
+{ "o_orderkey": 5697, "o_custkey": 55 }
+{ "o_orderkey": 5698, "o_custkey": 95 }
+{ "o_orderkey": 5699, "o_custkey": 142 }
+{ "o_orderkey": 5700, "o_custkey": 143 }
+{ "o_orderkey": 5701, "o_custkey": 43 }
+{ "o_orderkey": 5702, "o_custkey": 97 }
+{ "o_orderkey": 5703, "o_custkey": 121 }
+{ "o_orderkey": 5728, "o_custkey": 80 }
+{ "o_orderkey": 5729, "o_custkey": 44 }
+{ "o_orderkey": 5733, "o_custkey": 101 }
+{ "o_orderkey": 5734, "o_custkey": 94 }
+{ "o_orderkey": 5762, "o_custkey": 49 }
+{ "o_orderkey": 5764, "o_custkey": 131 }
+{ "o_orderkey": 5765, "o_custkey": 52 }
+{ "o_orderkey": 5766, "o_custkey": 49 }
+{ "o_orderkey": 5767, "o_custkey": 118 }
+{ "o_orderkey": 5796, "o_custkey": 149 }
+{ "o_orderkey": 5797, "o_custkey": 122 }
+{ "o_orderkey": 5798, "o_custkey": 106 }
+{ "o_orderkey": 5824, "o_custkey": 56 }
+{ "o_orderkey": 5825, "o_custkey": 61 }
+{ "o_orderkey": 5828, "o_custkey": 127 }
+{ "o_orderkey": 5829, "o_custkey": 125 }
+{ "o_orderkey": 5830, "o_custkey": 85 }
+{ "o_orderkey": 5831, "o_custkey": 139 }
+{ "o_orderkey": 5857, "o_custkey": 124 }
+{ "o_orderkey": 5858, "o_custkey": 64 }
+{ "o_orderkey": 5861, "o_custkey": 139 }
+{ "o_orderkey": 5862, "o_custkey": 64 }
+{ "o_orderkey": 5863, "o_custkey": 65 }
+{ "o_orderkey": 5888, "o_custkey": 46 }
+{ "o_orderkey": 5890, "o_custkey": 49 }
+{ "o_orderkey": 5891, "o_custkey": 46 }
+{ "o_orderkey": 5892, "o_custkey": 101 }
+{ "o_orderkey": 5894, "o_custkey": 71 }
+{ "o_orderkey": 5895, "o_custkey": 64 }
+{ "o_orderkey": 5920, "o_custkey": 119 }
+{ "o_orderkey": 5921, "o_custkey": 58 }
+{ "o_orderkey": 5922, "o_custkey": 143 }
+{ "o_orderkey": 5923, "o_custkey": 101 }
+{ "o_orderkey": 5925, "o_custkey": 146 }
+{ "o_orderkey": 5926, "o_custkey": 76 }
+{ "o_orderkey": 5927, "o_custkey": 116 }
+{ "o_orderkey": 5952, "o_custkey": 148 }
+{ "o_orderkey": 5955, "o_custkey": 94 }
+{ "o_orderkey": 5957, "o_custkey": 89 }
+{ "o_orderkey": 5958, "o_custkey": 115 }
+{ "o_orderkey": 5984, "o_custkey": 70 }
+{ "o_orderkey": 5985, "o_custkey": 143 }
+{ "o_orderkey": 5986, "o_custkey": 115 }
+{ "o_orderkey": 5987, "o_custkey": 64 }
+{ "o_orderkey": 10986, "o_custkey": 115 }
+{ "o_orderkey": 10987, "o_custkey": 64 }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/primary-index/primary-index.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/primary-index/primary-index.1.adm
new file mode 100644
index 0000000..2456990
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/primary-index/primary-index.1.adm
@@ -0,0 +1,9 @@
+{ "id": 6i32, "age": 6i32, "name": "Tracy", "salary": 123.0d }
+{ "id": 12i32, "age": 44i32, "name": "Smith", "salary": 987.0d }
+{ "id": 1i32, "age": 2i32, "name": "Mohammed", "salary": 155.0d }
+{ "id": 2i32, "age": 2i32, "name": "Stephen", "salary": 155.0d }
+{ "id": 4i32, "age": 4i32, "name": "Angela", "salary": 333.0d }
+{ "id": 8i32, "age": 8i32, "name": "George", "salary": 555.0d }
+{ "id": 3i32, "age": 4i32, "name": "Kate", "salary": 333.0d }
+{ "id": 5i32, "age": 6i32, "name": "William", "salary": 123.0d }
+{ "id": 7i32, "age": 8i32, "name": "Stanly", "salary": 555.0d }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-btree/primary-secondary-btree.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-btree/primary-secondary-btree.1.adm
new file mode 100644
index 0000000..fc52536
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-btree/primary-secondary-btree.1.adm
@@ -0,0 +1,5 @@
+{ "id": 6i32, "age": 6i32, "name": "Tracy", "salary": 123.0d }
+{ "id": 12i32, "age": 44i32, "name": "Smith", "salary": 987.0d }
+{ "id": 8i32, "age": 8i32, "name": "George", "salary": 555.0d }
+{ "id": 5i32, "age": 6i32, "name": "William", "salary": 123.0d }
+{ "id": 7i32, "age": 8i32, "name": "Stanly", "salary": 555.0d }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-inverted/primary-secondary-inverted.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-inverted/primary-secondary-inverted.1.adm
new file mode 100644
index 0000000..9fbfb7f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-inverted/primary-secondary-inverted.1.adm
@@ -0,0 +1,5 @@
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "SQL bbbbbbbbbbbbbbbbbb", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 104, "dblpid": "conf/focs/GalilT86", "title": "SQL An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 666, "dblpid": "books/acm/kim95/DittrichD95", "title": "hhhhhhhhhhhhBMSs SQL Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-rtree/primary-secondary-rtree.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-rtree/primary-secondary-rtree.1.adm
new file mode 100644
index 0000000..9c85166
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/primary-secondary-rtree/primary-secondary-rtree.1.adm
@@ -0,0 +1 @@
+{ "id": 20 }
diff --git a/asterix-app/src/test/resources/runtimets/results/upsert/upsert-with-self-read/upsert-with-self-read.1.adm b/asterix-app/src/test/resources/runtimets/results/upsert/upsert-with-self-read/upsert-with-self-read.1.adm
new file mode 100644
index 0000000..37ef294
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/upsert/upsert-with-self-read/upsert-with-self-read.1.adm
@@ -0,0 +1,5 @@
+{ "id": 6i32, "age": 11i32, "name": "Silvester", "salary": 135.3d }
+{ "id": 12i32, "age": 45i32, "name": "Smith", "salary": 1085.7d }
+{ "id": 1i32, "age": 12i32, "name": "Cloud", "salary": 854.7d }
+{ "id": 2i32, "age": 11i32, "name": "Nadia", "salary": 170.5d }
+{ "id": 4i32, "age": 11i32, "name": "Igor", "salary": 366.3d }
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 5d55f2d..621c73a 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1240,8 +1240,65 @@
</test-case>
-->
</test-group>
+ <test-group name="upsert">
+ <test-case FilePath="upsert">
+ <compilation-unit name="primary-secondary-rtree">
+ <output-dir compare="Text">primary-secondary-rtree</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="upsert-with-self-read">
+ <output-dir compare="Text">upsert-with-self-read</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="filtered-dataset">
+ <output-dir compare="Text">filtered-dataset</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="nullable-index">
+ <output-dir compare="Text">nullable-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="nested-index">
+ <output-dir compare="Text">nested-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="open-index">
+ <output-dir compare="Text">open-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="primary-index">
+ <output-dir compare="Text">primary-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="primary-secondary-btree">
+ <output-dir compare="Text">primary-secondary-btree</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="primary-secondary-inverted">
+ <output-dir compare="Text">primary-secondary-inverted</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="upsert">
+ <compilation-unit name="multiple-secondaries">
+ <output-dir compare="Text">multiple-secondaries</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
<test-group name="dml">
<test-case FilePath="dml">
+ <compilation-unit name="load-with-ngram-index">
+ <output-dir compare="Text">load-with-ngram-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
<compilation-unit name="insert-duplicated-keys-from-query">
<output-dir compare="Text">insert-duplicated-keys-from-query</output-dir>
<expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
@@ -1470,11 +1527,6 @@
</compilation-unit>
</test-case>
<test-case FilePath="dml">
- <compilation-unit name="load-with-ngram-index">
- <output-dir compare="Text">load-with-ngram-index</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="dml">
<compilation-unit name="load-with-rtree-index">
<output-dir compare="Text">load-with-rtree-index</output-dir>
</compilation-unit>
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java b/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index 93d78e8..53902e1 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@ -35,6 +35,7 @@
import org.apache.asterix.common.ioopcallbacks.AbstractLSMIOOperationCallback;
import org.apache.asterix.common.transactions.ILogManager;
import org.apache.asterix.common.transactions.LogRecord;
+import org.apache.asterix.common.utils.TransactionUtil;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
import org.apache.hyracks.storage.am.common.api.IIndex;
@@ -542,7 +543,7 @@
private void flushDatasetOpenIndexes(DatasetInfo dsInfo, boolean asyncFlush) throws HyracksDataException {
if (!dsInfo.isExternal) {
synchronized (logRecord) {
- logRecord.formFlushLogRecord(dsInfo.datasetID, null, dsInfo.indexes.size());
+ TransactionUtil.formFlushLogRecord(logRecord, dsInfo.datasetID, null, dsInfo.indexes.size());
try {
logManager.log(logRecord);
} catch (ACIDException e) {
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java b/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
index 2b5a0b0..e5a3473 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.transactions.AbstractOperationCallback;
import org.apache.asterix.common.transactions.ILogManager;
import org.apache.asterix.common.transactions.LogRecord;
+import org.apache.asterix.common.utils.TransactionUtil;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
@@ -120,10 +121,9 @@
}
}
}
-
LogRecord logRecord = new LogRecord();
- logRecord.formFlushLogRecord(datasetID, this, logManager.getNodeId(), dsInfo.getDatasetIndexes().size());
-
+ TransactionUtil.formFlushLogRecord(logRecord, datasetID, this, logManager.getNodeId(),
+ dsInfo.getDatasetIndexes().size());
try {
logManager.log(logRecord);
} catch (ACIDException e) {
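
(Note on the two hunks above: flush-log construction moves off LogRecord onto a static TransactionUtil helper, mirroring the DatasetLifecycleManager change earlier in this diff. The outline below is only a sketch of the shape those two call sites imply; the signatures are inferred from the diff, not taken from the actual org.apache.asterix.common.utils.TransactionUtil source, and the bodies are elided.)

    import org.apache.asterix.common.context.PrimaryIndexOperationTracker;
    import org.apache.asterix.common.transactions.LogRecord;

    // Hypothetical outline only -- illustrates the overloads visible at the call sites.
    public final class TransactionUtilOutline {
        private TransactionUtilOutline() {
        }

        // Shape implied by the DatasetLifecycleManager call site:
        // formFlushLogRecord(logRecord, dsInfo.datasetID, null, dsInfo.indexes.size())
        public static void formFlushLogRecord(LogRecord logRecord, int datasetId,
                PrimaryIndexOperationTracker opTracker, int numberOfIndexes) {
            // populate logRecord with a FLUSH entry for datasetId (details elided)
        }

        // Shape implied by the PrimaryIndexOperationTracker call site above,
        // which additionally passes logManager.getNodeId().
        public static void formFlushLogRecord(LogRecord logRecord, int datasetId,
                PrimaryIndexOperationTracker opTracker, String nodeId, int numberOfIndexes) {
            // same as above, tagging the record with the originating node (details elided)
        }
    }
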
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java b/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
index 53039ce..c7304a1 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMTreeInsertDeleteOperatorDescriptor.java
@@ -21,13 +21,16 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.dataflow.value.INullWriterFactory;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -43,23 +46,24 @@
/** the name of the index that is being operated upon **/
private final String indexName;
- public AsterixLSMTreeInsertDeleteOperatorDescriptor(IOperatorDescriptorRegistry spec,
- RecordDescriptor recDesc, IStorageManagerInterface storageManager,
- IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
- ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields,
- int[] fieldPermutation, IndexOperation op, IIndexDataflowHelperFactory dataflowHelperFactory,
- ITupleFilterFactory tupleFilterFactory,
- IModificationOperationCallbackFactory modificationOpCallbackProvider, boolean isPrimary, String indexName) {
+ public AsterixLSMTreeInsertDeleteOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
+ IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] fieldPermutation,
+ IndexOperation op, IIndexDataflowHelperFactory dataflowHelperFactory,
+ ITupleFilterFactory tupleFilterFactory, boolean isPrimary, String indexName,
+ INullWriterFactory nullWriterFactory, IModificationOperationCallbackFactory modificationOpCallbackProvider,
+ ISearchOperationCallbackFactory searchOpCallbackProvider) {
super(spec, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
comparatorFactories, bloomFilterKeyFields, fieldPermutation, op, dataflowHelperFactory,
- tupleFilterFactory, modificationOpCallbackProvider);
+ tupleFilterFactory, nullWriterFactory, modificationOpCallbackProvider, searchOpCallbackProvider);
this.isPrimary = isPrimary;
this.indexName = indexName;
}
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
return new AsterixLSMInsertDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
recordDescProvider, op, isPrimary);
}
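Note on the hunk above: the insert/delete descriptor now also takes a null writer factory and a search operation callback factory, and the trailing arguments are reordered (isPrimary and indexName move ahead of the callback factories); createPushRuntime may additionally throw HyracksDataException. A hedged sketch of a call against the new signature; every variable name below is a placeholder, not code from this patch:

    // old trailing order: ..., tupleFilterFactory, modificationCallbackFactory, isPrimary, indexName
    // new trailing order: ..., tupleFilterFactory, isPrimary, indexName,
    //                     nullWriterFactory, modificationCallbackFactory, searchCallbackFactory
    AsterixLSMTreeInsertDeleteOperatorDescriptor op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec,
            recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
            comparatorFactories, bloomFilterKeyFields, fieldPermutation, IndexOperation.INSERT,
            dataflowHelperFactory, tupleFilterFactory, true /* isPrimary */, indexName,
            null /* null writer factory, passed as null at the insert/delete call sites in this patch */,
            modificationCallbackFactory, NoOpOperationCallbackFactory.INSTANCE /* search callback */);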
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/transactions/ILogRecord.java b/asterix-common/src/main/java/org/apache/asterix/common/transactions/ILogRecord.java
index 7c76e98..a88c985 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/transactions/ILogRecord.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/transactions/ILogRecord.java
@@ -25,7 +25,7 @@
public interface ILogRecord {
- public enum RECORD_STATUS{
+ public enum RECORD_STATUS {
TRUNCATED,
BAD_CHKSUM,
OK
@@ -40,11 +40,6 @@
public void writeLogRecord(ByteBuffer buffer);
- public void formJobTerminateLogRecord(ITransactionContext txnCtx, boolean isCommit);
-
- public void formEntityCommitLogRecord(ITransactionContext txnCtx, int datasetId, int PKHashValue,
- ITupleReference tupleReference, int[] primaryKeyFields);
-
public ITransactionContext getTxnCtx();
public void setTxnCtx(ITransactionContext txnCtx);
@@ -135,6 +130,4 @@
public ByteBuffer getSerializedLog();
- public void formJobTerminateLogRecord(int jobId, boolean isCommit, String nodeId);
-
}
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogRecord.java b/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogRecord.java
index 51eca0b..d4a96a5 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogRecord.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogRecord.java
@@ -41,7 +41,7 @@
* NodeIdLength(4)
* NodeId(?)
* ---------------------------
- * [Header2] (12 bytes + PKValueSize) : for entity_commit and update log types
+ * [Header2] (12 bytes + PKValueSize) : for entity_commit, upsert_entity_commit, and update log types
* DatasetId(4) //stored in dataset_dataset in Metadata Node
* PKHashValue(4)
* PKValueSize(4)
@@ -63,7 +63,7 @@
* ---------------------------
* = LogSize =
* 1) JOB_COMMIT_LOG_SIZE: 13 bytes (5 + 8)
- * 2) ENTITY_COMMIT: 25 + PKSize (5 + 12 + PKSize + 8)
+ * 2) ENTITY_COMMIT || UPSERT_ENTITY_COMMIT: 25 + PKSize (5 + 12 + PKSize + 8)
* --> ENTITY_COMMIT_LOG_BASE_SIZE = 25
* 3) UPDATE: 54 + PKValueSize + NewValueSize (5 + 12 + PKValueSize + 20 + 9 + NewValueSize + 8)
* 4) FLUSH: 5 + 8 + DatasetId(4) (In case of serialize: + (8 bytes for LSN) + (4 bytes for number of flushed indexes)
@@ -71,7 +71,7 @@
public class LogRecord implements ILogRecord {
- //------------- fields in a log record (begin) ------------//
+ // ------------- fields in a log record (begin) ------------//
private byte logSource;
private String nodeId;
private int nodeIdLength;
@@ -89,7 +89,7 @@
private int newValueSize;
private ITupleReference newValue;
private long checksum;
- //------------- fields in a log record (end) --------------//
+ // ------------- fields in a log record (end) --------------//
private int PKFieldCnt;
private ITransactionContext txnCtx;
@@ -104,7 +104,7 @@
private IReplicationThread replicationThread;
private ByteBuffer serializedLog;
private final Map<String, byte[]> nodeIdsMap;
- //this field is used for serialized flush logs only to indicate how many indexes were flushed using its LSN.
+ // this field is used for serialized flush logs only to indicate how many indexes were flushed using its LSN.
private int numOfFlushedIndexes;
public LogRecord() {
@@ -142,7 +142,7 @@
if (nodeIdsMap.containsKey(nodeId)) {
buffer.put(nodeIdsMap.get(nodeId));
} else {
- //byte array for node id length and string
+ // byte array for node id length and string
byte[] bytes = new byte[(Integer.SIZE / 8) + nodeId.length()];
buffer.putInt(nodeId.length());
buffer.put(nodeId.getBytes(java.nio.charset.StandardCharsets.UTF_8));
@@ -150,7 +150,7 @@
buffer.get(bytes, 0, bytes.length);
nodeIdsMap.put(nodeId, bytes);
}
- if (logType == LogType.UPDATE || logType == LogType.ENTITY_COMMIT) {
+ if (logType == LogType.UPDATE || logType == LogType.ENTITY_COMMIT || logType == LogType.UPSERT_ENTITY_COMMIT) {
buffer.putInt(datasetId);
buffer.putInt(PKHashValue);
if (PKValueSize <= 0) {
@@ -182,14 +182,14 @@
buffer.putLong(checksum);
}
- //this method is used when replication is enabled to include the log record LSN in the serialized version
+ // this method is used when replication is enabled to include the log record LSN in the serialized version
@Override
public void writeLogRecord(ByteBuffer buffer, long appendLSN) {
int beginOffset = buffer.position();
writeLogRecordCommonFields(buffer);
if (logSource == LogSource.LOCAL) {
- //copy the serialized log to send it to replicas
+ // copy the serialized log to send it to replicas
int serializedLogSize = getSerializedLogSize(logType, logSize);
if (serializedLog == null || serializedLog.capacity() < serializedLogSize) {
@@ -223,14 +223,14 @@
PKValue.getFieldLength(PKFields[i]));
}
} else {
- //since PKValue is already serialized in remote logs, just put it into buffer
+ // since PKValue is already serialized in remote logs, just put it into buffer
buffer.put(PKValue.getFieldData(0), 0, PKValueSize);
}
}
private void writeTuple(ByteBuffer buffer, ITupleReference tuple, int size) {
tupleWriter.writeTuple(tuple, buffer.array(), buffer.position());
- //writeTuple() doesn't change the position of the buffer.
+ // writeTuple() doesn't change the position of the buffer.
buffer.position(buffer.position() + size);
}
@@ -244,21 +244,21 @@
public RECORD_STATUS readLogRecord(ByteBuffer buffer) {
int beginOffset = buffer.position();
- //read header
+ // read header
RECORD_STATUS status = readLogHeader(buffer);
if (status != RECORD_STATUS.OK) {
buffer.position(beginOffset);
return status;
}
- //read body
+ // read body
status = readLogBody(buffer, false);
if (status != RECORD_STATUS.OK) {
buffer.position(beginOffset);
return status;
}
- //attempt to read checksum
+ // attempt to read checksum
if (buffer.remaining() < CHKSUM_LEN) {
buffer.position(beginOffset);
return RECORD_STATUS.TRUNCATED;
@@ -272,7 +272,7 @@
}
private RECORD_STATUS readLogHeader(ByteBuffer buffer) {
- //first we need the logtype and Job ID, if the buffer isn't that big, then no dice.
+ // first we need the logtype and Job ID, if the buffer isn't that big, then no dice.
if (buffer.remaining() < ALL_RECORD_HEADER_LEN) {
return RECORD_STATUS.TRUNCATED;
}
@@ -280,14 +280,14 @@
logType = buffer.get();
jobId = buffer.getInt();
nodeIdLength = buffer.getInt();
- //attempt to read node id
+ // attempt to read node id
if (buffer.remaining() < nodeIdLength) {
return RECORD_STATUS.TRUNCATED;
}
- //read node id string
+ // read node id string
nodeId = new String(buffer.array(), buffer.position() + buffer.arrayOffset(), nodeIdLength,
java.nio.charset.StandardCharsets.UTF_8);
- //skip node id string bytes
+ // skip node id string bytes
buffer.position(buffer.position() + nodeIdLength);
return RECORD_STATUS.OK;
@@ -299,14 +299,14 @@
datasetId = -1;
PKHashValue = -1;
} else {
- //attempt to read in the dsid, PK hash and PK length
+ // attempt to read in the dsid, PK hash and PK length
if (buffer.remaining() < ENTITYCOMMIT_UPDATE_HEADER_LEN) {
return RECORD_STATUS.TRUNCATED;
}
datasetId = buffer.getInt();
PKHashValue = buffer.getInt();
PKValueSize = buffer.getInt();
- //attempt to read in the PK
+ // attempt to read in the PK
if (buffer.remaining() < PKValueSize) {
return RECORD_STATUS.TRUNCATED;
}
@@ -317,7 +317,7 @@
}
if (logType == LogType.UPDATE) {
- //attempt to read in the previous LSN, log size, new value size, and new record type
+ // attempt to read in the previous LSN, log size, new value size, and new record type
if (buffer.remaining() < UPDATE_LSN_HEADER + UPDATE_BODY_HEADER) {
return RECORD_STATUS.TRUNCATED;
}
@@ -337,7 +337,7 @@
tupleBuffer.put(buffer.array(), buffer.position(), newValueSize);
tupleBuffer.flip();
newValue = readTuple(tupleBuffer, readNewValue, fieldCnt, newValueSize);
- //skip tuple bytes
+ // skip tuple bytes
buffer.position(buffer.position() + newValueSize);
}
} else {
@@ -361,8 +361,8 @@
if (!remoteRecoveryLog || !nodeId.equals(localNodeId)) {
readLogBody(buffer, false);
} else {
- //need to allocate buffer for tuple since the logs will be kept in memory to use during remote recovery
- //TODO when this is redesigned to spill remote recovery logs to disk, this will not be needed
+ // need to allocate buffer for tuple since the logs will be kept in memory to use during remote recovery
+ // TODO when this is redesigned to spill remote recovery logs to disk, this will not be needed
readLogBody(buffer, true);
}
@@ -371,7 +371,7 @@
numOfFlushedIndexes = buffer.getInt();
}
- //remote recovery logs need to have the LSN to check which should be replayed
+ // remote recovery logs need to have the LSN to check which should be replayed
if (remoteRecoveryLog && nodeId.equals(localNodeId)) {
LSN = buffer.getLong();
}
@@ -398,54 +398,6 @@
}
@Override
- public void formJobTerminateLogRecord(ITransactionContext txnCtx, boolean isCommit) {
- this.txnCtx = txnCtx;
- formJobTerminateLogRecord(txnCtx.getJobId().getId(), isCommit, nodeId);
- }
-
- @Override
- public void formJobTerminateLogRecord(int jobId, boolean isCommit, String nodeId) {
- this.logType = isCommit ? LogType.JOB_COMMIT : LogType.ABORT;
- this.jobId = jobId;
- this.datasetId = -1;
- this.PKHashValue = -1;
- setNodeId(nodeId);
- computeAndSetLogSize();
- }
-
- public void formFlushLogRecord(int datasetId, PrimaryIndexOperationTracker opTracker, int numOfFlushedIndexes) {
- formFlushLogRecord(datasetId, opTracker, null, numOfFlushedIndexes);
- }
-
- public void formFlushLogRecord(int datasetId, PrimaryIndexOperationTracker opTracker, String nodeId,
- int numberOfIndexes) {
- this.logType = LogType.FLUSH;
- this.jobId = -1;
- this.datasetId = datasetId;
- this.opTracker = opTracker;
- this.numOfFlushedIndexes = numberOfIndexes;
- if (nodeId != null) {
- setNodeId(nodeId);
- }
- computeAndSetLogSize();
- }
-
- @Override
- public void formEntityCommitLogRecord(ITransactionContext txnCtx, int datasetId, int PKHashValue,
- ITupleReference PKValue, int[] PKFields) {
- this.txnCtx = txnCtx;
- this.logType = LogType.ENTITY_COMMIT;
- this.jobId = txnCtx.getJobId().getId();
- this.datasetId = datasetId;
- this.PKHashValue = PKHashValue;
- this.PKFieldCnt = PKFields.length;
- this.PKValue = PKValue;
- this.PKFields = PKFields;
- computeAndSetPKValueSize();
- computeAndSetLogSize();
- }
-
- @Override
public void computeAndSetPKValueSize() {
int i;
PKValueSize = 0;
@@ -469,6 +421,7 @@
logSize = JOB_TERMINATE_LOG_SIZE;
break;
case LogType.ENTITY_COMMIT:
+ case LogType.UPSERT_ENTITY_COMMIT:
logSize = ENTITY_COMMIT_LOG_BASE_SIZE + PKValueSize;
break;
case LogType.FLUSH:
@@ -511,7 +464,7 @@
buffer.putLong(LSN);
buffer.putInt(numOfFlushedIndexes);
}
- //LSN must be included in all remote recovery logs (not only FLUSH)
+ // LSN must be included in all remote recovery logs (not only FLUSH)
buffer.putLong(LSN);
return buffer.position() - bufferBegin;
}
@@ -612,13 +565,13 @@
private static int getSerializedLogSize(Byte logType, int logSize) {
if (logType == LogType.FLUSH) {
- //LSN
+ // LSN
logSize += (Long.SIZE / 8);
- //num of indexes
+ // num of indexes
logSize += (Integer.SIZE / 8);
}
- //checksum not included in serialized version
+ // checksum not included in serialized version
logSize -= CHKSUM_LEN;
return logSize;
@@ -752,4 +705,12 @@
this.numOfFlushedIndexes = numOfFlushedIndexes;
}
+ public void setPKFieldCnt(int pKFieldCnt) {
+ PKFieldCnt = pKFieldCnt;
+ }
+
+ public void setOpTracker(PrimaryIndexOperationTracker opTracker) {
+ this.opTracker = opTracker;
+ }
+
}
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogType.java b/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogType.java
index 4048415..d6d2657 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogType.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/transactions/LogType.java
@@ -25,12 +25,14 @@
public static final byte ENTITY_COMMIT = 2;
public static final byte ABORT = 3;
public static final byte FLUSH = 4;
+ public static final byte UPSERT_ENTITY_COMMIT = 5;
private static final String STRING_UPDATE = "UPDATE";
private static final String STRING_JOB_COMMIT = "JOB_COMMIT";
private static final String STRING_ENTITY_COMMIT = "ENTITY_COMMIT";
private static final String STRING_ABORT = "ABORT";
private static final String STRING_FLUSH = "FLUSH";
+ private static final String STRING_UPSERT_ENTITY_COMMIT = "UPSERT_ENTITY_COMMIT";
private static final String STRING_INVALID_LOG_TYPE = "INVALID_LOG_TYPE";
@@ -46,6 +48,8 @@
return STRING_ABORT;
case LogType.FLUSH:
return STRING_FLUSH;
+ case LogType.UPSERT_ENTITY_COMMIT:
+ return STRING_UPSERT_ENTITY_COMMIT;
default:
return STRING_INVALID_LOG_TYPE;
}
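Note on the hunk above: UPSERT_ENTITY_COMMIT is deliberately shaped like ENTITY_COMMIT; the LogRecord changes earlier in this patch write the same Header2 fields (dataset id, PK hash, PK value) for it and give it the same base size. Illustrative sketch only, mirroring the switch added to LogRecord.computeAndSetLogSize rather than introducing new behavior:

    switch (logType) {
        case LogType.ENTITY_COMMIT:
        case LogType.UPSERT_ENTITY_COMMIT: // fall through: identical record layout
            logSize = ENTITY_COMMIT_LOG_BASE_SIZE + PKValueSize;
            break;
        default:
            // remaining log types are sized as before
            break;
    }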
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/utils/TransactionUtil.java b/asterix-common/src/main/java/org/apache/asterix/common/utils/TransactionUtil.java
new file mode 100644
index 0000000..97674e6
--- /dev/null
+++ b/asterix-common/src/main/java/org/apache/asterix/common/utils/TransactionUtil.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.utils;
+
+import org.apache.asterix.common.context.PrimaryIndexOperationTracker;
+import org.apache.asterix.common.transactions.ITransactionContext;
+import org.apache.asterix.common.transactions.LogRecord;
+import org.apache.asterix.common.transactions.LogType;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class TransactionUtil {
+
+ public static void formJobTerminateLogRecord(ITransactionContext txnCtx, LogRecord logRecord, boolean isCommit) {
+ logRecord.setTxnCtx(txnCtx);
+ TransactionUtil.formJobTerminateLogRecord(logRecord, txnCtx.getJobId().getId(), isCommit,
+ logRecord.getNodeId());
+ }
+
+ public static void formJobTerminateLogRecord(LogRecord logRecord, int jobId, boolean isCommit, String nodeId) {
+ logRecord.setLogType(isCommit ? LogType.JOB_COMMIT : LogType.ABORT);
+ logRecord.setDatasetId(-1);
+ logRecord.setPKHashValue(-1);
+ logRecord.setJobId(jobId);
+ logRecord.setNodeId(nodeId);
+ logRecord.computeAndSetLogSize();
+ }
+
+ public static void formFlushLogRecord(LogRecord logRecord, int datasetId, PrimaryIndexOperationTracker opTracker,
+ int numOfFlushedIndexes) {
+ formFlushLogRecord(logRecord, datasetId, opTracker, null, numOfFlushedIndexes);
+ }
+
+ public static void formFlushLogRecord(LogRecord logRecord, int datasetId, PrimaryIndexOperationTracker opTracker,
+ String nodeId, int numberOfIndexes) {
+ logRecord.setLogType(LogType.FLUSH);
+ logRecord.setJobId(-1);
+ logRecord.setDatasetId(datasetId);
+ logRecord.setOpTracker(opTracker);
+ logRecord.setNumOfFlushedIndexes(numberOfIndexes);
+ if (nodeId != null) {
+ logRecord.setNodeId(nodeId);
+ }
+ logRecord.computeAndSetLogSize();
+ }
+
+ public static void formEntityCommitLogRecord(LogRecord logRecord, ITransactionContext txnCtx, int datasetId,
+ int PKHashValue, ITupleReference PKValue, int[] PKFields) {
+ logRecord.setTxnCtx(txnCtx);
+ logRecord.setLogType(LogType.ENTITY_COMMIT);
+ logRecord.setJobId(txnCtx.getJobId().getId());
+ logRecord.setDatasetId(datasetId);
+ logRecord.setPKHashValue(PKHashValue);
+ logRecord.setPKFieldCnt(PKFields.length);
+ logRecord.setPKValue(PKValue);
+ logRecord.setPKFields(PKFields);
+ logRecord.computeAndSetPKValueSize();
+ logRecord.computeAndSetLogSize();
+ }
+
+ public static void formEntityUpsertCommitLogRecord(LogRecord logRecord, ITransactionContext txnCtx, int datasetId,
+ int PKHashValue, ITupleReference PKValue, int[] PKFields) {
+ logRecord.setTxnCtx(txnCtx);
+ logRecord.setLogType(LogType.UPSERT_ENTITY_COMMIT);
+ logRecord.setJobId(txnCtx.getJobId().getId());
+ logRecord.setDatasetId(datasetId);
+ logRecord.setPKHashValue(PKHashValue);
+ logRecord.setPKFieldCnt(PKFields.length);
+ logRecord.setPKValue(PKValue);
+ logRecord.setPKFields(PKFields);
+ logRecord.computeAndSetPKValueSize();
+ logRecord.computeAndSetLogSize();
+ }
+}
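A short usage sketch for the new helper, showing how an entity-level upsert commit record could be formed and handed to the log manager; txnCtx, datasetId, pkHashValue, pkValue and logManager are placeholders for whatever the calling callback already holds:

    import org.apache.asterix.common.transactions.LogRecord;
    import org.apache.asterix.common.utils.TransactionUtil;

    LogRecord logRecord = new LogRecord();
    int[] pkFields = new int[] { 0 }; // assumes the primary key sits in field 0 of the tuple
    TransactionUtil.formEntityUpsertCommitLogRecord(logRecord, txnCtx, datasetId, pkHashValue,
            pkValue, pkFields); // sets LogType.UPSERT_ENTITY_COMMIT and computes PK/record sizes
    logManager.log(logRecord); // same logging path used for ordinary entity commits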
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
index d10ff6d..e2cf013 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
@@ -47,8 +47,6 @@
}
class ExternalScalarFunction extends ExternalFunction implements IExternalScalarFunction, ICopyEvaluator {
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public ExternalScalarFunction(IExternalFunctionInfo finfo, ICopyEvaluatorFactory args[],
IDataOutputProvider outputProvider) throws AlgebricksException {
super(finfo, args, outputProvider);
@@ -80,8 +78,8 @@
* we want to discard a null object
*/
byte byteOutput = ((ArrayBackedValueStorage) out).getByteArray()[0];
- if (!argumentProvider.isValidResult() || byteOutput == SER_NULL_TYPE_TAG) {
- out.getDataOutput().writeByte(SER_NULL_TYPE_TAG);
+ if (!argumentProvider.isValidResult() || byteOutput == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
+ out.getDataOutput().writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
}
}
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
index 42b0742..97fe983 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjects.java
@@ -125,8 +125,6 @@
*
*/
public static class JNull implements IJObject {
- public final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public final static JNull INSTANCE = new JNull();
private JNull() {
@@ -146,7 +144,7 @@
public void serialize(DataOutput dataOutput, boolean writeTypeTag) throws HyracksDataException {
if (writeTypeTag) {
try {
- dataOutput.writeByte(SER_NULL_TYPE_TAG);
+ dataOutput.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} catch (IOException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index 14a3e2a..d523c6e 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -636,11 +636,11 @@
token = admLexer.next();
this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput());
if (openRecordField) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
}
} else if (NonTaggedFormatUtil.isOptional(recType)) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
recBuilder.addField(fieldId, fieldValueBuffer);
}
} else {
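Note on the hunks above: the parser and the external-library classes drop their private copies of the serialized null tag and compare against the shared ATypeTag.SERIALIZED_NULL_TYPE_TAG constant instead of calling ATypeTag.NULL.serialize() each time. The pattern, as a small sketch (fieldValueBuffer, fieldNameBuffer and recBuilder stand for the parser's existing buffers and record builder):

    // The first byte of a tagged serialized value is its type tag, so a single
    // byte comparison is enough to skip null-valued optional/open fields.
    if (fieldValueBuffer.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
        recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
    }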
diff --git a/asterix-lang-aql/src/main/javacc/AQL.jj b/asterix-lang-aql/src/main/javacc/AQL.jj
index 8f62f74..93e3f68 100644
--- a/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -124,6 +124,7 @@
import org.apache.asterix.lang.common.statement.TypeDecl;
import org.apache.asterix.lang.common.statement.TypeDropStatement;
import org.apache.asterix.lang.common.statement.UpdateStatement;
+import org.apache.asterix.lang.common.statement.UpsertStatement;
import org.apache.asterix.lang.common.statement.WriteStatement;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.struct.QuantifiedPair;
@@ -884,12 +885,17 @@
{
Pair<Identifier,Identifier> nameComponents = null;
Query query;
+ boolean upsert = false;
}
{
- "insert" "into" <DATASET> nameComponents = QualifiedName() query = Query()
+ ("insert"|"upsert"{ upsert = true; }) "into" <DATASET> nameComponents = QualifiedName() query = Query()
{
query.setTopLevel(true);
- return new InsertStatement(nameComponents.first, nameComponents.second, query, getVarCounter());
+ if (upsert) {
+ return new UpsertStatement(nameComponents.first, nameComponents.second, query, getVarCounter());
+ } else {
+ return new InsertStatement(nameComponents.first, nameComponents.second, query, getVarCounter());
+ }
}
}
diff --git a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/Statement.java b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/Statement.java
index 7cf12c7..3184d1e 100644
--- a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/Statement.java
+++ b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/Statement.java
@@ -26,6 +26,7 @@
DATASET_DROP,
DELETE,
INSERT,
+ UPSERT,
UPDATE,
DML_CMD_LIST,
FUNCTION_DECL,
diff --git a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java
new file mode 100644
index 0000000..f415951
--- /dev/null
+++ b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.common.statement;
+
+import org.apache.asterix.lang.common.struct.Identifier;
+
+public class UpsertStatement extends InsertStatement {
+
+ public UpsertStatement(Identifier dataverseName, Identifier datasetName, Query query, int varCounter) {
+ super(dataverseName, datasetName, query, varCounter);
+ }
+
+ @Override
+ public Kind getKind() {
+ return Kind.UPSERT;
+ }
+
+}
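Note on the file above: UpsertStatement reuses InsertStatement wholesale and only overrides getKind(), so downstream code can branch on Statement.Kind.UPSERT. An illustrative dispatch sketch; handleInsert and handleUpsert are hypothetical helpers, not methods from this patch:

    switch (stmt.getKind()) {
        case INSERT:
            handleInsert((InsertStatement) stmt);
            break;
        case UPSERT:
            // UpsertStatement extends InsertStatement, so the same dataverse/dataset/query
            // accessors are available; only the kind differs.
            handleUpsert((UpsertStatement) stmt);
            break;
        default:
            break;
    }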
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index f15540a..f3523da 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@ -101,6 +101,9 @@
import org.apache.asterix.runtime.formats.FormatUtils;
import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
+import org.apache.asterix.runtime.operators.AsterixLSMInvertedIndexUpsertOperatorDescriptor;
+import org.apache.asterix.runtime.operators.AsterixLSMTreeUpsertOperatorDescriptor;
+import org.apache.asterix.transaction.management.opcallbacks.LockThenSearchOperationCallbackFactory;
import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexModificationOperationCallbackFactory;
import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
@@ -110,6 +113,7 @@
import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexSearchOperationCallbackFactory;
import org.apache.asterix.transaction.management.opcallbacks.TempDatasetPrimaryIndexModificationOperationCallbackFactory;
import org.apache.asterix.transaction.management.opcallbacks.TempDatasetSecondaryIndexModificationOperationCallbackFactory;
+import org.apache.asterix.transaction.management.opcallbacks.UpsertOperationCallbackFactory;
import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
@@ -1184,7 +1188,8 @@
op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
- fieldPermutation, indexOp, idfh, null, modificationCallbackFactory, true, indexName);
+ fieldPermutation, indexOp, idfh, null, true, indexName, null, modificationCallbackFactory,
+ NoOpOperationCallbackFactory.INSTANCE);
}
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
@@ -1670,7 +1675,8 @@
LSMBTreeIOOperationCallbackFactory.INSTANCE,
storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
filterCmpFactories, btreeFields, filterFields, !temp),
- filterFactory, modificationCallbackFactory, false, indexName);
+ filterFactory, false, indexName, null, modificationCallbackFactory,
+ NoOpOperationCallbackFactory.INSTANCE);
}
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
} catch (MetadataException e) {
@@ -2029,7 +2035,8 @@
proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
filterTypeTraits, filterCmpFactories, filterFields, !temp),
- filterFactory, modificationCallbackFactory, false, indexName);
+ filterFactory, false, indexName, null, modificationCallbackFactory,
+ NoOpOperationCallbackFactory.INSTANCE);
}
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
} catch (MetadataException | IOException e) {
@@ -2237,4 +2244,721 @@
throw new AlgebricksException(e);
}
}
+
+ //TODO: refactor this method
+ @Override
+ public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getUpsertRuntime(
+ IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
+ List<LogicalVariable> primaryKeys, LogicalVariable payload, List<LogicalVariable> filterKeys,
+ LogicalVariable prevPayload, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
+ throws AlgebricksException {
+ String datasetName = dataSource.getId().getDatasourceName();
+ Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException(
+ "Unknown dataset " + datasetName + " in dataverse " + dataSource.getId().getDataverseName());
+ }
+ boolean temp = dataset.getDatasetDetails().isTemp();
+ isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
+
+ int numKeys = primaryKeys.size();
+ int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
+ // Move key fields to front. {keys, record, filters}
+ int[] fieldPermutation = new int[numKeys + 1 + numFilterFields];
+ int[] bloomFilterKeyFields = new int[numKeys];
+ int i = 0;
+ // set the keys' permutations
+ for (LogicalVariable varKey : primaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ bloomFilterKeyFields[i] = i;
+ i++;
+ }
+ // set the record permutation
+ fieldPermutation[numKeys] = propagatedSchema.findVariable(payload);
+ // set the filters' permutations.
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(filterKeys.get(0));
+ fieldPermutation[numKeys + 1] = idx;
+ }
+
+ try {
+ Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), dataset.getDatasetName());
+ String indexName = primaryIndex.getIndexName();
+
+ String itemTypeName = dataset.getItemTypeName();
+ ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
+ .getDatatype(mdTxnCtx, dataSource.getId().getDataverseName(), itemTypeName).getDatatype();
+
+ ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
+
+ IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
+ IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
+ itemType, context.getBinaryComparatorFactoryProvider());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
+ dataSource.getId().getDataverseName(), datasetName, indexName, temp);
+
+ // prepare callback
+ JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
+ int datasetId = dataset.getDatasetId();
+ int[] primaryKeyFields = new int[numKeys];
+ for (i = 0; i < numKeys; i++) {
+ primaryKeyFields[i] = i;
+ }
+
+ ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
+ IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+ itemType, context.getBinaryComparatorFactoryProvider());
+ int[] filterFields = DatasetUtils.createFilterFields(dataset);
+ int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
+
+ TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
+ IModificationOperationCallbackFactory modificationCallbackFactory = temp
+ ? new TempDatasetPrimaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ primaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT, ResourceType.LSM_BTREE)
+ : new UpsertOperationCallbackFactory(jobId, datasetId, primaryKeyFields, txnSubsystemProvider,
+ IndexOperation.UPSERT, ResourceType.LSM_BTREE);
+
+ LockThenSearchOperationCallbackFactory searchCallbackFactory = new LockThenSearchOperationCallbackFactory(
+ jobId, datasetId, primaryKeyFields, txnSubsystemProvider, ResourceType.LSM_BTREE);
+
+ Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
+ .getMergePolicyFactory(dataset, mdTxnCtx);
+ IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(
+ new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first, compactionInfo.second,
+ new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+ btreeFields, filterFields, !temp);
+ AsterixLSMTreeUpsertOperatorDescriptor op;
+
+ ITypeTraits[] outputTypeTraits = new ITypeTraits[recordDesc.getFieldCount() + 1 + numFilterFields];
+ ISerializerDeserializer[] outputSerDes = new ISerializerDeserializer[recordDesc.getFieldCount() + 1
+ + numFilterFields];
+ for (int j = 0; j < recordDesc.getFieldCount(); j++) {
+ outputTypeTraits[j] = recordDesc.getTypeTraits()[j];
+ outputSerDes[j] = recordDesc.getFields()[j];
+ }
+ outputSerDes[outputSerDes.length - 1 - numFilterFields] = FormatUtils.getDefaultFormat().getSerdeProvider()
+ .getSerializerDeserializer(itemType);
+ outputTypeTraits[outputTypeTraits.length - 1 - numFilterFields] = FormatUtils.getDefaultFormat()
+ .getTypeTraitProvider().getTypeTrait(itemType);
+ int fieldIdx = -1;
+ if (numFilterFields > 0) {
+ String filterField = DatasetUtils.getFilterField(dataset).get(0);
+ for (i = 0; i < itemType.getFieldNames().length; i++) {
+ if (itemType.getFieldNames()[i].equals(filterField)) {
+ break;
+ }
+ }
+ fieldIdx = i;
+ outputTypeTraits[outputTypeTraits.length - 1] = FormatUtils.getDefaultFormat().getTypeTraitProvider()
+ .getTypeTrait(itemType.getFieldTypes()[fieldIdx]);
+ outputSerDes[outputSerDes.length - 1] = FormatUtils.getDefaultFormat().getSerdeProvider()
+ .getSerializerDeserializer(itemType.getFieldTypes()[fieldIdx]);
+ }
+
+ RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits);
+ op = new AsterixLSMTreeUpsertOperatorDescriptor(spec, outputRecordDesc,
+ appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+ splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields, fieldPermutation,
+ idfh, null, true, indexName, context.getNullWriterFactory(), modificationCallbackFactory,
+ searchCallbackFactory, null);
+ op.setType(itemType);
+ op.setFilterIndex(fieldIdx);
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+
+ } catch (MetadataException me) {
+ throw new AlgebricksException(me);
+ }
+ }
+
+ // TODO refactor this method
+ @Override
+ public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexUpsertRuntime(
+ IDataSourceIndex<String, AqlSourceId> dataSourceIndex, IOperatorSchema propagatedSchema,
+ IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
+ List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalFilteringKeys,
+ ILogicalExpression filterExpr, List<LogicalVariable> prevSecondaryKeys,
+ LogicalVariable prevAdditionalFilteringKey, RecordDescriptor recordDesc, JobGenContext context,
+ JobSpecification spec) throws AlgebricksException {
+ String indexName = dataSourceIndex.getId();
+ String dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
+ String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
+
+ Dataset dataset = findDataset(dataverseName, datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName);
+ }
+ Index secondaryIndex;
+ try {
+ secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), indexName);
+ } catch (MetadataException e) {
+ throw new AlgebricksException(e);
+ }
+ AsterixTupleFilterFactory filterFactory = createTupleFilterFactory(inputSchemas, typeEnv, filterExpr, context);
+ ArrayList<LogicalVariable> prevAdditionalFilteringKeys = null;
+ if (prevAdditionalFilteringKey != null) {
+ prevAdditionalFilteringKeys = new ArrayList<LogicalVariable>();
+ prevAdditionalFilteringKeys.add(prevAdditionalFilteringKey);
+ }
+ switch (secondaryIndex.getIndexType()) {
+ case BTREE: {
+ return getBTreeUpsertRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv,
+ primaryKeys, secondaryKeys, additionalFilteringKeys, filterFactory, recordDesc, context, spec,
+ prevSecondaryKeys, prevAdditionalFilteringKeys);
+ }
+ case RTREE: {
+ return getRTreeUpsertRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv,
+ primaryKeys, secondaryKeys, additionalFilteringKeys, filterFactory, recordDesc, context, spec,
+ prevSecondaryKeys, prevAdditionalFilteringKeys);
+ }
+ case SINGLE_PARTITION_WORD_INVIX:
+ case SINGLE_PARTITION_NGRAM_INVIX:
+ case LENGTH_PARTITIONED_WORD_INVIX:
+ case LENGTH_PARTITIONED_NGRAM_INVIX: {
+ return getInvertedIndexUpsertRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv,
+ primaryKeys, secondaryKeys, additionalFilteringKeys, filterFactory, recordDesc, context, spec,
+ secondaryIndex.getIndexType(), prevSecondaryKeys, prevAdditionalFilteringKeys);
+ }
+ default: {
+ throw new AlgebricksException(
+ "upsert is not implemented for index type: " + secondaryIndex.getIndexType());
+ }
+ }
+ }
+
+ //TODO: refactor this method
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInvertedIndexUpsertRuntime(String dataverseName,
+ String datasetName, String indexName, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
+ List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ List<LogicalVariable> additionalFilteringKeys, AsterixTupleFilterFactory filterFactory,
+ RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, IndexType indexType,
+ List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
+ throws AlgebricksException {
+ // Check whether the index is length-partitioned.
+ boolean isPartitioned;
+ if (indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+ || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
+ isPartitioned = true;
+ } else {
+ isPartitioned = false;
+ }
+
+ // Sanity checks.
+ if (primaryKeys.size() > 1) {
+ throw new AlgebricksException("Cannot create inverted index on dataset with composite primary key.");
+ }
+ // The size of secondaryKeys can be two if it receives input from its
+ // TokenizeOperator: [token, number of tokens]
+ if (secondaryKeys.size() > 1 && !isPartitioned) {
+ throw new AlgebricksException("Cannot create composite inverted index on multiple fields.");
+ } else if (secondaryKeys.size() > 2 && isPartitioned) {
+ throw new AlgebricksException("Cannot create composite inverted index on multiple fields.");
+ }
+
+ Dataset dataset = findDataset(dataverseName, datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
+ }
+ boolean temp = dataset.getDatasetDetails().isTemp();
+ isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
+
+ // For tokenization, sorting and loading.
+ // One token (+ optional partitioning field) + primary keys: [token,
+ // number of token, PK]
+ int numKeys = primaryKeys.size() + secondaryKeys.size();
+ int numTokenKeyPairFields = (!isPartitioned) ? 1 + primaryKeys.size() : 2 + primaryKeys.size();
+ int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
+
+ // generate field permutations
+ int[] fieldPermutation = new int[numKeys + numFilterFields];
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
+ int i = 0;
+ int j = 0;
+
+ // If the index is partitioned: [token, number of token]
+ // Otherwise: [token]
+ for (LogicalVariable varKey : secondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ i++;
+ }
+ for (LogicalVariable varKey : primaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
+ i++;
+ j++;
+ }
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(additionalFilteringKeys.get(0));
+ fieldPermutation[numKeys] = idx;
+ }
+
+ // Find permutations for prev value
+ int[] prevFieldPermutation = new int[numKeys + numFilterFields];
+ i = 0;
+
+ // If the index is partitioned: [token, number of token]
+ // Otherwise: [token]
+ for (LogicalVariable varKey : prevSecondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ prevFieldPermutation[i] = idx;
+ i++;
+ }
+
+ for (int k = 0; k < primaryKeys.size(); k++) {
+ prevFieldPermutation[k + i] = fieldPermutation[k + i];
+ i++;
+ }
+
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(prevAdditionalFilteringKeys.get(0));
+ prevFieldPermutation[numKeys] = idx;
+ }
+
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType;
+ try {
+ itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(), itemTypeName)
+ .getDatatype();
+
+ if (itemType.getTypeTag() != ATypeTag.RECORD) {
+ throw new AlgebricksException("Only record types can be indexed.");
+ }
+
+ ARecordType recType = (ARecordType) itemType;
+
+ // Index parameters.
+ Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), indexName);
+
+ List<List<String>> secondaryKeyExprs = secondaryIndex.getKeyFieldNames();
+ List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
+
+ int numTokenFields = 0;
+
+ // SecondaryKeys.size() can be two if it comes from the bulkload.
+ // In this case, [token, number of token] are the secondaryKeys.
+ if (!isPartitioned || secondaryKeys.size() > 1) {
+ numTokenFields = secondaryKeys.size();
+ } else if (isPartitioned && secondaryKeys.size() == 1) {
+ numTokenFields = secondaryKeys.size() + 1;
+ }
+
+ ITypeTraits[] tokenTypeTraits = new ITypeTraits[numTokenFields];
+ ITypeTraits[] invListsTypeTraits = new ITypeTraits[primaryKeys.size()];
+ IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[numTokenFields];
+ IBinaryComparatorFactory[] invListComparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+ dataset, recType, context.getBinaryComparatorFactoryProvider());
+
+ IAType secondaryKeyType = null;
+
+ Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
+ secondaryKeyExprs.get(0), recType);
+ secondaryKeyType = keyPairType.first;
+
+ List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+
+ i = 0;
+ for (List<String> partitioningKey : partitioningKeys) {
+ IAType keyType = recType.getSubFieldType(partitioningKey);
+ invListsTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+ ++i;
+ }
+
+ tokenComparatorFactories[0] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
+ tokenTypeTraits[0] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
+ if (isPartitioned) {
+ // The partitioning field is hardcoded to be a short *without*
+ // an Asterix type tag.
+ tokenComparatorFactories[1] = PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
+ tokenTypeTraits[1] = ShortPointable.TYPE_TRAITS;
+ }
+ IBinaryTokenizerFactory tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(
+ secondaryKeyType.getTypeTag(), indexType, secondaryIndex.getGramLength());
+
+ ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recType);
+ IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+ recType, context.getBinaryComparatorFactoryProvider());
+
+ int[] filterFields = null;
+ int[] invertedIndexFields = null;
+ int[] filterFieldsForNonBulkLoadOps = null;
+ int[] invertedIndexFieldsForNonBulkLoadOps = null;
+ if (filterTypeTraits != null) {
+ filterFields = new int[1];
+ filterFields[0] = numTokenFields + primaryKeys.size();
+ invertedIndexFields = new int[numTokenFields + primaryKeys.size()];
+ for (int k = 0; k < invertedIndexFields.length; k++) {
+ invertedIndexFields[k] = k;
+ }
+
+ filterFieldsForNonBulkLoadOps = new int[numFilterFields];
+ filterFieldsForNonBulkLoadOps[0] = numTokenKeyPairFields;
+ invertedIndexFieldsForNonBulkLoadOps = new int[numTokenKeyPairFields];
+ for (int k = 0; k < invertedIndexFieldsForNonBulkLoadOps.length; k++) {
+ invertedIndexFieldsForNonBulkLoadOps[k] = k;
+ }
+ }
+
+ IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
+ dataverseName, datasetName, indexName, temp);
+
+ // prepare callback
+ JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
+ int datasetId = dataset.getDatasetId();
+ TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
+ IModificationOperationCallbackFactory modificationCallbackFactory = temp
+ ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_INVERTED_INDEX)
+ : new SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_INVERTED_INDEX);
+
+ Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
+ .getMergePolicyFactory(dataset, mdTxnCtx);
+ IIndexDataflowHelperFactory indexDataFlowFactory;
+ if (!isPartitioned) {
+ indexDataFlowFactory = new LSMInvertedIndexDataflowHelperFactory(
+ new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first, compactionInfo.second,
+ new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+ LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+ filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+ invertedIndexFieldsForNonBulkLoadOps, !temp);
+ } else {
+ indexDataFlowFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
+ new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+ compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+ LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+ filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+ invertedIndexFieldsForNonBulkLoadOps, !temp);
+ }
+ IOperatorDescriptor op = new AsterixLSMInvertedIndexUpsertOperatorDescriptor(spec, recordDesc,
+ appContext.getStorageManagerInterface(), splitsAndConstraint.first,
+ appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
+ invListsTypeTraits, invListComparatorFactories, tokenizerFactory, fieldPermutation,
+ indexDataFlowFactory, filterFactory, modificationCallbackFactory, indexName, prevFieldPermutation);
+
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+ } catch (MetadataException e) {
+ throw new AlgebricksException(e);
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
+ //TODO: refactor this method
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRTreeUpsertRuntime(String dataverseName,
+ String datasetName, String indexName, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
+ List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ List<LogicalVariable> additionalFilteringKeys, AsterixTupleFilterFactory filterFactory,
+ RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec,
+ List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
+ throws AlgebricksException {
+ try {
+ Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
+
+ boolean temp = dataset.getDatasetDetails().isTemp();
+ isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
+
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, itemTypeName).getDatatype();
+ if (itemType.getTypeTag() != ATypeTag.RECORD) {
+ throw new AlgebricksException("Only record types can be indexed.");
+ }
+ ARecordType recType = (ARecordType) itemType;
+ Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), indexName);
+
+ List<List<String>> secondaryKeyExprs = secondaryIndex.getKeyFieldNames();
+ List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
+ Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
+ secondaryKeyExprs.get(0), recType);
+ IAType spatialType = keyPairType.first;
+
+ int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+ int numSecondaryKeys = dimension * 2;
+ int numPrimaryKeys = primaryKeys.size();
+ int numKeys = numSecondaryKeys + numPrimaryKeys;
+ ITypeTraits[] typeTraits = new ITypeTraits[numKeys];
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys];
+
+ int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
+ int[] fieldPermutation = new int[numKeys + numFilterFields];
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
+ int i = 0;
+ int j = 0;
+
+ // Get field permutation for new value
+ for (LogicalVariable varKey : secondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ i++;
+ }
+ for (LogicalVariable varKey : primaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
+ i++;
+ j++;
+ }
+
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(additionalFilteringKeys.get(0));
+ fieldPermutation[numKeys] = idx;
+ }
+
+ // Get field permutation for previous value
+ int[] prevFieldPermutation = new int[numKeys + numFilterFields];
+ i = 0;
+
+ // Get field permutation for the previous record's secondary keys
+ for (LogicalVariable varKey : prevSecondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ prevFieldPermutation[i] = idx;
+ i++;
+ }
+ for (int k = 0; k < numPrimaryKeys; k++) {
+ prevFieldPermutation[k + i] = fieldPermutation[k + i];
+ i++;
+ }
+
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(prevAdditionalFilteringKeys.get(0));
+ prevFieldPermutation[numKeys] = idx;
+ }
+
+ IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
+ IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numSecondaryKeys];
+ for (i = 0; i < numSecondaryKeys; i++) {
+ comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE
+ .getBinaryComparatorFactory(nestedKeyType, true);
+ typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
+ valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
+ }
+ List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+ for (List<String> partitioningKey : partitioningKeys) {
+ IAType keyType = recType.getSubFieldType(partitioningKey);
+ typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+ ++i;
+ }
+
+ IBinaryComparatorFactory[] primaryComparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+ dataset, recType, context.getBinaryComparatorFactoryProvider());
+ IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
+ dataverseName, datasetName, indexName, temp);
+ int[] btreeFields = new int[primaryComparatorFactories.length];
+ for (int k = 0; k < btreeFields.length; k++) {
+ btreeFields[k] = k + numSecondaryKeys;
+ }
+
+ ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recType);
+ IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+ recType, context.getBinaryComparatorFactoryProvider());
+ int[] filterFields = null;
+ int[] rtreeFields = null;
+ if (filterTypeTraits != null) {
+ filterFields = new int[1];
+ filterFields[0] = numSecondaryKeys + numPrimaryKeys;
+ rtreeFields = new int[numSecondaryKeys + numPrimaryKeys];
+ for (int k = 0; k < rtreeFields.length; k++) {
+ rtreeFields[k] = k;
+ }
+ }
+
+ // prepare callback
+ JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
+ int datasetId = dataset.getDatasetId();
+ TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
+ IModificationOperationCallbackFactory modificationCallbackFactory = temp
+ ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_RTREE)
+ : new SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_RTREE);
+
+ Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
+ .getMergePolicyFactory(dataset, mdTxnCtx);
+ AsterixLSMTreeUpsertOperatorDescriptor op = new AsterixLSMTreeUpsertOperatorDescriptor(spec, recordDesc,
+ appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+ splitsAndConstraint.first, typeTraits, comparatorFactories, null, fieldPermutation,
+ new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+ primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+ compactionInfo.first, compactionInfo.second,
+ new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+ LSMRTreeIOOperationCallbackFactory.INSTANCE,
+ proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
+ storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
+ filterTypeTraits, filterCmpFactories, filterFields, !temp),
+ filterFactory, false, indexName, null, modificationCallbackFactory,
+ NoOpOperationCallbackFactory.INSTANCE, prevFieldPermutation);
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+ } catch (MetadataException | IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+
+ //TODO: refactor this method
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeUpsertRuntime(String dataverseName,
+ String datasetName, String indexName, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
+ List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ List<LogicalVariable> additionalFilteringKeys, AsterixTupleFilterFactory filterFactory,
+ RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec,
+ List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
+ throws AlgebricksException {
+ // we start with the btree
+ Dataset dataset = findDataset(dataverseName, datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
+ }
+ boolean temp = dataset.getDatasetDetails().isTemp();
+ isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;
+
+ int numKeys = primaryKeys.size() + secondaryKeys.size();
+ int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
+
+ // generate field permutations
+ int[] fieldPermutation = new int[numKeys + numFilterFields];
+ int[] bloomFilterKeyFields = new int[secondaryKeys.size()];
+ int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
+ int i = 0;
+ int j = 0;
+ for (LogicalVariable varKey : secondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ bloomFilterKeyFields[i] = i;
+ i++;
+ }
+ for (LogicalVariable varKey : primaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ fieldPermutation[i] = idx;
+ modificationCallbackPrimaryKeyFields[j] = i;
+ i++;
+ j++;
+ }
+ // Filter can only be one field!
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(additionalFilteringKeys.get(0));
+ fieldPermutation[numKeys] = idx;
+ }
+
+ // generate field permutations for prev record
+ int[] prevFieldPermutation = new int[numKeys + numFilterFields];
+ int k = 0;
+ for (LogicalVariable varKey : prevSecondaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ prevFieldPermutation[k] = idx;
+ k++;
+ }
+ for (LogicalVariable varKey : primaryKeys) {
+ int idx = propagatedSchema.findVariable(varKey);
+ prevFieldPermutation[k] = idx;
+ k++;
+ }
+ // Filter can only be one field!
+ if (numFilterFields > 0) {
+ int idx = propagatedSchema.findVariable(prevAdditionalFilteringKeys.get(0));
+ prevFieldPermutation[numKeys] = idx;
+ }
+
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType;
+ try {
+ itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(), itemTypeName)
+ .getDatatype();
+
+ if (itemType.getTypeTag() != ATypeTag.RECORD) {
+ throw new AlgebricksException("Only record types can be indexed.");
+ }
+
+ ARecordType recType = (ARecordType) itemType;
+
+ // Index parameters.
+ Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), indexName);
+
+ ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recType);
+ IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+ recType, context.getBinaryComparatorFactoryProvider());
+ int[] filterFields = null;
+ int[] btreeFields = null;
+ if (filterTypeTraits != null) {
+ filterFields = new int[1];
+ filterFields[0] = numKeys;
+ btreeFields = new int[numKeys];
+ for (int l = 0; l < btreeFields.length; l++) {
+ btreeFields[l] = l;
+ }
+ }
+
+ List<List<String>> secondaryKeyNames = secondaryIndex.getKeyFieldNames();
+ List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
+ ITypeTraits[] typeTraits = new ITypeTraits[numKeys];
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numKeys];
+ for (i = 0; i < secondaryKeys.size(); ++i) {
+ Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(i),
+ secondaryKeyNames.get(i), recType);
+ IAType keyType = keyPairType.first;
+ comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
+ true);
+ typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+ }
+ List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+ for (List<String> partitioningKey : partitioningKeys) {
+ IAType keyType = recType.getSubFieldType(partitioningKey);
+ comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
+ true);
+ typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+ ++i;
+ }
+
+ IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
+ dataverseName, datasetName, indexName, temp);
+
+ // prepare callback
+ JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
+ int datasetId = dataset.getDatasetId();
+ TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
+ IModificationOperationCallbackFactory modificationCallbackFactory = temp
+ ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_BTREE)
+ : new SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
+ modificationCallbackPrimaryKeyFields, txnSubsystemProvider, IndexOperation.UPSERT,
+ ResourceType.LSM_BTREE);
+
+ Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
+ .getMergePolicyFactory(dataset, mdTxnCtx);
+ IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(
+ new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first, compactionInfo.second,
+ new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+ AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+ storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
+ btreeFields, filterFields, !temp);
+ AsterixLSMTreeUpsertOperatorDescriptor op = new AsterixLSMTreeUpsertOperatorDescriptor(spec, recordDesc,
+ appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+ splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields, fieldPermutation,
+ idfh, filterFactory, false, indexName, null, modificationCallbackFactory,
+ NoOpOperationCallbackFactory.INSTANCE, prevFieldPermutation);
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+ } catch (MetadataException | IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
}
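
The B-tree upsert runtime above lays out each tuple's field permutation as secondary keys first, then primary keys, then the optional single filter field, and builds a second permutation of the same shape for the previous record so the operator can retire the old secondary-index entry while installing the new one. A minimal, self-contained sketch of that layout, with hypothetical column indices standing in for the propagatedSchema.findVariable lookups:

import java.util.Arrays;

public class FieldPermutationSketch {
    // Mirrors the layout built in getBTreeUpsertRuntime:
    // [secondary keys][primary keys][optional filter field]
    static int[] buildPermutation(int[] secondaryKeyCols, int[] primaryKeyCols, int filterCol) {
        int numKeys = secondaryKeyCols.length + primaryKeyCols.length;
        int numFilterFields = filterCol >= 0 ? 1 : 0;
        int[] fieldPermutation = new int[numKeys + numFilterFields];
        int i = 0;
        for (int col : secondaryKeyCols) {
            fieldPermutation[i++] = col;
        }
        for (int col : primaryKeyCols) {
            fieldPermutation[i++] = col;
        }
        if (numFilterFields > 0) {
            fieldPermutation[numKeys] = filterCol; // the filter can only be one field
        }
        return fieldPermutation;
    }

    public static void main(String[] args) {
        // Hypothetical column positions in the propagated schema.
        int[] perm = buildPermutation(new int[] { 3 }, new int[] { 0, 1 }, 5);
        System.out.println(Arrays.toString(perm)); // prints [3, 0, 1, 5]
    }
}
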
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockManager.java
index 8a95f87..03fc4c0 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockManager.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockManager.java
@@ -391,7 +391,7 @@
releaseDataverseReadLock(dataverseName);
}
- public void insertDeleteBegin(String dataverseName, String datasetFullyQualifiedName, List<String> dataverses,
+ public void insertDeleteUpsertBegin(String dataverseName, String datasetFullyQualifiedName, List<String> dataverses,
List<String> datasets) {
dataverses.add(dataverseName);
datasets.add(datasetFullyQualifiedName);
@@ -420,7 +420,7 @@
}
}
- public void insertDeleteEnd(String dataverseName, String datasetFullyQualifiedName, List<String> dataverses,
+ public void insertDeleteUpsertEnd(String dataverseName, String datasetFullyQualifiedName, List<String> dataverses,
List<String> datasets) {
String previous = null;
for (int i = dataverses.size() - 1; i >= 0; i--) {
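
The rename from insertDeleteBegin/End to insertDeleteUpsertBegin/End reflects that upsert statements acquire the same dataverse and dataset locks as inserts and deletes. A hedged sketch of how a caller might bracket an upsert job with the renamed methods; the INSTANCE accessor and the "dataverse.dataset" qualified-name format are assumptions, and only the two method signatures come from this patch:

import java.util.ArrayList;
import java.util.List;

import org.apache.asterix.metadata.utils.MetadataLockManager;

class UpsertLockSketch {
    static void runUpsert(String dataverseName, String datasetName) {
        List<String> dataverses = new ArrayList<>();
        List<String> datasets = new ArrayList<>();
        String qualifiedName = dataverseName + "." + datasetName; // assumed format
        MetadataLockManager.INSTANCE.insertDeleteUpsertBegin(dataverseName, qualifiedName,
                dataverses, datasets);
        try {
            // compile and execute the upsert job while the locks are held
        } finally {
            MetadataLockManager.INSTANCE.insertDeleteUpsertEnd(dataverseName, qualifiedName,
                    dataverses, datasets);
        }
    }
}
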
diff --git a/asterix-om/src/main/java/org/apache/asterix/builders/AbstractListBuilder.java b/asterix-om/src/main/java/org/apache/asterix/builders/AbstractListBuilder.java
index a74f0fa..2b05761 100644
--- a/asterix-om/src/main/java/org/apache/asterix/builders/AbstractListBuilder.java
+++ b/asterix-om/src/main/java/org/apache/asterix/builders/AbstractListBuilder.java
@@ -32,9 +32,6 @@
import org.apache.hyracks.storage.am.common.ophelpers.IntArrayList;
public abstract class AbstractListBuilder implements IAsterixListBuilder {
-
- protected static final byte serNullTypeTag = ATypeTag.NULL.serialize();
-
protected final GrowableArray outputStorage;
protected final DataOutputStream outputStream;
protected final IntArrayList offsets;
@@ -80,13 +77,13 @@
@Override
public void addItem(IValueReference item) throws HyracksDataException {
try {
- if (!fixedSize && (item.getByteArray()[0] != serNullTypeTag || itemTypeTag == ATypeTag.ANY))
+ if (!fixedSize && (item.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG || itemTypeTag == ATypeTag.ANY))
this.offsets.add(outputStorage.getLength());
if (itemTypeTag == ATypeTag.ANY
- || (itemTypeTag == ATypeTag.NULL && item.getByteArray()[0] == serNullTypeTag)) {
+ || (itemTypeTag == ATypeTag.NULL && item.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)) {
this.numberOfItems++;
this.outputStream.write(item.getByteArray(), item.getStartOffset(), item.getLength());
- } else if (item.getByteArray()[0] != serNullTypeTag) {
+ } else if (item.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
this.numberOfItems++;
this.outputStream.write(item.getByteArray(), item.getStartOffset() + 1, item.getLength() - 1);
}
@@ -110,8 +107,8 @@
if (!fixedSize) {
offsetPosition += 8;
for (int i = 0; i < offsets.size(); i++) {
- SerializerDeserializerUtil.writeIntToByteArray(offsetArray, offsets.get(i) + metadataInfoSize
- + headerSize, offsetPosition);
+ SerializerDeserializerUtil.writeIntToByteArray(offsetArray,
+ offsets.get(i) + metadataInfoSize + headerSize, offsetPosition);
offsetPosition += 4;
}
}
diff --git a/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java b/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
index e3ed6fb..a36238d 100644
--- a/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
+++ b/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
@@ -44,8 +44,6 @@
public class RecordBuilder implements IARecordBuilder {
private final static int DEFAULT_NUM_OPEN_FIELDS = 10;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
private final UTF8StringSerializerDeserializer utf8SerDer = new UTF8StringSerializerDeserializer();
private int openPartOffsetArraySize;
@@ -163,7 +161,7 @@
// +1 because we do not store the value tag.
closedPartOutputStream.write(value.getByteArray(), value.getStartOffset() + 1, len);
numberOfClosedFields++;
- if (isNullable && value.getByteArray()[value.getStartOffset()] != SER_NULL_TYPE_TAG) {
+ if (isNullable && value.getByteArray()[value.getStartOffset()] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullBitMap[id / 8] |= (byte) (1 << (7 - (id % 8)));
}
}
@@ -173,7 +171,7 @@
// We assume the tag is not included (closed field)
closedPartOutputStream.write(value, 0, value.length);
numberOfClosedFields++;
- if (isNullable && value[0] != SER_NULL_TYPE_TAG) {
+ if (isNullable && value[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullBitMap[id / 8] |= (byte) (1 << (7 - (id % 8)));
}
}
@@ -255,7 +253,7 @@
// write the record header
if (writeTypeTag) {
- out.writeByte(RECORD_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_RECORD_TYPE_TAG);
}
out.writeInt(recordLength);
if (isOpen) {
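
RecordBuilder records which nullable closed fields actually carry a value in a per-record null bitmap, one bit per field with the most significant bit first; this patch only changes which constant the tag byte is compared against. A small runnable example of what nullBitMap[id / 8] |= (byte) (1 << (7 - (id % 8))) does for field id 10:

public class NullBitmapSketch {
    public static void main(String[] args) {
        byte[] nullBitMap = new byte[2]; // enough for 16 closed fields
        int id = 10;                     // field 10 holds a non-null value
        nullBitMap[id / 8] |= (byte) (1 << (7 - (id % 8)));
        // Field 10 lives in byte 1, third bit from the left: 0b00100000 = 0x20.
        System.out.printf("%02x %02x%n", nullBitMap[0], nullBitMap[1]); // prints 00 20
    }
}
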
diff --git a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/AqlNullWriterFactory.java b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/AqlNullWriterFactory.java
index ed19224..8fa881a 100644
--- a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/AqlNullWriterFactory.java
+++ b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/AqlNullWriterFactory.java
@@ -42,7 +42,7 @@
@Override
public void writeNull(DataOutput out) throws HyracksDataException {
try {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} catch (IOException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index 6a24cf3..4ce9617 100644
--- a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -113,13 +113,13 @@
// Normally, comparing between NULL and non-NULL values should return UNKNOWN as the result.
// However, at this point, we assume that NULL check between two types is already done.
// Therefore, inside this method, we return an order between two values even if one value is NULL.
- if (b1[s1] == ATypeTag.NULL.serialize()) {
- if (b2[s2] == ATypeTag.NULL.serialize())
+ if (b1[s1] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
+ if (b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
return 0;
else
return -1;
} else {
- if (b2[s2] == ATypeTag.NULL.serialize())
+ if (b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
return 1;
}
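
Both generic comparators touched by this patch keep the same NULL-ordering convention: by the time control reaches this code, NULL handling is assumed to have been decided, so NULL compares equal to NULL and smaller than any non-NULL value. A standalone sketch of that prologue; the sentinel return value for the non-NULL case is an illustration, not part of the real comparators:

import org.apache.asterix.om.types.ATypeTag;

class NullOrderingSketch {
    static final int NOT_NULL = Integer.MIN_VALUE; // sentinel: fall through to the typed compare

    static int compareNullPrologue(byte[] b1, int s1, byte[] b2, int s2) {
        if (b1[s1] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
            return b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG ? 0 : -1;
        }
        if (b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
            return 1;
        }
        return NOT_NULL; // neither side is NULL; the real comparators compare typed values here
    }
}
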
diff --git a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
index 77b8b3a..8907bca 100644
--- a/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
+++ b/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
@@ -29,8 +29,8 @@
import org.apache.hyracks.data.std.primitive.DoublePointable;
import org.apache.hyracks.data.std.primitive.FloatPointable;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
-import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
import org.apache.hyracks.data.std.primitive.UTF8StringLowercasePointable;
+import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
public class ListItemBinaryComparatorFactory implements IBinaryComparatorFactory {
@@ -84,13 +84,13 @@
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) throws HyracksDataException {
- if (b1[s1] == ATypeTag.NULL.serialize()) {
- if (b2[s2] == ATypeTag.NULL.serialize())
+ if (b1[s1] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
+ if (b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
return 0;
else
return -1;
} else {
- if (b2[s2] == ATypeTag.NULL.serialize())
+ if (b2[s2] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
return 1;
}
diff --git a/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryBooleanInspectorImpl.java b/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryBooleanInspectorImpl.java
index 6d09bf8..247e6fd 100644
--- a/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryBooleanInspectorImpl.java
+++ b/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryBooleanInspectorImpl.java
@@ -34,14 +34,12 @@
}
};
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
private AqlBinaryBooleanInspectorImpl() {
}
@Override
public boolean getBooleanValue(byte[] bytes, int offset, int length) {
- if (bytes[offset] == SER_NULL_TYPE_TAG)
+ if (bytes[offset] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
return false;
/** check if the runtime type is boolean */
ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[offset]);
diff --git a/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryTokenizerFactoryProvider.java b/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryTokenizerFactoryProvider.java
index e73f769..52248c8 100644
--- a/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryTokenizerFactoryProvider.java
+++ b/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/AqlBinaryTokenizerFactoryProvider.java
@@ -35,10 +35,12 @@
public static final AqlBinaryTokenizerFactoryProvider INSTANCE = new AqlBinaryTokenizerFactoryProvider();
private static final IBinaryTokenizerFactory aqlStringTokenizer = new DelimitedUTF8StringBinaryTokenizerFactory(
- true, true, new UTF8WordTokenFactory(ATypeTag.STRING.serialize(), ATypeTag.INT32.serialize()));
+ true, true,
+ new UTF8WordTokenFactory(ATypeTag.SERIALIZED_STRING_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG));
private static final IBinaryTokenizerFactory aqlHashingStringTokenizer = new DelimitedUTF8StringBinaryTokenizerFactory(
- true, true, new HashedUTF8WordTokenFactory(ATypeTag.INT32.serialize(), ATypeTag.INT32.serialize()));
+ true, true,
+ new HashedUTF8WordTokenFactory(ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG));
private static final IBinaryTokenizerFactory orderedListTokenizer = new AOrderedListBinaryTokenizerFactory(
new AListElementTokenFactory());
@@ -77,7 +79,8 @@
return null;
} else {
return new NGramUTF8StringBinaryTokenizerFactory(gramLength, usePrePost, true, true,
- new UTF8NGramTokenFactory(ATypeTag.STRING.serialize(), ATypeTag.INT32.serialize()));
+ new UTF8NGramTokenFactory(ATypeTag.SERIALIZED_STRING_TYPE_TAG,
+ ATypeTag.SERIALIZED_INT32_TYPE_TAG));
}
}
case ORDEREDLIST: {
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java b/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
index 03e3895..67b62d6 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
@@ -19,6 +19,11 @@
package org.apache.asterix.om.pointables;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
@@ -35,11 +40,6 @@
import org.apache.hyracks.api.dataflow.value.INullWriter;
import org.apache.hyracks.util.string.UTF8StringWriter;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
/**
* This class interprets the binary data representation of a record. One can
* call getFieldNames, getFieldTypeTags and getFieldValues to get pointable
@@ -52,6 +52,7 @@
* object pool based allocator, in order to have object reuse
*/
static IObjectFactory<IVisitablePointable, IAType> FACTORY = new IObjectFactory<IVisitablePointable, IAType>() {
+ @Override
public IVisitablePointable create(IAType type) {
return new ARecordVisitablePointable((ARecordType) type);
}
@@ -114,7 +115,7 @@
// add type name Reference (including a astring type tag)
int nameStart = typeBos.size();
- typeDos.writeByte(ATypeTag.STRING.serialize());
+ typeDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
utf8Writer.writeUTF8(fieldNameStrs[i], typeDos);
int nameEnd = typeBos.size();
IVisitablePointable typeNameReference = AFlatValuePointable.FACTORY.create(null);
@@ -183,8 +184,7 @@
boolean hasNullableFields = NonTaggedFormatUtil.hasNullableField(inputRecType);
if (hasNullableFields) {
nullBitMapOffset = s;
- offsetArrayOffset = s
- + (this.numberOfSchemaFields % 8 == 0 ? numberOfSchemaFields / 8
+ offsetArrayOffset = s + (this.numberOfSchemaFields % 8 == 0 ? numberOfSchemaFields / 8
: numberOfSchemaFields / 8 + 1);
} else {
offsetArrayOffset = s;
@@ -238,7 +238,7 @@
int fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING,
false);
int fnstart = dataBos.size();
- dataDos.writeByte(ATypeTag.STRING.serialize());
+ dataDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
dataDos.write(b, fieldOffset, fieldValueLength);
int fnend = dataBos.size();
IVisitablePointable fieldName = allocator.allocateEmpty();
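
Field-name references in ARecordVisitablePointable are materialized as tagged serialized strings: the string type tag followed by the UTF-8 encoded name, exactly the writeByte/writeUTF8 pair in the hunk above. A small sketch of producing such a tagged string; the no-argument UTF8StringWriter constructor is an assumption:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.asterix.om.types.ATypeTag;
import org.apache.hyracks.util.string.UTF8StringWriter;

class TaggedStringSketch {
    // Mirrors writeByte(SERIALIZED_STRING_TYPE_TAG) followed by writeUTF8(name, dos).
    static byte[] serializeFieldName(String fieldName) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        dos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
        new UTF8StringWriter().writeUTF8(fieldName, dos);
        return bos.toByteArray();
    }
}
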
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java b/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
index d89ae6a..d632d70 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
@@ -19,6 +19,14 @@
package org.apache.asterix.om.pointables.cast;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.asterix.builders.RecordBuilder;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.TypeException;
@@ -47,14 +55,6 @@
import org.apache.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
import org.apache.hyracks.util.string.UTF8StringWriter;
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.List;
-
/**
* This class is to do the runtime type cast for a record. It is ONLY visible to
* ACastVisitor.
@@ -75,8 +75,8 @@
private final IVisitablePointable nullReference = allocator.allocateEmpty();
private final IVisitablePointable nullTypeTag = allocator.allocateEmpty();
- private final IBinaryComparator fieldNameComparator = PointableBinaryComparatorFactory.of(
- UTF8StringPointable.FACTORY).createBinaryComparator();
+ private final IBinaryComparator fieldNameComparator = PointableBinaryComparatorFactory
+ .of(UTF8StringPointable.FACTORY).createBinaryComparator();
private final ByteArrayAccessibleOutputStream outputBos = new ByteArrayAccessibleOutputStream();
private final DataOutputStream outputDos = new DataOutputStream(outputBos);
@@ -107,7 +107,7 @@
int end = bos.size();
nullReference.set(bos.getByteArray(), start, end - start);
start = bos.size();
- dos.write(ATypeTag.NULL.serialize());
+ dos.write(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
end = bos.size();
nullTypeTag.set(bos.getByteArray(), start, end - start);
} catch (IOException e) {
@@ -115,8 +115,8 @@
}
}
- public void castRecord(ARecordVisitablePointable recordAccessor, IVisitablePointable resultAccessor, ARecordType reqType,
- ACastVisitor visitor) throws IOException, TypeException {
+ public void castRecord(ARecordVisitablePointable recordAccessor, IVisitablePointable resultAccessor,
+ ARecordType reqType, ACastVisitor visitor) throws IOException, TypeException {
List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
List<IVisitablePointable> fieldTypeTags = recordAccessor.getFieldTypeTags();
List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues();
@@ -214,15 +214,15 @@
IVisitablePointable reqFieldTypeTag = reqFieldTypeTags.get(reqFnPos);
if (fieldTypeTag.equals(reqFieldTypeTag) || (
// match the null type of optional field
- optionalFields[reqFnPos] && fieldTypeTag.equals(nullTypeTag))) {
+ optionalFields[reqFnPos] && fieldTypeTag.equals(nullTypeTag))) {
fieldPermutation[reqFnPos] = fnPos;
openFields[fnPos] = false;
} else {
// if mismatch, check whether input type can be promoted to the required type
- ATypeTag inputTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fieldTypeTag
- .getByteArray()[fieldTypeTag.getStartOffset()]);
- ATypeTag requiredTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(reqFieldTypeTag
- .getByteArray()[reqFieldTypeTag.getStartOffset()]);
+ ATypeTag inputTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(fieldTypeTag.getByteArray()[fieldTypeTag.getStartOffset()]);
+ ATypeTag requiredTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(reqFieldTypeTag.getByteArray()[reqFieldTypeTag.getStartOffset()]);
if (ATypeHierarchy.canPromote(inputTypeTag, requiredTypeTag)
|| ATypeHierarchy.canDemote(inputTypeTag, requiredTypeTag)) {
@@ -255,8 +255,8 @@
//print the field type
IVisitablePointable fieldType = fieldTypeTags.get(i);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fieldType.getByteArray()[fieldType
- .getStartOffset()]);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(fieldType.getByteArray()[fieldType.getStartOffset()]);
ps.print(typeTag);
//collect the output message
@@ -281,8 +281,8 @@
}
private void writeOutput(List<IVisitablePointable> fieldNames, List<IVisitablePointable> fieldTypeTags,
- List<IVisitablePointable> fieldValues, DataOutput output, ACastVisitor visitor) throws IOException,
- AsterixException {
+ List<IVisitablePointable> fieldValues, DataOutput output, ACastVisitor visitor)
+ throws IOException, AsterixException {
// reset the states of the record builder
recBuilder.reset(cachedReqType);
recBuilder.init();
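
When a closed field's type tag does not match the required type, castRecord falls back to checking whether the input type can be promoted or demoted to the required one before reporting a type error. A minimal sketch of that check; the package name in the import is an assumption, and only canPromote and canDemote appear in the hunk above:

import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy; // assumed package for ATypeHierarchy

class PromotionCheckSketch {
    // Returns true when the caster can still place the field despite the tag mismatch.
    static boolean castable(ATypeTag inputTypeTag, ATypeTag requiredTypeTag) {
        return ATypeHierarchy.canPromote(inputTypeTag, requiredTypeTag)
                || ATypeHierarchy.canDemote(inputTypeTag, requiredTypeTag);
    }
}
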
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java b/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
index ee91aab..ac50312 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
@@ -216,10 +216,10 @@
// Closed field accessors.
// -----------------------
- public void getClosedFieldValue(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException,
- AsterixException {
+ public void getClosedFieldValue(ARecordType recordType, int fieldId, DataOutput dOut)
+ throws IOException, AsterixException {
if (isClosedFieldNull(recordType, fieldId)) {
- dOut.writeByte(ATypeTag.NULL.serialize());
+ dOut.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else {
dOut.write(getClosedFieldTag(recordType, fieldId));
dOut.write(bytes, getClosedFieldOffset(recordType, fieldId), getClosedFieldSize(recordType, fieldId));
@@ -231,7 +231,7 @@
}
public void getClosedFieldName(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException {
- dOut.writeByte(ATypeTag.STRING.serialize());
+ dOut.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
utf8Writer.writeUTF8(getClosedFieldName(recordType, fieldId), dOut);
}
@@ -281,8 +281,8 @@
// Open field accessors.
// -----------------------
- public void getOpenFieldValue(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException,
- AsterixException {
+ public void getOpenFieldValue(ARecordType recordType, int fieldId, DataOutput dOut)
+ throws IOException, AsterixException {
dOut.write(bytes, getOpenFieldValueOffset(recordType, fieldId), getOpenFieldValueSize(recordType, fieldId));
}
@@ -297,7 +297,7 @@
}
public void getOpenFieldName(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException {
- dOut.writeByte(ATypeTag.STRING.serialize());
+ dOut.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
dOut.write(bytes, getOpenFieldNameOffset(recordType, fieldId), getOpenFieldNameSize(recordType, fieldId));
}
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java b/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
index 0a341f0..6196c60 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
@@ -24,9 +24,7 @@
/**
* There is a unique tag for each primitive type and for each kind of
- * non-primitive type in the object model.
- *
- * @author Nicola
+ * non-primitive type in the object model
*/
public enum ATypeTag implements IEnumSerializer {
INT8(1),
@@ -69,19 +67,39 @@
UUID(38),
SHORTWITHOUTTYPEINFO(40);
- private byte value;
-
- private ATypeTag(int value) {
- this.value = (byte) value;
- }
-
- @Override
- public byte serialize() {
- return value;
- }
-
+ /*
+ * Serialized Tags begin
+ */
+ public static final byte SERIALIZED_STRING_TYPE_TAG = STRING.serialize();
+ public static final byte SERIALIZED_NULL_TYPE_TAG = NULL.serialize();
+ public static final byte SERIALIZED_DOUBLE_TYPE_TAG = DOUBLE.serialize();
+ public static final byte SERIALIZED_RECORD_TYPE_TAG = RECORD.serialize();
+ public static final byte SERIALIZED_INT32_TYPE_TAG = INT32.serialize();
+ public static final byte SERIALIZED_ORDEREDLIST_TYPE_TAG = ORDEREDLIST.serialize();
+ public static final byte SERIALIZED_UNORDEREDLIST_TYPE_TAG = UNORDEREDLIST.serialize();
+ public static final byte SERIALIZED_POLYGON_TYPE_TAG = POLYGON.serialize();
+ public static final byte SERIALIZED_DATE_TYPE_TAG = DATE.serialize();
+ public static final byte SERIALIZED_TIME_TYPE_TAG = TIME.serialize();
+ public static final byte SERIALIZED_DATETIME_TYPE_TAG = DATETIME.serialize();
+ public static final byte SERIALIZED_SYSTEM_NULL_TYPE_TAG = SYSTEM_NULL.serialize();
+ public static final byte SERIALIZED_DURATION_TYPE_TAG = DURATION.serialize();
+ public static final byte SERIALIZED_DAY_TIME_DURATION_TYPE_TAG = DAYTIMEDURATION.serialize();
+ public static final byte SERIALIZED_POINT_TYPE_TAG = POINT.serialize();
+ public static final byte SERIALIZED_INTERVAL_TYPE_TAG = INTERVAL.serialize();
+ public static final byte SERIALIZED_CIRCLE_TYPE_TAG = CIRCLE.serialize();
+ public static final byte SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG = YEARMONTHDURATION.serialize();
+ public static final byte SERIALIZED_LINE_TYPE_TAG = LINE.serialize();
+ public static final byte SERIALIZED_RECTANGLE_TYPE_TAG = RECTANGLE.serialize();
+ public static final byte SERIALIZED_BOOLEAN_TYPE_TAG = BOOLEAN.serialize();
+ public static final byte SERIALIZED_INT8_TYPE_TAG = INT8.serialize();
+ public static final byte SERIALIZED_INT16_TYPE_TAG = INT16.serialize();
+ public static final byte SERIALIZED_INT64_TYPE_TAG = INT64.serialize();
+ public static final byte SERIALIZED_FLOAT_TYPE_TAG = FLOAT.serialize();
+ /*
+ * Serialized Tags end
+ */
public static final int TYPE_COUNT = ATypeTag.values().length;
-
+ private byte value;
public static final ATypeTag[] VALUE_TYPE_MAPPING;
static {
@@ -96,6 +114,15 @@
VALUE_TYPE_MAPPING = typeList.toArray(new ATypeTag[typeList.size()]);
}
+ private ATypeTag(int value) {
+ this.value = (byte) value;
+ }
+
+ @Override
+ public byte serialize() {
+ return value;
+ }
+
public boolean isDerivedType() {
if (this == ATypeTag.RECORD || this == ATypeTag.ORDEREDLIST || this == ATypeTag.UNORDEREDLIST
|| this == ATypeTag.UNION)
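
The SERIALIZED_*_TYPE_TAG constants introduced here replace the per-class SER_* fields that each call site used to compute by calling serialize() on an enum constant. A small sketch of the before/after pattern that recurs throughout this patch:

import org.apache.asterix.om.types.ATypeTag;

class TagCheckSketch {
    // Before: every call site cached its own copy of the serialized tag.
    private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();

    static boolean isNullBefore(byte[] bytes, int offset) {
        return bytes[offset] == SER_NULL_TYPE_TAG;
    }

    // After: the shared constant on ATypeTag is used directly.
    static boolean isNullAfter(byte[] bytes, int offset) {
        return bytes[offset] == ATypeTag.SERIALIZED_NULL_TYPE_TAG;
    }
}
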
diff --git a/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java b/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
index fd1df0b..e4d94b4 100644
--- a/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
+++ b/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
@@ -61,11 +61,12 @@
import org.apache.asterix.common.transactions.LogRecord;
import org.apache.asterix.common.transactions.LogSource;
import org.apache.asterix.common.transactions.LogType;
-import org.apache.asterix.replication.functions.ReplicationProtocol;
-import org.apache.asterix.replication.functions.ReplicationProtocol.ReplicationRequestType;
+import org.apache.asterix.common.utils.TransactionUtil;
import org.apache.asterix.replication.functions.ReplicaFilesRequest;
import org.apache.asterix.replication.functions.ReplicaIndexFlushRequest;
import org.apache.asterix.replication.functions.ReplicaLogsRequest;
+import org.apache.asterix.replication.functions.ReplicationProtocol;
+import org.apache.asterix.replication.functions.ReplicationProtocol.ReplicationRequestType;
import org.apache.asterix.replication.logging.RemoteLogMapping;
import org.apache.asterix.replication.storage.LSMComponentLSNSyncTask;
import org.apache.asterix.replication.storage.LSMComponentProperties;
@@ -393,8 +394,8 @@
long fileSize = fileChannel.size();
fileProperties.initialize(filePath, fileSize, replicaId, false,
IMetaDataPageManager.INVALID_LSN_OFFSET, false);
- outBuffer = ReplicationProtocol.writeFileReplicationRequest(outBuffer,
- fileProperties, ReplicationRequestType.REPLICATE_FILE);
+ outBuffer = ReplicationProtocol.writeFileReplicationRequest(outBuffer, fileProperties,
+ ReplicationRequestType.REPLICATE_FILE);
//send file info
NetworkingUtil.transferBufferToChannel(socketChannel, outBuffer);
@@ -488,13 +489,14 @@
if (remoteLog.getLogType() == LogType.JOB_COMMIT) {
LogRecord jobCommitLog = new LogRecord();
- jobCommitLog.formJobTerminateLogRecord(remoteLog.getJobId(), true, remoteLog.getNodeId());
+ TransactionUtil.formJobTerminateLogRecord(jobCommitLog, remoteLog.getJobId(), true,
+ remoteLog.getNodeId());
jobCommitLog.setReplicationThread(this);
jobCommitLog.setLogSource(LogSource.REMOTE);
logManager.log(jobCommitLog);
} else if (remoteLog.getLogType() == LogType.FLUSH) {
LogRecord flushLog = new LogRecord();
- flushLog.formFlushLogRecord(remoteLog.getDatasetId(), null, remoteLog.getNodeId(),
+ TransactionUtil.formFlushLogRecord(flushLog, remoteLog.getDatasetId(), null, remoteLog.getNodeId(),
remoteLog.getNumOfFlushedIndexes());
flushLog.setReplicationThread(this);
flushLog.setLogSource(LogSource.REMOTE);
@@ -523,8 +525,8 @@
if (logRecord.getLogType() == LogType.JOB_COMMIT) {
//send ACK to requester
try {
- socketChannel.socket().getOutputStream().write(
- (localNodeID + ReplicationProtocol.JOB_COMMIT_ACK + logRecord.getJobId() + "\n")
+ socketChannel.socket().getOutputStream()
+ .write((localNodeID + ReplicationProtocol.JOB_COMMIT_ACK + logRecord.getJobId() + "\n")
.getBytes());
socketChannel.socket().getOutputStream().flush();
} catch (IOException e) {
@@ -647,4 +649,4 @@
}
}
}
-}
+}
\ No newline at end of file
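
The replication channel now forms remote commit and flush log records through static helpers on TransactionUtil, passing the LogRecord as the first argument, instead of calling instance methods on LogRecord. A hedged sketch of the new call shape for a remote JOB_COMMIT; parameter types other than LogRecord are simplified stand-ins for what the remote log exposes:

import org.apache.asterix.common.transactions.LogRecord;
import org.apache.asterix.common.transactions.LogSource;
import org.apache.asterix.common.utils.TransactionUtil;

class RemoteCommitLogSketch {
    // Re-forms a remote JOB_COMMIT as a local log record, as in the hunk above.
    static LogRecord formRemoteCommit(int jobId, String nodeId) {
        LogRecord jobCommitLog = new LogRecord();
        TransactionUtil.formJobTerminateLogRecord(jobCommitLog, jobId, true, nodeId);
        jobCommitLog.setLogSource(LogSource.REMOTE);
        return jobCommitLog;
    }
}
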
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
index b61f3b2..24e6cb7 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
@@ -95,7 +95,7 @@
try {
state.writeDouble(0.0);
state.writeLong(0);
- state.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ state.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} catch (IOException e) {
throw new AlgebricksException(e);
}
@@ -193,9 +193,9 @@
if (GlobalConfig.DEBUG) {
GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
}
- result.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ result.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} else if (aggType == ATypeTag.NULL) {
- result.writeByte(ATypeTag.NULL.serialize());
+ result.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else {
sumBytes.reset();
aDouble.setValue(sum);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableSumAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableSumAggregateFunction.java
index 3eae34a..5e6ca46 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableSumAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableSumAggregateFunction.java
@@ -63,15 +63,14 @@
@SuppressWarnings("rawtypes")
public ISerializerDeserializer serde;
- public AbstractSerializableSumAggregateFunction(ICopyEvaluatorFactory[] args)
- throws AlgebricksException {
+ public AbstractSerializableSumAggregateFunction(ICopyEvaluatorFactory[] args) throws AlgebricksException {
eval = args[0].createEvaluator(inputVal);
}
@Override
public void init(DataOutput state) throws AlgebricksException {
try {
- state.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ state.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
state.writeDouble(0.0);
} catch (IOException e) {
throw new AlgebricksException(e);
@@ -202,8 +201,8 @@
break;
}
default:
- throw new AlgebricksException("SumAggregationFunction: incompatible type for the result ("
- + aggType + "). ");
+ throw new AlgebricksException(
+ "SumAggregationFunction: incompatible type for the result (" + aggType + "). ");
}
} catch (IOException e) {
throw new AlgebricksException(e);
@@ -218,8 +217,11 @@
protected boolean skipStep(byte[] state, int start) {
return false;
}
+
protected abstract void processNull(byte[] state, int start);
+
protected abstract void processSystemNull() throws AlgebricksException;
+
protected abstract void finishSystemNull(DataOutput out) throws IOException;
}
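
The serializable sum aggregate keeps its running state as a raw byte array: a one-byte type tag written at init as SERIALIZED_SYSTEM_NULL_TYPE_TAG, followed by the running double; flagging the state as NULL is then just overwriting that tag byte, which is what the processNull overrides below do through AGG_TYPE_OFFSET. A standalone sketch of that layout (the offset-zero position of the tag follows from the init order shown above):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.asterix.om.types.ATypeTag;

public class SumStateSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream state = new DataOutputStream(bos);
        state.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG); // tag byte first
        state.writeDouble(0.0);                                    // running sum
        byte[] bytes = bos.toByteArray();
        // Flipping the tag byte to NULL is what processNull does via AGG_TYPE_OFFSET.
        bytes[0] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
        System.out.println(bytes.length); // prints 9: 1 tag byte + 8 bytes of double
    }
}
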
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableAvgAggregateFunction.java
index d7bb1e1..9773272 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableAvgAggregateFunction.java
@@ -47,8 +47,9 @@
finish(state, start, len, result);
}
+ @Override
protected void processNull(byte[] state, int start) {
- state[start + AGG_TYPE_OFFSET] = ATypeTag.NULL.serialize();
+ state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateFunction.java
index a86c699..3a1298c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableGlobalAvgAggregateFunction.java
@@ -48,8 +48,9 @@
finishPartialResults(state, start, len, result);
}
+ @Override
protected void processNull(byte[] state, int start) {
- state[start + AGG_TYPE_OFFSET] = ATypeTag.NULL.serialize();
+ state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateAvgAggregateFunction.java
index bf3a596e..af21c9f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateAvgAggregateFunction.java
@@ -48,8 +48,9 @@
finishPartialResults(state, start, len, result);
}
+ @Override
protected void processNull(byte[] state, int start) {
- state[start + AGG_TYPE_OFFSET] = ATypeTag.NULL.serialize();
+ state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateFunction.java
index 3f30472..1b7772f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableLocalAvgAggregateFunction.java
@@ -48,8 +48,9 @@
finish(state, start, len, result);
}
+ @Override
protected void processNull(byte[] state, int start) {
- state[start + AGG_TYPE_OFFSET] = ATypeTag.NULL.serialize();
+ state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
index fc91d8d..ef0dc3b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
@@ -38,9 +38,9 @@
this.isLocalAgg = isLocalAgg;
}
+ @Override
protected void processNull(byte[] state, int start) {
- ATypeTag aggType = ATypeTag.NULL;
- state[start + AGG_TYPE_OFFSET] = aggType.serialize();
+ state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
index a4f9968..37d4b05 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
@@ -182,9 +182,9 @@
if (GlobalConfig.DEBUG) {
GlobalConfig.ASTERIX_LOGGER.finest("AVG aggregate ran over empty input.");
}
- out.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} else if (aggType == ATypeTag.NULL) {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else {
sumBytes.reset();
aDouble.setValue(sum);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
index aaf352a..e934ddb 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
@@ -86,8 +86,8 @@
// Initialize min value.
outputVal.assign(inputVal);
} else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
- throw new AlgebricksException("Unexpected type " + typeTag + " in aggregation input stream. Expected type "
- + aggType + ".");
+ throw new AlgebricksException(
+ "Unexpected type " + typeTag + " in aggregation input stream. Expected type " + aggType + ".");
} else {
// If a system_null is encountered locally, it would be an error; otherwise if it is seen
@@ -161,7 +161,7 @@
try {
switch (aggType) {
case NULL: {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
break;
}
case SYSTEM_NULL: {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/MinMaxAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/MinMaxAggregateFunction.java
index f227f9a..e0281bf 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/MinMaxAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/MinMaxAggregateFunction.java
@@ -34,6 +34,7 @@
this.isLocalAgg = isLocalAgg;
}
+ @Override
protected void processNull() {
aggType = ATypeTag.NULL;
}
@@ -54,9 +55,9 @@
protected void finishSystemNull() throws IOException {
// Empty stream. For local agg return system null. For global agg return null.
if (isLocalAgg) {
- out.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} else {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
}
}
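
For an empty input stream the min/max aggregates follow the convention spelled out in these hunks: a local aggregate emits SYSTEM_NULL so the global stage can distinguish "no input" from "NULL input", while a global aggregate emits plain NULL. A minimal sketch of that branch:

import java.io.DataOutput;
import java.io.IOException;

import org.apache.asterix.om.types.ATypeTag;

class FinishSystemNullSketch {
    // Sketch of finishSystemNull() as used by the min/max aggregates in this patch.
    static void finishSystemNull(DataOutput out, boolean isLocalAgg) throws IOException {
        if (isLocalAgg) {
            out.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
        } else {
            out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
        }
    }
}
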
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlMinMaxAggregateFunction.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlMinMaxAggregateFunction.java
index cc8715c..1841ed0 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlMinMaxAggregateFunction.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlMinMaxAggregateFunction.java
@@ -49,9 +49,9 @@
protected void finishSystemNull() throws IOException {
// Empty stream. For local agg return system null. For global agg return null.
if (isLocalAgg) {
- out.writeByte(ATypeTag.SYSTEM_NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
} else {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
}
}
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
index d8c442a..7bddb09 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
@@ -46,11 +46,7 @@
public class CircleCenterAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.GET_CIRCLE_CENTER_ACCESSOR;
- private static final byte SER_CICLE_TAG = ATypeTag.CIRCLE.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,14 +85,14 @@
try {
double cX;
double cY;
- if (bytes[0] == SER_CICLE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_CIRCLE_TYPE_TAG) {
cX = ADoubleSerializerDeserializer.getDouble(bytes,
ACircleSerializerDeserializer.getCenterPointCoordinateOffset(Coordinate.X));
cY = ADoubleSerializerDeserializer.getDouble(bytes,
ACircleSerializerDeserializer.getCenterPointCoordinateOffset(Coordinate.Y));
aPoint.setValue(cX, cY);
pointSerde.serialize(aPoint, out);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException("get-center does not support the type: " + bytes[0]
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
index 23a5686..d44f43a 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
@@ -43,13 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class CircleRadiusAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.GET_CIRCLE_RADIUS_ACCESSOR;
- private static final byte SER_CICLE_TAG = ATypeTag.CIRCLE.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -87,12 +82,12 @@
try {
double radius;
- if (bytes[0] == SER_CICLE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_CIRCLE_TYPE_TAG) {
radius = ADoubleSerializerDeserializer.getDouble(bytes,
ACircleSerializerDeserializer.getRadiusOffset());
aDouble.setValue(radius);
doubleSerde.serialize(aDouble, out);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException("get-radius does not support the type: " + bytes[0]
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
index 712e53c..b9f7248 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
@@ -50,15 +50,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class LineRectanglePolygonAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR;
- private static final byte SER_LINE_TAG = ATypeTag.LINE.serialize();
- private static final byte SER_RECTANGLE_TAG = ATypeTag.RECTANGLE.serialize();
- private static final byte SER_POLYGON_TAG = ATypeTag.POLYGON.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -99,7 +92,7 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_LINE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_LINE_TYPE_TAG) {
listBuilder.reset(pointListType);
inputVal.reset();
@@ -121,7 +114,7 @@
listBuilder.addItem(inputVal);
listBuilder.write(out, true);
- } else if (bytes[0] == SER_RECTANGLE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_RECTANGLE_TYPE_TAG) {
listBuilder.reset(pointListType);
inputVal.reset();
@@ -143,7 +136,7 @@
listBuilder.addItem(inputVal);
listBuilder.write(out, true);
- } else if (bytes[0] == SER_POLYGON_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_POLYGON_TYPE_TAG) {
int numOfPoints = AInt16SerializerDeserializer.getShort(bytes,
APolygonSerializerDeserializer.getNumberOfPointsOffset());
@@ -162,7 +155,7 @@
listBuilder.addItem(inputVal);
}
listBuilder.write(out, true);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException("get-points does not support the type: " + bytes[0]
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
index 62b7a1f..b84ff77 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
@@ -44,13 +44,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class PointXCoordinateAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.GET_POINT_X_COORDINATE_ACCESSOR;
- private static final byte SER_POINT_TAG = ATypeTag.POINT.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -88,12 +83,12 @@
try {
double x;
- if (bytes[0] == SER_POINT_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_POINT_TYPE_TAG) {
x = ADoubleSerializerDeserializer.getDouble(bytes,
APointSerializerDeserializer.getCoordinateOffset(Coordinate.X));
aDouble.setValue(x);
doubleSerde.serialize(aDouble, out);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException("get-x does not support the type: " + bytes[0]
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
index e6e9f51..c03758d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
@@ -48,8 +48,6 @@
private static final long serialVersionUID = 1L;
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.GET_POINT_Y_COORDINATE_ACCESSOR;
- private static final byte SER_POINT_TAG = ATypeTag.POINT.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@@ -88,12 +86,12 @@
try {
double y;
- if (bytes[0] == SER_POINT_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_POINT_TYPE_TAG) {
y = ADoubleSerializerDeserializer.getDouble(bytes,
APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y));
aDouble.setValue(y);
doubleSerde.serialize(aDouble, out);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException("get-y does not support the type: " + bytes[0]
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
index de1d42d..b8f76d1 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
@@ -48,16 +48,7 @@
public class TemporalDayAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_DAY;
-
- // allowed input types
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -104,27 +95,27 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationDay(ADurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(
+ calSystem.getDurationDay(ADurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationDay(ADayTimeDurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationDay(ADayTimeDurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_DATE_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
index cf170ec..6b7dbdb 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
@@ -48,16 +48,7 @@
public class TemporalHourAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_HOUR;
-
- // allowed input types
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -105,26 +96,26 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationHour(ADurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationHour(ADurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationHour(ADayTimeDurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationHour(ADayTimeDurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_TIME_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
index b0fa757..8e42b79 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
@@ -49,16 +49,7 @@
public class TemporalIntervalEndAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -67,6 +58,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -106,19 +98,19 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, 1);
- if (timeType == SER_DATE_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (endTime));
dateSerde.serialize(aDate, out);
- } else if (timeType == SER_TIME_TYPE_TAG) {
+ } else if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (endTime));
timeSerde.serialize(aTime, out);
- } else if (timeType == SER_DATETIME_TYPE_TAG) {
+ } else if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(endTime);
datetimeSerde.serialize(aDateTime, out);
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
index c9c2312..38141a2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
@@ -43,16 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalIntervalEndDateAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END_DATE;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +53,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -93,24 +86,24 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, 1);
- if (timeType == SER_DATE_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (endTime));
dateSerde.serialize(aDate, out);
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of DATE), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of DATE), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of DATE), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of DATE), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
index 222d616..e610363 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
@@ -47,12 +47,6 @@
private static final long serialVersionUID = 1L;
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END_DATETIME;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +55,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -92,13 +87,13 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, 1);
- if (timeType == SER_DATETIME_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(endTime);
datetimeSerde.serialize(aDateTime, out);
} else {
@@ -107,9 +102,9 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(timeType) + ")");
}
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of DATETIME), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of DATETIME), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
index f684a9c..dc5ff7e 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
@@ -43,16 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalIntervalEndTimeAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END_TIME;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +53,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -92,13 +85,13 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, 1);
- if (timeType == SER_TIME_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (endTime));
timeSerde.serialize(aTime, out);
} else {
@@ -107,9 +100,9 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(timeType) + ")");
}
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of TIME), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of TIME), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
index 9eb6c44..3c2440b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
@@ -49,16 +49,7 @@
public class TemporalIntervalStartAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -67,6 +58,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -106,19 +98,19 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes, 1);
- if (timeType == SER_DATE_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (startTime));
dateSerde.serialize(aDate, out);
- } else if (timeType == SER_TIME_TYPE_TAG) {
+ } else if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (startTime));
timeSerde.serialize(aTime, out);
- } else if (timeType == SER_DATETIME_TYPE_TAG) {
+ } else if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(startTime);
datetimeSerde.serialize(aDateTime, out);
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
index 604c868..ff2a92b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
@@ -45,14 +45,7 @@
public class TemporalIntervalStartDateAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START_DATE;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +54,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -92,13 +86,13 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes, 1);
- if (timeType == SER_DATE_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (startTime));
dateSerde.serialize(aDate, out);
} else {
@@ -107,9 +101,9 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(timeType) + ")");
}
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of DATE), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of DATE), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
index 0b52fbc..c88b8f0 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
@@ -43,16 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalIntervalStartDatetimeAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START_DATETIME;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +53,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -92,13 +85,13 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes, 1);
- if (timeType == SER_DATETIME_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(startTime);
datetimeSerde.serialize(aDateTime, out);
} else {
@@ -107,9 +100,9 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(timeType) + ")");
}
} else {
- throw new AlgebricksException(FID.getName()
- + ": expects NULL/INTERVAL(of DATETIME), but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ throw new AlgebricksException(
+ FID.getName() + ": expects NULL/INTERVAL(of DATETIME), but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
}
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
index 6605857..8308d1f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
@@ -45,14 +45,7 @@
public class TemporalIntervalStartTimeAccessor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START_TIME;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -61,6 +54,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -92,13 +86,13 @@
byte[] bytes = argOut.getByteArray();
try {
- if (bytes[0] == SER_NULL_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes, 1);
- if (timeType == SER_TIME_TYPE_TAG) {
+ if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (startTime));
timeSerde.serialize(aTime, out);
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
index 8f2c60e..a20e541 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
@@ -46,18 +46,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalMillisecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MILLISEC;
-
- // allowed input types
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -105,27 +95,26 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationMillisecond(ADurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationMillisecond(ADurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem
- .getDurationMillisecond(ADayTimeDurationSerializerDeserializer.getDayTime(
- bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem.getDurationMillisecond(
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_TIME_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
index cba5e92..07e7c2d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
@@ -51,13 +51,6 @@
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MIN;
- // allowed input types
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -105,27 +98,26 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationMinute(ADurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationMinute(ADurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- aMutableInt64
- .setValue(calSystem.getDurationMinute(ADayTimeDurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem.getDurationMinute(
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_TIME_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
index 48856ca..5d721c4 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
@@ -46,18 +46,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalMonthAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MONTH;
-
- // allowed input types
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_YEAR_MONTH_TYPE_TAG = ATypeTag.YEARMONTHDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -105,28 +95,27 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationMonth(ADurationSerializerDeserializer
- .getYearMonth(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationMonth(ADurationSerializerDeserializer.getYearMonth(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_YEAR_MONTH_TYPE_TAG) {
- aMutableInt64.setValue(calSystem
- .getDurationMonth(AYearMonthDurationSerializerDeserializer.getYearMonth(bytes,
- 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem.getDurationMonth(
+ AYearMonthDurationSerializerDeserializer.getYearMonth(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_DATE_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
index 50780da..49db89d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
@@ -46,18 +46,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalSecondAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_SEC;
-
- // allowed input types
- private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -105,27 +95,26 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationSecond(ADurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationSecond(ADurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- aMutableInt64
- .setValue(calSystem.getDurationSecond(ADayTimeDurationSerializerDeserializer
- .getDayTime(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem.getDurationSecond(
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_TIME_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1);
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
index 0358751..928d6ea 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
@@ -48,19 +48,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TemporalYearAccessor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_YEAR;
-
- // allowed input types
- private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private static final byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private static final byte SER_YEAR_MONTH_DURATION_TYPE_TAG = ATypeTag.YEARMONTHDURATION.serialize();
- private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -107,46 +96,41 @@
try {
- if (bytes[0] == SER_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem.getDurationYear(ADurationSerializerDeserializer
- .getYearMonth(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem
+ .getDurationYear(ADurationSerializerDeserializer.getYearMonth(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
- if (bytes[0] == SER_YEAR_MONTH_DURATION_TYPE_TAG) {
- aMutableInt64.setValue(calSystem
- .getDurationYear(AYearMonthDurationSerializerDeserializer
- .getYearMonth(bytes, 1)));
+ if (bytes[0] == ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG) {
+ aMutableInt64.setValue(calSystem.getDurationYear(
+ AYearMonthDurationSerializerDeserializer.getYearMonth(bytes, 1)));
intSerde.serialize(aMutableInt64, out);
return;
}
long chrononTimeInMs = 0;
- if (bytes[0] == SER_DATE_TYPE_TAG) {
+ if (bytes[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
chrononTimeInMs = AInt32SerializerDeserializer.getInt(bytes, 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
- } else if (bytes[0] == SER_DATETIME_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
chrononTimeInMs = AInt64SerializerDeserializer.getLong(bytes, 1);
- } else if (bytes[0] == SER_NULL_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (bytes[0] == SER_STRING_TYPE_TAG) {
+ } else if (bytes[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
int year;
strExprPtr.set(bytes, 1, bytes.length);
strIter.reset(strExprPtr);
char firstChar = strIter.next();
if (firstChar == '-') {
// in case of a negative year
- year = -1
- * ((strIter.next() - '0') * 1000
- + (strIter.next() - '0') * 100
+ year = -1 * ((strIter.next() - '0') * 1000 + (strIter.next() - '0') * 100
+ (strIter.next() - '0') * 10 + (strIter.next() - '0'));
} else {
- year = (firstChar - '0') * 1000
- + (strIter.next() - '0') * 100
- + (strIter.next() - '0') * 10
- + (strIter.next() - '0');
+ year = (firstChar - '0') * 1000 + (strIter.next() - '0') * 100
+ + (strIter.next() - '0') * 10 + (strIter.next() - '0');
}
aMutableInt64.setValue(year);
intSerde.serialize(aMutableInt64, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
index d9f2949..55e7d15 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/ClosedRecordConstructorEvalFactory.java
@@ -65,7 +65,6 @@
private IARecordBuilder recBuilder = new RecordBuilder();
private ARecordType recType;
private ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private boolean first = true;
public ClosedRecordConstructorEval(ARecordType recType, ICopyEvaluator[] evalFields,
@@ -87,7 +86,7 @@
for (int i = 0; i < evalFields.length; i++) {
fieldValueBuffer.reset();
evalFields[i].evaluate(tuple);
- if (fieldValueBuffer.getByteArray()[0] != SER_NULL_TYPE_TAG) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
recBuilder.addField(i, fieldValueBuffer);
}
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
index 58d1359..d4563a8 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
@@ -45,8 +45,6 @@
private static final long serialVersionUID = 1L;
- private static final byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
-
private ICopyEvaluatorFactory recordEvalFactory;
private ICopyEvaluatorFactory dimensionEvalFactory;
private ICopyEvaluatorFactory coordinateEvalFactory;
@@ -82,8 +80,8 @@
eval2.evaluate(tuple);
// type-check: (Point/Line/Polygon/Circle/Rectangle/Null, Int32, Int32)
- if (outInput1.getByteArray()[0] != SER_INT32_TYPE_TAG
- || outInput2.getByteArray()[0] != SER_INT32_TYPE_TAG) {
+ if (outInput1.getByteArray()[0] != ATypeTag.SERIALIZED_INT32_TYPE_TAG
+ || outInput2.getByteArray()[0] != ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
throw new AlgebricksException(
"Expects Types: (Point/Line/Polygon/Circle/Rectangle/Null, Int32, Int32).");
}
@@ -99,8 +97,8 @@
case POINT:
switch (coordinate) {
case 0: // 0 is for min x, 1 is for min y, 2
- // for
- // max x, and 3 for max y
+ // for
+ // max x, and 3 for max y
case 2: {
double x = ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(),
APointSerializerDeserializer.getCoordinateOffset(Coordinate.X));
@@ -117,8 +115,8 @@
}
break;
default: {
- throw new NotImplementedException(coordinate
- + " is not a valid coordinate option");
+ throw new NotImplementedException(
+ coordinate + " is not a valid coordinate option");
}
}
break;
@@ -170,8 +168,8 @@
}
break;
default: {
- throw new NotImplementedException(coordinate
- + " is not a valid coordinate option");
+ throw new NotImplementedException(
+ coordinate + " is not a valid coordinate option");
}
}
break;
@@ -182,9 +180,9 @@
case 0: {
value = Double.MAX_VALUE;
for (int i = 0; i < numOfPoints; i++) {
- double x = ADoubleSerializerDeserializer
- .getDouble(outInput0.getByteArray(), APolygonSerializerDeserializer
- .getCoordinateOffset(i, Coordinate.X));
+ double x = ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(),
+ APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.X));
value = Math.min(x, value);
}
}
@@ -192,9 +190,9 @@
case 1: {
value = Double.MAX_VALUE;
for (int i = 0; i < numOfPoints; i++) {
- double y = ADoubleSerializerDeserializer
- .getDouble(outInput0.getByteArray(), APolygonSerializerDeserializer
- .getCoordinateOffset(i, Coordinate.Y));
+ double y = ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(),
+ APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.Y));
value = Math.min(y, value);
}
}
@@ -202,9 +200,9 @@
case 2: {
value = Double.MIN_VALUE;
for (int i = 0; i < numOfPoints; i++) {
- double x = ADoubleSerializerDeserializer
- .getDouble(outInput0.getByteArray(), APolygonSerializerDeserializer
- .getCoordinateOffset(i, Coordinate.X));
+ double x = ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(),
+ APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.X));
value = Math.max(x, value);
}
}
@@ -212,16 +210,16 @@
case 3: {
value = Double.MIN_VALUE;
for (int i = 0; i < numOfPoints; i++) {
- double y = ADoubleSerializerDeserializer
- .getDouble(outInput0.getByteArray(), APolygonSerializerDeserializer
- .getCoordinateOffset(i, Coordinate.Y));
+ double y = ADoubleSerializerDeserializer.getDouble(outInput0.getByteArray(),
+ APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.Y));
value = Math.max(y, value);
}
}
break;
default: {
- throw new NotImplementedException(coordinate
- + " is not a valid coordinate option");
+ throw new NotImplementedException(
+ coordinate + " is not a valid coordinate option");
}
}
break;
@@ -272,8 +270,8 @@
}
break;
default: {
- throw new NotImplementedException(coordinate
- + " is not a valid coordinate option");
+ throw new NotImplementedException(
+ coordinate + " is not a valid coordinate option");
}
}
break;
@@ -305,13 +303,13 @@
}
break;
default: {
- throw new NotImplementedException(coordinate
- + " is not a valid coordinate option");
+ throw new NotImplementedException(
+ coordinate + " is not a valid coordinate option");
}
}
break;
case NULL: {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
default:
@@ -323,7 +321,7 @@
} else {
throw new NotImplementedException(dimension + "D is not supported");
}
- out.writeByte(ATypeTag.DOUBLE.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
out.writeDouble(value);
} catch (HyracksDataException hde) {
throw new AlgebricksException(hde);
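On the output side the same constants replace ad-hoc ATypeTag.X.serialize() calls when an evaluator writes its result: CreateMBREvalFactory above now emits ATypeTag.SERIALIZED_NULL_TYPE_TAG or ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG as the leading tag byte, followed by the payload. A hedged sketch of that one-tag-byte-plus-payload layout, using java.io directly rather than the project's serde providers (the helper names and numeric tag values here are assumptions for illustration, not the project's API):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class TaggedOutputSketch {
        static final byte SERIALIZED_DOUBLE_TYPE_TAG = 21; // assumed value, illustration only
        static final byte SERIALIZED_NULL_TYPE_TAG = 1;    // assumed value, illustration only

        static void writeTaggedDouble(DataOutputStream out, double value) throws IOException {
            out.writeByte(SERIALIZED_DOUBLE_TYPE_TAG); // tag byte first ...
            out.writeDouble(value);                    // ... then the 8-byte payload
        }

        static void writeTaggedNull(DataOutputStream out) throws IOException {
            out.writeByte(SERIALIZED_NULL_TYPE_TAG);   // NULL carries no payload
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            writeTaggedDouble(new DataOutputStream(buf), 42.0);
            byte[] bytes = buf.toByteArray();
            // bytes[0] is the tag a downstream evaluator would branch on.
            System.out.println(bytes.length + " bytes, tag=" + bytes[0]);
        }
    }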
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ABooleanConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ABooleanConstructorDescriptor.java
index d865e3e..c07d6f4 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ABooleanConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ABooleanConstructorDescriptor.java
@@ -43,11 +43,9 @@
import org.apache.hyracks.util.string.UTF8StringUtil;
public class ABooleanConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ABooleanConstructorDescriptor();
}
@@ -84,19 +82,19 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- if (utf8BinaryComparator
- .compare(serString, 1, outInput.getLength(), TRUE, 0, TRUE.length) == 0) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), TRUE, 0,
+ TRUE.length) == 0) {
booleanSerde.serialize(ABoolean.TRUE, out);
return;
- } else if (utf8BinaryComparator
- .compare(serString, 1, outInput.getLength(), FALSE, 0, FALSE.length) == 0) {
+ } else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), FALSE, 0,
+ FALSE.length) == 0) {
booleanSerde.serialize(ABoolean.FALSE, out);
return;
} else
throw new AlgebricksException(errorMessage);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ACircleConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ACircleConstructorDescriptor.java
index e4a33f5..e91cf0d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ACircleConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ACircleConstructorDescriptor.java
@@ -45,9 +45,8 @@
public class ACircleConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ACircleConstructorDescriptor();
}
@@ -85,8 +84,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
int commaIndex = s.indexOf(',');
int spaceIndex = s.indexOf(' ', commaIndex + 1);
@@ -94,7 +93,7 @@
Double.parseDouble(s.substring(commaIndex + 1, spaceIndex)));
aCircle.setValue(aPoint, Double.parseDouble(s.substring(spaceIndex + 1, s.length())));
circleSerde.serialize(aCircle, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
index a73dec6..5c43f15 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateConstructorDescriptor.java
@@ -44,12 +44,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ADateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ADateConstructorDescriptor();
}
@@ -86,9 +83,9 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
// the string to be parsed should be at least 8 characters: YYYYMMDD
@@ -102,7 +99,7 @@
while (serString[startOffset] == ' ') {
startOffset++;
}
- int endOffset = startOffset + stringLength - 1 ;
+ int endOffset = startOffset + stringLength - 1;
while (serString[endOffset] == ' ') {
endOffset--;
}
@@ -119,7 +116,7 @@
aDate.setValue((int) (chrononTimeInMs / GregorianCalendarSystem.CHRONON_OF_DAY) - temp);
dateSerde.serialize(aDate, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
index 0775e80..3f6a746 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADateTimeConstructorDescriptor.java
@@ -44,12 +44,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ADateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ADateTimeConstructorDescriptor();
}
@@ -85,7 +82,7 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
@@ -108,16 +105,15 @@
}
}
- long chrononTimeInMs = ADateParserFactory
- .parseDatePart(serString, startOffset, timeOffset);
+ long chrononTimeInMs = ADateParserFactory.parseDatePart(serString, startOffset,
+ timeOffset);
- chrononTimeInMs += ATimeParserFactory
- .parseTimePart(serString, startOffset + timeOffset + 1,
- stringLength - timeOffset - 1);
+ chrononTimeInMs += ATimeParserFactory.parseTimePart(serString,
+ startOffset + timeOffset + 1, stringLength - timeOffset - 1);
aDateTime.setValue(chrononTimeInMs);
datetimeSerde.serialize(aDateTime, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADayTimeDurationConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADayTimeDurationConstructorDescriptor.java
index 67cd3d5..0054848 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADayTimeDurationConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADayTimeDurationConstructorDescriptor.java
@@ -45,9 +45,6 @@
public class ADayTimeDurationConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -86,17 +83,17 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength() -1);
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
int startOffset = utf8Ptr.getCharStartOffset();
- ADurationParserFactory.parseDuration(serString, startOffset, stringLength, aDayTimeDuration,
- ADurationParseOption.DAY_TIME);
+ ADurationParserFactory.parseDuration(serString, startOffset, stringLength,
+ aDayTimeDuration, ADurationParseOption.DAY_TIME);
dayTimeDurationSerde.serialize(aDayTimeDuration, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADoubleConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADoubleConstructorDescriptor.java
index 2ad883a..16eea9d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADoubleConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADoubleConstructorDescriptor.java
@@ -45,11 +45,9 @@
import org.apache.hyracks.util.string.UTF8StringUtil;
public class ADoubleConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ADoubleConstructorDescriptor();
}
@@ -72,13 +70,6 @@
private final byte[] POSITIVE_INF = UTF8StringUtil.writeStringToBytes("INF");
private final byte[] NEGATIVE_INF = UTF8StringUtil.writeStringToBytes("-INF");
private final byte[] NAN = UTF8StringUtil.writeStringToBytes("NaN");
- // private int offset = 3, value = 0, integerPart = 0,
- // fractionPart = 0, exponentPart = 0,
- // pointIndex = 0, eIndex = 1;
- // double doubleValue = 0;
- // boolean positiveInteger = true, positiveExponent = true,
- // expectingInteger = true,
- // expectingFraction = false, expectingExponent = false;
IBinaryComparator utf8BinaryComparator = AqlBinaryComparatorFactoryProvider.UTF8STRING_POINTABLE_INSTANCE
.createBinaryComparator();
private AMutableDouble aDouble = new AMutableDouble(0);
@@ -98,24 +89,23 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
- if (utf8BinaryComparator
- .compare(serString, 1, outInput.getLength(), POSITIVE_INF, 0, 5) == 0) {
+ if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), POSITIVE_INF, 0,
+ 5) == 0) {
aDouble.setValue(Double.POSITIVE_INFINITY);
} else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(),
NEGATIVE_INF, 0, 6) == 0) {
aDouble.setValue(Double.NEGATIVE_INFINITY);
- } else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NAN, 0, 5)
- == 0) {
+ } else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NAN, 0,
+ 5) == 0) {
aDouble.setValue(Double.NaN);
} else {
- // out.writeDouble(parseDouble(serString));
utf8Ptr.set(serString, 1, outInput.getLength() - 1);
aDouble.setValue(Double.parseDouble(utf8Ptr.toString()));
}
doubleSerde.serialize(aDouble, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
@@ -123,97 +113,6 @@
throw new AlgebricksException(errorMessage);
}
}
-
- // private double parseDouble(byte[] serString) throws
- // AlgebricksException {
- //
- // if (serString[offset] == '+')
- // offset++;
- // else if (serString[offset] == '-') {
- // offset++;
- // positiveInteger = false;
- // }
- //
- // if ((serString[offset] == '.') || (serString[offset] ==
- // 'e') || (serString[offset] == 'E')
- // || (serString[outInput.getLength() - 1] == '.')
- // || (serString[outInput.getLength() - 1] == 'E')
- // || (serString[outInput.getLength() - 1] == 'e'))
- // throw new AlgebricksException(errorMessage);
- //
- // for (; offset < outInput.getLength(); offset++) {
- // if (serString[offset] >= '0' && serString[offset] <= '9')
- // {
- // value = value * 10 + serString[offset] - '0';
- // } else
- // switch (serString[offset]) {
- // case '.':
- // if (expectingInteger) {
- // if (serString[offset + 1] < '0' || serString[offset + 1]
- // > '9')
- // throw new AlgebricksException(errorMessage);
- // expectingInteger = false;
- // expectingFraction = true;
- // integerPart = value;
- // value = 0;
- // pointIndex = offset;
- // eIndex = outInput.getLength();
- // } else
- // throw new AlgebricksException(errorMessage);
- // break;
- // case 'e':
- // case 'E':
- // if (expectingInteger) {
- // expectingInteger = false;
- // integerPart = value;
- // pointIndex = offset - 1;
- // eIndex = offset;
- // value = 0;
- // expectingExponent = true;
- // } else if (expectingFraction) {
- //
- // expectingFraction = false;
- // fractionPart = value;
- // eIndex = offset;
- // value = 0;
- // expectingExponent = true;
- // } else
- // throw new AlgebricksException();
- //
- // if (serString[offset + 1] == '+')
- // offset++;
- // else if (serString[offset + 1] == '-') {
- // offset++;
- // positiveExponent = false;
- // } else if (serString[offset + 1] < '0' ||
- // serString[offset + 1] > '9')
- // throw new AlgebricksException(errorMessage);
- // break;
- // default:
- // throw new AlgebricksException(errorMessage);
- // }
- // }
- //
- // if (expectingInteger)
- // integerPart = value;
- // else if (expectingFraction)
- // fractionPart = value;
- // else if (expectingExponent)
- // exponentPart = value * (positiveExponent ? 1 : -1);
- //
- // doubleValue = (float) (integerPart + (fractionPart * (1 /
- // Math.pow(10, eIndex - pointIndex - 1))));
- // doubleValue *= (float) Math.pow(10.0, exponentPart);
- // if (integerPart != 0
- // && (doubleValue == Float.POSITIVE_INFINITY || doubleValue
- // == Float.NEGATIVE_INFINITY || doubleValue == 0))
- // throw new AlgebricksException(errorMessage);
- //
- // if (doubleValue > 0 && !positiveInteger)
- // doubleValue *= -1;
- //
- // return doubleValue;
- // }
};
}
};
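
Note on the pattern above: every scalar constructor in this patch dispatches on the first byte of its serialized argument, and the change replaces the per-class cached `ATypeTag.X.serialize()` fields with the shared `ATypeTag.SERIALIZED_X_TYPE_TAG` constants. Below is a minimal, self-contained sketch of that tag dispatch; the `Tag` enum and its byte values are illustrative stand-ins, not the real `ATypeTag` encoding or payload format.

    import java.nio.charset.StandardCharsets;

    public class TagDispatchSketch {

        // Illustrative stand-in for ATypeTag: the byte values are made up; only the
        // shape (one precomputed serialized byte per tag) mirrors the real enum.
        enum Tag {
            STRING((byte) 13), NULL((byte) 15);

            final byte serialized;

            Tag(byte serialized) {
                this.serialized = serialized;
            }
        }

        static final byte SERIALIZED_STRING_TYPE_TAG = Tag.STRING.serialized;
        static final byte SERIALIZED_NULL_TYPE_TAG = Tag.NULL.serialized;

        // Branch on the leading type-tag byte, as the constructor evaluators do.
        static String describe(byte[] serialized) {
            if (serialized.length == 0) {
                throw new IllegalArgumentException("empty value");
            }
            if (serialized[0] == SERIALIZED_STRING_TYPE_TAG) {
                // Payload follows the tag byte; the real format is length-prefixed UTF-8.
                return "string: " + new String(serialized, 1, serialized.length - 1, StandardCharsets.UTF_8);
            } else if (serialized[0] == SERIALIZED_NULL_TYPE_TAG) {
                return "null";
            } else {
                throw new IllegalArgumentException("unexpected type tag " + serialized[0]);
            }
        }

        public static void main(String[] args) {
            byte[] payload = "1.5".getBytes(StandardCharsets.UTF_8);
            byte[] tagged = new byte[payload.length + 1];
            tagged[0] = SERIALIZED_STRING_TYPE_TAG;
            System.arraycopy(payload, 0, tagged, 1, payload.length);
            System.out.println(describe(tagged));                                  // string: 1.5
            System.out.println(describe(new byte[] { SERIALIZED_NULL_TYPE_TAG })); // null
        }
    }
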
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
index c51c5f2..2f95a2f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ADurationConstructorDescriptor.java
@@ -43,12 +43,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ADurationConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ADurationConstructorDescriptor();
}
@@ -85,17 +82,16 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- ADurationParserFactory
- .parseDuration(serString, utf8Ptr.getCharStartOffset(), stringLength, aDuration,
- ADurationParseOption.All);
+ ADurationParserFactory.parseDuration(serString, utf8Ptr.getCharStartOffset(),
+ stringLength, aDuration, ADurationParseOption.All);
durationSerde.serialize(aDuration, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AFloatConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AFloatConstructorDescriptor.java
index 3de5265..d5c2b8c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AFloatConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AFloatConstructorDescriptor.java
@@ -45,11 +45,9 @@
import org.apache.hyracks.util.string.UTF8StringUtil;
public class AFloatConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AFloatConstructorDescriptor();
}
@@ -98,23 +96,23 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- if (utf8BinaryComparator
- .compare(serString, 1, outInput.getLength(), POSITIVE_INF, 0, 5) == 0) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), POSITIVE_INF, 0,
+ 5) == 0) {
aFloat.setValue(Float.POSITIVE_INFINITY);
} else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(),
NEGATIVE_INF, 0, 6) == 0) {
aFloat.setValue(Float.NEGATIVE_INFINITY);
- } else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NAN, 0, 5)
- == 0) {
+ } else if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NAN, 0,
+ 5) == 0) {
aFloat.setValue(Float.NaN);
} else {
- utf8Ptr.set(serString, 1, outInput.getLength() -1);
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
aFloat.setValue(Float.parseFloat(utf8Ptr.toString()));
}
floatSerde.serialize(aFloat, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt16ConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt16ConstructorDescriptor.java
index c498217..1919344 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt16ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt16ConstructorDescriptor.java
@@ -44,9 +44,8 @@
public class AInt16ConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AInt16ConstructorDescriptor();
}
@@ -85,8 +84,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
offset = utf8Ptr.getCharStartOffset();
value = 0;
positive = true;
@@ -112,7 +111,7 @@
aInt16.setValue(value);
int16Serde.serialize(aInt16, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
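
The int8/int16/int32/int64 constructors all perform the manual digit-accumulation parse hinted at by the `offset`/`value`/`positive` setup in the hunks above. The loop body itself is outside the hunks, so the sketch below is an assumed reconstruction for int16 (hypothetical method name, plain `String` input instead of the UTF-8 pointer), not the evaluator's exact code.

    public class Int16ParseSketch {

        // Assumed reconstruction of the sign-then-digits accumulation loop.
        static short parseInt16(String s) {
            if (s.isEmpty()) {
                throw new IllegalArgumentException("empty int16 literal");
            }
            int offset = 0;
            boolean positive = true;
            if (s.charAt(offset) == '+') {
                offset++;
            } else if (s.charAt(offset) == '-') {
                offset++;
                positive = false;
            }
            if (offset == s.length()) {
                throw new IllegalArgumentException("not an int16: " + s);
            }
            int value = 0;
            for (; offset < s.length(); offset++) {
                char c = s.charAt(offset);
                if (c < '0' || c > '9') {
                    throw new IllegalArgumentException("not an int16: " + s);
                }
                value = value * 10 + (c - '0');
                if (value > Short.MAX_VALUE + 1) { // allow -32768 but nothing larger in magnitude
                    throw new IllegalArgumentException("int16 overflow: " + s);
                }
            }
            int signed = positive ? value : -value;
            if (signed > Short.MAX_VALUE || signed < Short.MIN_VALUE) {
                throw new IllegalArgumentException("int16 overflow: " + s);
            }
            return (short) signed;
        }

        public static void main(String[] args) {
            System.out.println(parseInt16("-123")); // -123
        }
    }
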
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt32ConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt32ConstructorDescriptor.java
index f08972f..40e1289 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt32ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt32ConstructorDescriptor.java
@@ -42,11 +42,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AInt32ConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AInt32ConstructorDescriptor();
}
@@ -84,8 +82,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
offset = utf8Ptr.getCharStartOffset();
value = 0;
positive = true;
@@ -111,7 +109,7 @@
aInt32.setValue(value);
int32Serde.serialize(aInt32, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
index eb43c5d..2de9463 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt64ConstructorDescriptor.java
@@ -42,11 +42,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AInt64ConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AInt64ConstructorDescriptor();
}
@@ -85,8 +83,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
offset = utf8Ptr.getCharStartOffset();
value = 0;
positive = true;
@@ -113,7 +111,7 @@
aInt64.setValue(value);
int64Serde.serialize(aInt64, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt8ConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt8ConstructorDescriptor.java
index 2c8010b..106464b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt8ConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AInt8ConstructorDescriptor.java
@@ -42,11 +42,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AInt8ConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AInt8ConstructorDescriptor();
}
@@ -85,8 +83,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
offset = utf8Ptr.getCharStartOffset();
value = 0;
positive = true;
@@ -112,7 +110,7 @@
aInt8.setValue(value);
int8Serde.serialize(aInt8, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
index d238938..ecfed56 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
@@ -46,14 +46,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AIntervalFromDateConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_DATE;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalFromDateConstructorDescriptor();
}
@@ -95,36 +91,36 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
intervalStart = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- intervalStart = ADateParserFactory
- .parseDatePart(utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength) / GregorianCalendarSystem.CHRONON_OF_DAY;
+ intervalStart = ADateParserFactory.parseDatePart(utf8Ptr.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength)
+ / GregorianCalendarSystem.CHRONON_OF_DAY;
} else {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/STRING/DATE for the first argument, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
intervalEnd = ADateSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- intervalEnd = ADateParserFactory
- .parseDatePart(utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength) / GregorianCalendarSystem.CHRONON_OF_DAY;
+ intervalEnd = ADateParserFactory.parseDatePart(utf8Ptr.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength)
+ / GregorianCalendarSystem.CHRONON_OF_DAY;
} else {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/STRING/DATE for the second argument, but got "
@@ -132,11 +128,11 @@
}
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_DATE_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
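
For the date-based interval constructor, the string branch parses a millisecond chronon and divides by `GregorianCalendarSystem.CHRONON_OF_DAY` before the end-before-start check. The standalone arithmetic below mirrors that flow under simplifying assumptions: `java.time` replaces `ADateParserFactory`, and the 86,400,000 ms/day constant stands in for `CHRONON_OF_DAY`.

    import java.time.LocalDate;

    public class IntervalFromDateSketch {

        // Stand-in for GregorianCalendarSystem.CHRONON_OF_DAY: milliseconds per day.
        static final long CHRONON_OF_DAY = 24L * 60 * 60 * 1000;

        // Simplified stand-in for ADateParserFactory.parseDatePart: millisecond chronon since the epoch.
        static long parseDateMillis(String isoDate) {
            return LocalDate.parse(isoDate).toEpochDay() * CHRONON_OF_DAY;
        }

        static long[] buildInterval(String start, String end) {
            long intervalStart = parseDateMillis(start) / CHRONON_OF_DAY; // day granularity, as in the evaluator
            long intervalEnd = parseDateMillis(end) / CHRONON_OF_DAY;
            if (intervalEnd < intervalStart) {
                // Same guard as the evaluator.
                throw new IllegalArgumentException("interval end must not be less than the interval start.");
            }
            return new long[] { intervalStart, intervalEnd };
        }

        public static void main(String[] args) {
            long[] interval = buildInterval("2015-01-01", "2015-01-31");
            System.out.println(interval[0] + " .. " + interval[1]); // two day chronons, 30 days apart
        }
    }
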
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
index 939485e..2972912 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
@@ -49,11 +49,9 @@
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_DATETIME;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalFromDateTimeConstructorDescriptor();
}
@@ -95,17 +93,17 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
intervalStart = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
// start datetime
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
@@ -120,20 +118,19 @@
throw new AlgebricksException(errorMessage + ": missing T");
}
}
- intervalStart = ADateParserFactory
- .parseDatePart(argOut0.getByteArray(), startOffset, timeOffset);
- intervalStart += ATimeParserFactory
- .parseTimePart(argOut0.getByteArray(), startOffset + timeOffset + 1,
- stringLength - timeOffset - 1);
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), startOffset,
+ timeOffset);
+ intervalStart += ATimeParserFactory.parseTimePart(argOut0.getByteArray(),
+ startOffset + timeOffset + 1, stringLength - timeOffset - 1);
} else {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/STRING/DATETIME for the first argument, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
intervalEnd = ADateTimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
// start datetime
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
@@ -147,8 +144,8 @@
throw new AlgebricksException(errorMessage + ": missing T");
}
}
- intervalEnd = ADateParserFactory
- .parseDatePart(argOut1.getByteArray(), startOffset, timeOffset);
+ intervalEnd = ADateParserFactory.parseDatePart(argOut1.getByteArray(), startOffset,
+ timeOffset);
intervalEnd += ATimeParserFactory.parseTimePart(argOut1.getByteArray(),
startOffset + timeOffset + 1, stringLength - timeOffset - 1);
} else {
@@ -158,11 +155,11 @@
}
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
index dbb05e5..25bc7df 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
@@ -46,14 +46,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AIntervalFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_TIME;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalFromTimeConstructorDescriptor();
}
@@ -95,24 +91,23 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
intervalStart = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
// start date
int stringLength = utf8Ptr.getUTF8Length();
- intervalStart = ATimeParserFactory
- .parseTimePart(utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength);
+ intervalStart = ATimeParserFactory.parseTimePart(utf8Ptr.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength);
} else {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/STRING/TIME for the first argument, but got "
@@ -123,16 +118,15 @@
intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
}
- if (argOut1.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
intervalEnd = ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
// start date
int stringLength = utf8Ptr.getUTF8Length();
- intervalEnd = ATimeParserFactory
- .parseTimePart(argOut1.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength);
+ intervalEnd = ATimeParserFactory.parseTimePart(argOut1.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength);
} else {
throw new AlgebricksException(FID.getName()
@@ -145,11 +139,11 @@
}
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_TIME_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
index b82dcac..5638227 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
@@ -56,14 +56,8 @@
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_START_FROM_DATE;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private final static byte SER_YEAR_MONTH_DURATION_TYPE_TAG = ATypeTag.YEARMONTHDURATION.serialize();
- private final static byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalStartFromDateConstructorDescriptor();
}
@@ -106,18 +100,18 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
intervalStart = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
// start date
int stringLength = utf8Ptr.getUTF8Length();
@@ -130,28 +124,27 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1),
ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1), false);
- } else if (argOut1.getByteArray()[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
- intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
- 0,
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
ADayTimeDurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1),
false);
- } else if (argOut1.getByteArray()[0] == SER_YEAR_MONTH_DURATION_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG) {
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
- AYearMonthDurationSerializerDeserializer
- .getYearMonth(argOut1.getByteArray(), 1),
+ AYearMonthDurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(),
+ 1),
0, false);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
// duration
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- ADurationParserFactory
- .parseDuration(argOut1.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength, aDuration, ADurationParseOption.All);
+ ADurationParserFactory.parseDuration(argOut1.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength, aDuration,
+ ADurationParseOption.All);
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
aDuration.getMonths(), aDuration.getMilliseconds(), false);
} else {
@@ -164,11 +157,11 @@
intervalEnd = GregorianCalendarSystem.getChrononInDays(intervalEnd);
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_DATE_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
index 3ad1cc3..3588c7c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
@@ -53,17 +53,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AIntervalStartFromDateTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_START_FROM_DATETIME;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private final static byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
- private final static byte SER_YEAR_MONTH_DURATION_TYPE_TAG = ATypeTag.YEARMONTHDURATION.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalStartFromDateTimeConstructorDescriptor();
}
@@ -106,17 +99,17 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
intervalStart = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
@@ -134,8 +127,8 @@
}
}
- intervalStart = ADateParserFactory
- .parseDatePart(argOut0.getByteArray(), startOffset, timeOffset);
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), startOffset,
+ timeOffset);
intervalStart += ATimeParserFactory.parseTimePart(argOut0.getByteArray(),
startOffset + timeOffset + 1, stringLength - timeOffset - 1);
} else {
@@ -144,29 +137,28 @@
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1),
ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1), false);
- } else if (argOut1.getByteArray()[0] == SER_YEAR_MONTH_DURATION_TYPE_TAG) {
- intervalEnd = DurationArithmeticOperations
- .addDuration(
- intervalStart,
- AYearMonthDurationSerializerDeserializer.getYearMonth(
- argOut1.getByteArray(), 1), 0, false);
- } else if (argOut1.getByteArray()[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG) {
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ AYearMonthDurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(),
+ 1),
+ 0, false);
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
ADayTimeDurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1),
false);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
// duration
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- ADurationParserFactory
- .parseDuration(argOut1.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength, aDuration, ADurationParseOption.All);
+ ADurationParserFactory.parseDuration(argOut1.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength, aDuration,
+ ADurationParseOption.All);
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
aDuration.getMonths(), aDuration.getMilliseconds(), false);
@@ -177,11 +169,11 @@
}
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_DATETIME_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
index 3474ed6..78cdae2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
@@ -52,16 +52,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AIntervalStartFromTimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.INTERVAL_CONSTRUCTOR_START_FROM_TIME;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
- private final static byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AIntervalStartFromTimeConstructorDescriptor();
}
@@ -104,24 +98,23 @@
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
long intervalStart = 0, intervalEnd = 0;
- if (argOut0.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
intervalStart = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- intervalStart = ATimeParserFactory
- .parseTimePart(argOut0.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength);
+ intervalStart = ATimeParserFactory.parseTimePart(argOut0.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength);
} else {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/STRING/TIME for the first argument, but got "
@@ -132,35 +125,35 @@
intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
}
- if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
if (ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1) != 0) {
- throw new AlgebricksException(FID.getName()
- + ": cannot add a year-month duration to a time value.");
+ throw new AlgebricksException(
+ FID.getName() + ": cannot add a year-month duration to a time value.");
}
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1), false);
- } else if (argOut1.getByteArray()[0] == SER_DAY_TIME_DURATION_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
ADayTimeDurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1),
false);
- } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ } else if (argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
// duration
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- ADurationParserFactory
- .parseDuration(argOut1.getByteArray(), utf8Ptr.getCharStartOffset(),
- stringLength, aDuration, ADurationParseOption.All);
+ ADurationParserFactory.parseDuration(argOut1.getByteArray(),
+ utf8Ptr.getCharStartOffset(), stringLength, aDuration,
+ ADurationParseOption.All);
if (aDuration.getMonths() != 0) {
- throw new AlgebricksException(FID.getName()
- + ": cannot add a year-month duration to a time value.");
+ throw new AlgebricksException(
+ FID.getName() + ": cannot add a year-month duration to a time value.");
}
intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
@@ -174,11 +167,11 @@
}
if (intervalEnd < intervalStart) {
- throw new AlgebricksException(FID.getName()
- + ": interval end must not be less than the interval start.");
+ throw new AlgebricksException(
+ FID.getName() + ": interval end must not be less than the interval start.");
}
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.SERIALIZED_TIME_TYPE_TAG);
intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ALineConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ALineConstructorDescriptor.java
index bafb778..44e09ce 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ALineConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ALineConstructorDescriptor.java
@@ -45,9 +45,8 @@
public class ALineConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ALineConstructorDescriptor();
}
@@ -84,8 +83,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
int commaIndex = s.indexOf(',');
int spaceIndex = s.indexOf(' ', commaIndex + 1);
@@ -96,7 +95,7 @@
Double.parseDouble(s.substring(commaIndex + 1, s.length())));
aLine.setValue(aPoint[0], aPoint[1]);
lineSerde.serialize(aLine, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ANullConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ANullConstructorDescriptor.java
index 4b204f4..15779b5 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ANullConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ANullConstructorDescriptor.java
@@ -42,10 +42,9 @@
import org.apache.hyracks.util.string.UTF8StringUtil;
public class ANullConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ANullConstructorDescriptor();
}
@@ -78,8 +77,9 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NULL, 0, NULL.length) == 0) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ if (utf8BinaryComparator.compare(serString, 1, outInput.getLength(), NULL, 0,
+ NULL.length) == 0) {
nullSerde.serialize(ANull.NULL, out);
return;
} else
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APoint3DConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APoint3DConstructorDescriptor.java
index 97bcd02..f1004bc 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APoint3DConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APoint3DConstructorDescriptor.java
@@ -44,9 +44,8 @@
public class APoint3DConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new APoint3DConstructorDescriptor();
}
@@ -82,8 +81,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
int firstCommaIndex = s.indexOf(',');
int secondCommaIndex = s.indexOf(',', firstCommaIndex + 1);
@@ -92,7 +91,7 @@
Double.parseDouble(s.substring(secondCommaIndex + 1, s.length())));
point3DSerde.serialize(aPoint3D, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APointConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APointConstructorDescriptor.java
index 49781d5..43882c9 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APointConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APointConstructorDescriptor.java
@@ -42,11 +42,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class APointConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new APointConstructorDescriptor();
}
@@ -82,13 +80,13 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
aPoint.setValue(Double.parseDouble(s.substring(0, s.indexOf(','))),
Double.parseDouble(s.substring(s.indexOf(',') + 1, s.length())));
pointSerde.serialize(aPoint, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
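
The point constructor's string branch is plain `indexOf`/`substring` parsing of an `x,y` literal. The sketch below isolates just that parsing step, without the serde plumbing; the method name is hypothetical.

    public class PointParseSketch {

        // Parse "x,y" into a two-element array, following the indexOf/substring split in the evaluator.
        static double[] parsePoint(String s) {
            int commaIndex = s.indexOf(',');
            if (commaIndex < 0) {
                throw new IllegalArgumentException("expected \"x,y\", got: " + s);
            }
            double x = Double.parseDouble(s.substring(0, commaIndex));
            double y = Double.parseDouble(s.substring(commaIndex + 1));
            return new double[] { x, y };
        }

        public static void main(String[] args) {
            double[] p = parsePoint("4.0,5.5");
            System.out.println(p[0] + " " + p[1]); // 4.0 5.5
        }
    }
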
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APolygonConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APolygonConstructorDescriptor.java
index 4fb24a5..4b91e18 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APolygonConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/APolygonConstructorDescriptor.java
@@ -41,12 +41,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class APolygonConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_POLYGON_TYPE_TAG = ATypeTag.POLYGON.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new APolygonConstructorDescriptor();
}
@@ -78,18 +75,18 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
String[] points = s.split(" ");
if (points.length <= 2)
throw new AlgebricksException(errorMessage);
- out.writeByte(SER_POLYGON_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_POLYGON_TYPE_TAG);
out.writeShort(points.length);
for (int i = 0; i < points.length; i++)
APointSerializerDeserializer.serialize(Double.parseDouble(points[i].split(",")[0]),
Double.parseDouble(points[i].split(",")[1]), out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ARectangleConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ARectangleConstructorDescriptor.java
index cefa031..5ffa0af 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ARectangleConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ARectangleConstructorDescriptor.java
@@ -43,11 +43,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ARectangleConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ARectangleConstructorDescriptor();
}
@@ -84,8 +82,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength() -1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
String s = utf8Ptr.toString();
int commaIndex = s.indexOf(',');
int spaceIndex = s.indexOf(' ', commaIndex + 1);
@@ -103,7 +101,7 @@
"Rectangle arugment must be either (bottom left point, top right point) or (top right point, bottom left point)");
}
rectangle2DSerde.serialize(aRectangle, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
index daa8ac0..1355ed5 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AStringConstructorDescriptor.java
@@ -51,6 +51,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AStringConstructorDescriptor();
}
@@ -111,8 +112,7 @@
break;
}
case DOUBLE: {
- double d = ADoubleSerializerDeserializer.getDouble(outInput.getByteArray(),
- 1);
+ double d = ADoubleSerializerDeserializer.getDouble(outInput.getByteArray(), 1);
builder.appendString(String.valueOf(d));
break;
}
@@ -122,13 +122,13 @@
break;
}
case BOOLEAN: {
- boolean b = ABooleanSerializerDeserializer.getBoolean(
- outInput.getByteArray(), 1);
+ boolean b = ABooleanSerializerDeserializer.getBoolean(outInput.getByteArray(),
+ 1);
builder.appendString(String.valueOf(b));
break;
}
- // NotYetImplemented
+ // NotYetImplemented
case CIRCLE:
case DATE:
case DATETIME:
@@ -150,7 +150,7 @@
throw new AlgebricksException("string of " + tt + " not supported");
}
builder.finish();
- out.write(ATypeTag.STRING.serialize());
+ out.write(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(baaos.getByteArray(), 0, baaos.getLength());
}
} catch (IOException e) {
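
On the output side, the string constructor finishes the UTF-8 builder and then writes a single `ATypeTag.SERIALIZED_STRING_TYPE_TAG` byte followed by the built payload. The sketch below approximates that layout with `DataOutputStream.writeUTF`; the tag value and the length encoding are assumptions, since the real payload comes from the Hyracks UTF-8 builder rather than `writeUTF`.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class TaggedStringWriteSketch {

        // Illustrative tag byte only; the real value comes from ATypeTag.SERIALIZED_STRING_TYPE_TAG.
        static final byte SERIALIZED_STRING_TYPE_TAG = 13;

        // Write one tag byte followed by a length-prefixed UTF-8 payload (approximating the builder output).
        static byte[] writeTaggedString(String value) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(baos);
            out.writeByte(SERIALIZED_STRING_TYPE_TAG);
            out.writeUTF(value); // stand-in for the UTF8StringBuilder payload
            out.flush();
            return baos.toByteArray();
        }

        public static void main(String[] args) throws IOException {
            byte[] bytes = writeTaggedString("123");
            System.out.println(bytes.length + " bytes, tag=" + bytes[0]); // 6 bytes, tag=13
        }
    }
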
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
index db8f854..acfc1d0 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
@@ -44,12 +44,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ATimeConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new ATimeConstructorDescriptor();
}
@@ -85,9 +82,9 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
int startOffset = utf8Ptr.getCharStartOffset();
@@ -98,7 +95,8 @@
+ stringLength);
}
- int chrononTimeInMs = ATimeParserFactory.parseTimePart(serString, startOffset, stringLength);
+ int chrononTimeInMs = ATimeParserFactory.parseTimePart(serString, startOffset,
+ stringLength);
if (chrononTimeInMs < 0) {
chrononTimeInMs += GregorianCalendarSystem.CHRONON_OF_DAY;
@@ -107,7 +105,7 @@
aTime.setValue(chrononTimeInMs);
timeSerde.serialize(aTime, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AUUIDFromStringConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AUUIDFromStringConstructorDescriptor.java
index 6edd393..c7168c4 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AUUIDFromStringConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AUUIDFromStringConstructorDescriptor.java
@@ -47,11 +47,9 @@
* uuid("02a199ca-bf58-412e-bd9f-60a0c975a8ac"))
*/
public class AUUIDFromStringConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AUUIDFromStringConstructorDescriptor();
}
@@ -90,8 +88,8 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
- utf8Ptr.set(serString, 1, outInput.getLength()-1);
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ utf8Ptr.set(serString, 1, outInput.getLength() - 1);
msb = 0;
lsb = 0;
tmpLongValue = 0;
@@ -138,7 +136,7 @@
aUUID.setValue(msb, lsb);
uuidSerde.serialize(aUUID, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AYearMonthDurationConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AYearMonthDurationConstructorDescriptor.java
index 3b9c006..ed193d6 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AYearMonthDurationConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AYearMonthDurationConstructorDescriptor.java
@@ -43,11 +43,7 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AYearMonthDurationConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -86,16 +82,15 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
utf8Ptr.set(serString, 1, outInput.getLength() - 1);
int stringLength = utf8Ptr.getUTF8Length();
- ADurationParserFactory
- .parseDuration(serString, utf8Ptr.getCharStartOffset(), stringLength,
- aYearMonthDuration, ADurationParseOption.YEAR_MONTH);
+ ADurationParserFactory.parseDuration(serString, utf8Ptr.getCharStartOffset(),
+ stringLength, aYearMonthDuration, ADurationParseOption.YEAR_MONTH);
yearMonthDurationSerde.serialize(aYearMonthDuration, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG) {
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
index 2a7348c..6117452 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/OpenRecordConstructorDescriptor.java
@@ -41,6 +41,7 @@
public class OpenRecordConstructorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new OpenRecordConstructorDescriptor();
}
@@ -99,7 +100,7 @@
evalNames[i].evaluate(tuple);
recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
} else {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
recBuilder.addField(closedFieldId, fieldValueBuffer);
}
closedFieldId++;
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
index 0e454179..a6f9183 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
@@ -37,13 +37,7 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public abstract class AbstractBinaryStringBoolEval implements ICopyEvaluator {
-
private DataOutput dout;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ICopyEvaluator evalLeft;
@@ -77,11 +71,12 @@
evalRight.evaluate(tuple);
try {
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG || array1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
- } else if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ } else if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(funcID.getName() + ": expects input type STRING or NULL, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + " and "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0]) + ")!");
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
index 7dbc99a..a08551f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
@@ -43,8 +43,6 @@
public abstract class AbstractQuadStringStringEval implements ICopyEvaluator {
private DataOutput dout;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array2 = new ArrayBackedValueStorage();
@@ -71,7 +69,7 @@
public AbstractQuadStringStringEval(DataOutput dout, ICopyEvaluatorFactory eval0, ICopyEvaluatorFactory eval1,
ICopyEvaluatorFactory eval2, ICopyEvaluatorFactory eval3, FunctionIdentifier funcID)
- throws AlgebricksException {
+ throws AlgebricksException {
this.dout = dout;
this.eval0 = eval0.createEvaluator(array0);
this.eval1 = eval1.createEvaluator(array1);
@@ -93,14 +91,16 @@
eval3.evaluate(tuple);
try {
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG || array1.getByteArray()[0] == SER_NULL_TYPE_TAG
- || array2.getByteArray()[0] == SER_NULL_TYPE_TAG || array3.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array2.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array3.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
- } else if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array1.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array2.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array3.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ } else if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array2.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array3.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(funcID.getName()
+ ": expects input type (STRING/NULL, STRING/NULL, STRING/NULL, STRING/NULL), but got ("
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + ", "
@@ -127,7 +127,6 @@
}
protected abstract String compute(UTF8StringPointable strPtr1st, UTF8StringPointable strPtr2nd,
- UTF8StringPointable strPtr3rd,
- UTF8StringPointable strPtr4th) throws AlgebricksException;
+ UTF8StringPointable strPtr3rd, UTF8StringPointable strPtr4th) throws AlgebricksException;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
index f966c1c..eb32ac2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
@@ -39,8 +39,6 @@
public abstract class AbstractTripleStringBoolEval implements ICopyEvaluator {
private DataOutput dout;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array2 = new ArrayBackedValueStorage();
@@ -80,19 +78,21 @@
eval2.evaluate(tuple);
try {
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG || array1.getByteArray()[0] == SER_NULL_TYPE_TAG
- || array2.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array2.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
}
- if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG || array1.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array2.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(funcID.getName()
- + ": expects iput type (STRING/NULL, STRING/NULL, STRING) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0]) + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array2.getByteArray()[0]) + ")");
+ if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array2.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ funcID.getName() + ": expects iput type (STRING/NULL, STRING/NULL, STRING) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + ", "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0]) + ", "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array2.getByteArray()[0]) + ")");
}
} catch (HyracksDataException e) {
@@ -103,8 +103,7 @@
strPtr2nd.set(array1.getByteArray(), array1.getStartOffset() + 1, array1.getLength());
strPtr3rd.set(array2.getByteArray(), array2.getStartOffset() + 1, array2.getLength());
- ABoolean res = compute(strPtr1st, strPtr2nd, strPtr3rd) ? ABoolean.TRUE
- : ABoolean.FALSE;
+ ABoolean res = compute(strPtr1st, strPtr2nd, strPtr3rd) ? ABoolean.TRUE : ABoolean.FALSE;
try {
boolSerde.serialize(res, dout);
} catch (HyracksDataException e) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
index 9e79832..bc602c3 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
@@ -37,10 +37,7 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public abstract class AbstractTripleStringStringEval implements ICopyEvaluator {
-
private DataOutput dout;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array2 = new ArrayBackedValueStorage();
@@ -82,18 +79,19 @@
eval2.evaluate(tuple);
try {
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG || array1.getByteArray()[0] == SER_NULL_TYPE_TAG
- || array2.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || array2.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
- } else if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array1.getByteArray()[0] != SER_STRING_TYPE_TAG
- || array2.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(funcID.getName()
- + ": expects input type (STRING/NULL, STRING/NULL, STRING/NULL), but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0]) + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array2.getByteArray()[0]) + ".");
+ } else if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || array2.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ funcID.getName() + ": expects input type (STRING/NULL, STRING/NULL, STRING/NULL), but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]) + ", "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array1.getByteArray()[0]) + ", "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array2.getByteArray()[0]) + ").");
}
} catch (HyracksDataException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AndDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AndDescriptor.java
index 9388f93..e310953 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AndDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AndDescriptor.java
@@ -39,11 +39,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class AndDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AndDescriptor();
}
@@ -86,7 +84,7 @@
for (int i = 0; i < n; i++) {
argOut.reset();
evals[i].evaluate(tuple);
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
metNull = true;
continue;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AnyCollectionMemberDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AnyCollectionMemberDescriptor.java
index 885c7f0..bc5d26c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AnyCollectionMemberDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AnyCollectionMemberDescriptor.java
@@ -47,6 +47,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new AnyCollectionMemberDescriptor();
}
@@ -65,11 +66,7 @@
private static class AnyCollectionMemberEvalFactory implements ICopyEvaluatorFactory {
private static final long serialVersionUID = 1L;
-
private ICopyEvaluatorFactory listEvalFactory;
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_UNORDEREDLIST_TYPE_TAG = ATypeTag.UNORDEREDLIST.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private byte serItemTypeTag;
private ATypeTag itemTag;
private boolean selfDescList = false;
@@ -99,26 +96,27 @@
evalList.evaluate(tuple);
byte[] serList = outInputList.getByteArray();
- if (serList[0] == SER_NULL_TYPE_TAG) {
+ if (serList[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (serList[0] != SER_ORDEREDLIST_TYPE_TAG && serList[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (serList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serList[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER.getName()
+ ": expects input type ORDEREDLIST/UNORDEREDLIST, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[0]));
}
- if (serList[0] == SER_ORDEREDLIST_TYPE_TAG) {
+ if (serList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
if (AOrderedListSerializerDeserializer.getNumberOfItems(serList) == 0) {
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serList, 0);
} else {
if (AUnorderedListSerializerDeserializer.getNumberOfItems(serList) == 0) {
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
itemOffset = AUnorderedListSerializerDeserializer.getItemOffset(serList, 0);
@@ -132,7 +130,8 @@
if (selfDescList) {
itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[itemOffset]);
- itemLength = NonTaggedFormatUtil.getFieldValueLength(serList, itemOffset, itemTag, true) + 1;
+ itemLength = NonTaggedFormatUtil.getFieldValueLength(serList, itemOffset, itemTag, true)
+ + 1;
out.write(serList, itemOffset, itemLength);
} else {
itemLength = NonTaggedFormatUtil.getFieldValueLength(serList, itemOffset, itemTag, false);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CodePointToStringDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CodePointToStringDescriptor.java
index 3b89eba..6e46a23 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CodePointToStringDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CodePointToStringDescriptor.java
@@ -43,6 +43,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new CodePointToStringDescriptor();
}
@@ -65,7 +66,7 @@
private final byte[] currentUTF8 = new byte[6];
private final byte[] tempStoreForLength = new byte[5];
- private final byte stringTypeTag = ATypeTag.STRING.serialize();
+ private final byte stringTypeTag = ATypeTag.SERIALIZED_STRING_TYPE_TAG;
@Override
public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
@@ -97,21 +98,21 @@
// calculate length first
int utf_8_len = 0;
for (int i = 0; i < size; i++) {
- int itemOffset = AOrderedListSerializerDeserializer
- .getItemOffset(serOrderedList, i);
+ int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serOrderedList,
+ i);
int codePoint = 0;
- codePoint = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(
- serOrderedList, itemOffset, 1);
+ codePoint = ATypeHierarchy
+ .getIntegerValueWithDifferentTypeTagPosition(serOrderedList, itemOffset, 1);
utf_8_len += UTF8StringUtil.codePointToUTF8(codePoint, currentUTF8);
}
out.writeByte(stringTypeTag);
UTF8StringUtil.writeUTF8Length(utf_8_len, tempStoreForLength, out);
for (int i = 0; i < size; i++) {
- int itemOffset = AOrderedListSerializerDeserializer
- .getItemOffset(serOrderedList, i);
+ int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serOrderedList,
+ i);
int codePoint = 0;
- codePoint = ATypeHierarchy.getIntegerValueWithDifferentTypeTagPosition(
- serOrderedList, itemOffset, 1);
+ codePoint = ATypeHierarchy
+ .getIntegerValueWithDifferentTypeTagPosition(serOrderedList, itemOffset, 1);
utf_8_len = UTF8StringUtil.codePointToUTF8(codePoint, currentUTF8);
for (int j = 0; j < utf_8_len; j++) {
out.writeByte(currentUTF8[j]);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
index 791e2ee..7311e90 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
@@ -43,14 +43,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class CreatePointDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- // allowed input type
- private static final byte SER_DOUBLE_TYPE_TAG = ATypeTag.DOUBLE.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new CreatePointDescriptor();
}
@@ -84,19 +79,23 @@
eval1.evaluate(tuple);
// type-check: (double, double)
- if ((outInput0.getByteArray()[0] != SER_DOUBLE_TYPE_TAG && outInput0.getByteArray()[0] != SER_NULL_TYPE_TAG)
- || (outInput1.getByteArray()[0] != SER_DOUBLE_TYPE_TAG && outInput1.getByteArray()[0] != SER_NULL_TYPE_TAG)) {
- throw new AlgebricksException(AsterixBuiltinFunctions.CREATE_POINT.getName()
- + ": expects input type: (DOUBLE, DOUBLE) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput1.getByteArray()[0])
- + ").");
+ if ((outInput0.getByteArray()[0] != ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG
+ && outInput0.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)
+ || (outInput1.getByteArray()[0] != ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG
+ && outInput1.getByteArray()[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)) {
+ throw new AlgebricksException(
+ AsterixBuiltinFunctions.CREATE_POINT.getName()
+ + ": expects input type: (DOUBLE, DOUBLE) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ outInput0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(outInput1.getByteArray()[0])
+ + ").");
}
try {
- if (outInput0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || outInput1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (outInput0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || outInput1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL)
.serialize(ANull.NULL, out);
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
index c46c8bf..bba4478 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
@@ -46,11 +46,8 @@
public class CreatePolygonDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
- private static final byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_POLYGON_TYPE_TAG = ATypeTag.POLYGON.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new CreatePolygonDescriptor();
}
@@ -81,7 +78,7 @@
outInputList.reset();
evalList.evaluate(tuple);
byte[] listBytes = outInputList.getByteArray();
- if (listBytes[0] != SER_ORDEREDLIST_TYPE_TAG) {
+ if (listBytes[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.CREATE_POLYGON.getName()
+ ": expects input type ORDEREDLIST, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[0]));
@@ -113,17 +110,17 @@
throw new AlgebricksException(
"There must be an even number of double values in the list to form a polygon");
}
- out.writeByte(SER_POLYGON_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_POLYGON_TYPE_TAG);
out.writeShort(listAccessor.size() / 2);
for (int i = 0; i < listAccessor.size() / 2; i++) {
int firstDoubleOffset = listAccessor.getItemOffset(i * 2);
int secondDobuleOffset = listAccessor.getItemOffset((i * 2) + 1);
- APointSerializerDeserializer
- .serialize(ADoubleSerializerDeserializer.getDouble(listBytes,
- firstDoubleOffset), ADoubleSerializerDeserializer.getDouble(
- listBytes, secondDobuleOffset), out);
+ APointSerializerDeserializer.serialize(
+ ADoubleSerializerDeserializer.getDouble(listBytes, firstDoubleOffset),
+ ADoubleSerializerDeserializer.getDouble(listBytes, secondDobuleOffset),
+ out);
}
} catch (AsterixException ex) {
throw new AlgebricksException(ex);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GetItemDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GetItemDescriptor.java
index 4848957..ab58da2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GetItemDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GetItemDescriptor.java
@@ -47,6 +47,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new GetItemDescriptor();
}
@@ -68,8 +69,6 @@
private ICopyEvaluatorFactory listEvalFactory;
private ICopyEvaluatorFactory indexEvalFactory;
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private byte serItemTypeTag;
private ATypeTag itemTag;
private boolean selfDescList = false;
@@ -105,30 +104,28 @@
evalIdx.evaluate(tuple);
byte[] serOrderedList = outInputList.getByteArray();
- if (serOrderedList[0] == SER_NULL_TYPE_TAG) {
+ if (serOrderedList[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (serOrderedList[0] == SER_ORDEREDLIST_TYPE_TAG) {
+ if (serOrderedList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
itemIndex = ATypeHierarchy.getIntegerValue(outInputIdx.getByteArray(), 0);
} else {
- throw new AlgebricksException(
- AsterixBuiltinFunctions.GET_ITEM.getName()
- + ": expects input type (NULL/ORDEREDLIST, [INT8/16/32/64/FLOAT/DOUBLE]), but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serOrderedList[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInputIdx
- .getByteArray()[0]) + ").");
+ throw new AlgebricksException(AsterixBuiltinFunctions.GET_ITEM.getName()
+ + ": expects input type (NULL/ORDEREDLIST, [INT8/16/32/64/FLOAT/DOUBLE]), but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serOrderedList[0]) + ", "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInputIdx.getByteArray()[0])
+ + ").");
}
if (itemIndex >= AOrderedListSerializerDeserializer.getNumberOfItems(serOrderedList)) {
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
if (itemIndex < 0)
- throw new AlgebricksException(AsterixBuiltinFunctions.GET_ITEM.getName()
- + ": item index cannot be negative!");
+ throw new AlgebricksException(
+ AsterixBuiltinFunctions.GET_ITEM.getName() + ": item index cannot be negative!");
itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serOrderedList[1]);
if (itemTag == ATypeTag.ANY)
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNullDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNullDescriptor.java
index e3c793a..17065e8 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNullDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNullDescriptor.java
@@ -37,10 +37,7 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class IsNullDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
public IFunctionDescriptor createFunctionDescriptor() {
@@ -67,7 +64,8 @@
public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
argOut.reset();
eval.evaluate(tuple);
- boolean isNull = argOut.getByteArray()[argOut.getStartOffset()] == SER_NULL_TYPE_TAG;
+ boolean isNull = argOut.getByteArray()[argOut
+ .getStartOffset()] == ATypeTag.SERIALIZED_NULL_TYPE_TAG;
ABoolean res = isNull ? ABoolean.TRUE : ABoolean.FALSE;
try {
aObjSerDer.serialize(res, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
index c0d91b2..bfe6a5c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
@@ -45,12 +45,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class LenDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_UNORDEREDLIST_TYPE_TAG = ATypeTag.UNORDEREDLIST.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new LenDescriptor();
}
@@ -87,7 +84,7 @@
byte[] serList = inputVal.getByteArray();
- if (serList[0] == SER_NULL_TYPE_TAG) {
+ if (serList[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerde.serialize(ANull.NULL, out);
} catch (HyracksDataException e) {
@@ -96,14 +93,15 @@
return;
}
- if (serList[0] != SER_ORDEREDLIST_TYPE_TAG && serList[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (serList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serList[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.LEN.getName()
+ ": expects input type ORDEREDLIST/UNORDEREDLIST but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[0]));
}
int numberOfitems = 0;
- if (serList[0] == SER_ORDEREDLIST_TYPE_TAG)
+ if (serList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG)
numberOfitems = AOrderedListSerializerDeserializer.getNumberOfItems(serList);
else
numberOfitems = AUnorderedListSerializerDeserializer.getNumberOfItems(serList);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotDescriptor.java
index 06c29a9..1f97509 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotDescriptor.java
@@ -41,12 +41,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class NotDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- private final static byte SER_BOOLEAN_TYPE_TAG = ATypeTag.BOOLEAN.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NotDescriptor();
}
@@ -85,11 +82,11 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_BOOLEAN_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_BOOLEAN_TYPE_TAG) {
boolean argRes = ABooleanSerializerDeserializer.getBoolean(argOut.getByteArray(), 1);
ABoolean aResult = argRes ? (ABoolean.FALSE) : (ABoolean.TRUE);
booleanSerde.serialize(aResult, out);
- } else if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG)
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotNullDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotNullDescriptor.java
index 16b875c..b0e3640 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotNullDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NotNullDescriptor.java
@@ -44,11 +44,11 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NotNullDescriptor();
}
};
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -71,7 +71,7 @@
outInput.reset();
eval.evaluate(tuple);
byte[] data = outInput.getByteArray();
- if (data[outInput.getStartOffset()] == SER_NULL_TYPE_TAG) {
+ if (data[outInput.getStartOffset()] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
throw new AlgebricksException(errorMessage);
}
out.write(data, outInput.getStartOffset(), outInput.getLength());
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericAbsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericAbsDescriptor.java
index 79cacac..d1f6a2d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericAbsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericAbsDescriptor.java
@@ -60,6 +60,7 @@
public class NumericAbsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericAbsDescriptor();
}
@@ -83,13 +84,6 @@
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
@@ -106,44 +100,44 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue((val <= 0) ? (byte) (0 - val) : val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue((val <= 0) ? (short) (0 - val) : val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue((val <= 0) ? (0 - val) : val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue((val <= 0L) ? (0L - val) : val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
aFloat.setValue((val <= 0.0f) ? 0.0f - val : val);
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
aDouble.setValue((val <= 0.0D) ? 0.0D - val : val);
serde.serialize(aDouble, out);
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCeilingDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCeilingDescriptor.java
index e77b287..ed4353a 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCeilingDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCeilingDescriptor.java
@@ -61,6 +61,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericCeilingDescriptor();
}
@@ -80,18 +81,9 @@
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
return new ICopyEvaluator() {
-
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
-
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -107,44 +99,44 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue(val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue(val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue(val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue(val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
aFloat.setValue((float) Math.ceil(val));
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
aDouble.setValue(Math.ceil(val));
serde.serialize(aDouble, out);
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
index 834f2c9..c9f1974 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericFloorDescriptor.java
@@ -60,6 +60,7 @@
public class NumericFloorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericFloorDescriptor();
}
@@ -83,14 +84,6 @@
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
-
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -106,44 +99,44 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue(val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue(val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue(val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue(val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
aFloat.setValue((float) Math.floor(val));
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
aDouble.setValue(Math.floor(val));
serde.serialize(aDouble, out);
} else {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundDescriptor.java
index b78c13d..23bfad8 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundDescriptor.java
@@ -60,6 +60,7 @@
public class NumericRoundDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericRoundDescriptor();
}
@@ -83,14 +84,6 @@
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
-
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -106,45 +99,45 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue(val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue(val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue(val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue(val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
val = Math.round(val);
aFloat.setValue(val);
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
val = Math.round(val);
aDouble.setValue(val);
serde.serialize(aDouble, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEven2Descriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEven2Descriptor.java
index e448252..cdf9115 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEven2Descriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEven2Descriptor.java
@@ -62,6 +62,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericRoundHalfToEven2Descriptor();
}
@@ -86,15 +87,6 @@
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
private ICopyEvaluator precision = args[1].createEvaluator(argOut);
-
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
-
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -108,13 +100,13 @@
argOut.reset();
precision.evaluate(tuple);
- if (argOut.getByteArray()[0] == serInt8TypeTag) {
- return (int) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
- return (int) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
- return (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
+ return AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
+ return AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
+ return AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
return (int) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
} else {
throw new AlgebricksException(AsterixBuiltinFunctions.NUMERIC_ROUND_HALF_TO_EVEN2.getName()
@@ -129,66 +121,64 @@
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue(val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue(val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue(val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue(val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
if (Float.isNaN(val) || Float.isInfinite(val) || val == -0.0F || val == 0.0F) {
aFloat.setValue(val);
serde.serialize(aFloat, out);
} else {
BigDecimal r = new BigDecimal(Float.toString(val));
- aFloat.setValue(r.setScale(getPrecision(tuple), BigDecimal.ROUND_HALF_EVEN)
- .floatValue());
+ aFloat.setValue(
+ r.setScale(getPrecision(tuple), BigDecimal.ROUND_HALF_EVEN).floatValue());
serde.serialize(aFloat, out);
}
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
if (Double.isNaN(val) || Double.isInfinite(val) || val == -0.0D || val == 0.0D) {
aDouble.setValue(val);
serde.serialize(aDouble, out);
} else {
BigDecimal r = new BigDecimal(Double.toString(val));
- aDouble.setValue(r.setScale(getPrecision(tuple), BigDecimal.ROUND_HALF_EVEN)
- .doubleValue());
+ aDouble.setValue(
+ r.setScale(getPrecision(tuple), BigDecimal.ROUND_HALF_EVEN).doubleValue());
serde.serialize(aDouble, out);
}
} else {
- throw new NotImplementedException(
- AsterixBuiltinFunctions.NUMERIC_ROUND_HALF_TO_EVEN2.getName()
- + ": not implemented for "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
- .getByteArray()[0]));
+ throw new NotImplementedException(AsterixBuiltinFunctions.NUMERIC_ROUND_HALF_TO_EVEN2
+ .getName() + ": not implemented for "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut.getByteArray()[0]));
}
} catch (HyracksDataException e) {
throw new AlgebricksException(e);
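
The hunks above, like most of the hunks that follow, drop per-evaluator fields of the form private byte serInt32TypeTag = ATypeTag.INT32.serialize(); in favor of the shared ATypeTag.SERIALIZED_*_TYPE_TAG constants, so the tag bytes are no longer recomputed and stored in every evaluator instance. The dispatch itself is unchanged: the first byte of a serialized value is its type tag, and the evaluator branches on it. A minimal standalone sketch of that leading-byte dispatch follows; the tag values are placeholders for illustration, not the real AsterixDB byte codes.

    // Standalone sketch of dispatching on a leading type-tag byte.
    // The tag constants are hypothetical stand-ins, not ATypeTag values.
    public final class TagDispatchSketch {
        private static final byte NULL_TAG = 0;    // placeholder
        private static final byte INT32_TAG = 1;   // placeholder
        private static final byte DOUBLE_TAG = 2;  // placeholder

        static String describe(byte[] serialized) {
            byte tag = serialized[0];
            if (tag == NULL_TAG) {
                return "null";
            } else if (tag == INT32_TAG) {
                return "int32 payload (" + (serialized.length - 1) + " bytes)";
            } else if (tag == DOUBLE_TAG) {
                return "double payload (" + (serialized.length - 1) + " bytes)";
            } else {
                throw new UnsupportedOperationException("not implemented for tag " + tag);
            }
        }

        public static void main(String[] args) {
            System.out.println(describe(new byte[] { 1, 0, 0, 0, 7 })); // int32 payload (4 bytes)
        }
    }
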
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEvenDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEvenDescriptor.java
index 06e36c8..edae3fe 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEvenDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericRoundHalfToEvenDescriptor.java
@@ -60,6 +60,7 @@
public class NumericRoundHalfToEvenDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericRoundHalfToEvenDescriptor();
}
@@ -83,14 +84,6 @@
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
-
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -106,53 +99,51 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
return;
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
- byte val = (byte) AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
+ byte val = AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1);
aInt8.setValue(val);
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- short val = (short) AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
+ short val = AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1);
aInt16.setValue(val);
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
- int val = (int) AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
+ int val = AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1);
aInt32.setValue(val);
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
- long val = (long) AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
+ long val = AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1);
aInt64.setValue(val);
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
- float val = (float) AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
+ float val = AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1);
aFloat.setValue((float) Math.rint(val));
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
- double val = (double) ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
+ double val = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1);
aDouble.setValue(Math.rint(val));
serde.serialize(aDouble, out);
} else {
- throw new NotImplementedException(
- AsterixBuiltinFunctions.NUMERIC_ROUND_HALF_TO_EVEN.getName()
- + ": not implemented for "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
- .getByteArray()[0]));
+ throw new NotImplementedException(AsterixBuiltinFunctions.NUMERIC_ROUND_HALF_TO_EVEN
+ .getName() + ": not implemented for "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut.getByteArray()[0]));
}
} catch (HyracksDataException e) {
throw new AlgebricksException(e);
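
Both rounding evaluators keep their semantics after the cleanup: the plain round-half-to-even variant uses Math.rint, and the two-argument variant goes through BigDecimal with the ROUND_HALF_EVEN policy (RoundingMode.HALF_EVEN is the equivalent enum form used in this sketch). A small standalone demonstration of the two paths, independent of the evaluator plumbing:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    // Demonstrates the two rounding paths used by the evaluators above.
    public class HalfToEvenDemo {
        public static void main(String[] args) {
            // Math.rint rounds ties to the even neighbor.
            System.out.println(Math.rint(0.5)); // 0.0
            System.out.println(Math.rint(1.5)); // 2.0

            // BigDecimal with HALF_EVEN rounds to an explicit number of decimals.
            BigDecimal r = new BigDecimal(Double.toString(2.675));
            System.out.println(r.setScale(2, RoundingMode.HALF_EVEN)); // 2.68
        }
    }
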
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericUnaryMinusDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericUnaryMinusDescriptor.java
index ff1e5b4..ba60873 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericUnaryMinusDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericUnaryMinusDescriptor.java
@@ -56,6 +56,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new NumericUnaryMinusDescriptor();
}
@@ -74,13 +75,6 @@
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(argOut);
- private byte serNullTypeTag = ATypeTag.NULL.serialize();
- private byte serInt8TypeTag = ATypeTag.INT8.serialize();
- private byte serInt16TypeTag = ATypeTag.INT16.serialize();
- private byte serInt32TypeTag = ATypeTag.INT32.serialize();
- private byte serInt64TypeTag = ATypeTag.INT64.serialize();
- private byte serFloatTypeTag = ATypeTag.FLOAT.serialize();
- private byte serDoubleTypeTag = ATypeTag.DOUBLE.serialize();
private AMutableDouble aDouble = new AMutableDouble(0);
private AMutableFloat aFloat = new AMutableFloat(0);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@@ -96,37 +90,38 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == serNullTypeTag) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
serde.serialize(ANull.NULL, out);
return;
- } else if (argOut.getByteArray()[0] == serInt8TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT8);
aInt8.setValue((byte) -AInt8SerializerDeserializer.getByte(argOut.getByteArray(), 1));
serde.serialize(aInt8, out);
- } else if (argOut.getByteArray()[0] == serInt16TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT16);
- aInt16.setValue((short) -AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
+ aInt16.setValue(
+ (short) -AInt16SerializerDeserializer.getShort(argOut.getByteArray(), 1));
serde.serialize(aInt16, out);
- } else if (argOut.getByteArray()[0] == serInt32TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT32);
aInt32.setValue(-AInt32SerializerDeserializer.getInt(argOut.getByteArray(), 1));
serde.serialize(aInt32, out);
- } else if (argOut.getByteArray()[0] == serInt64TypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AINT64);
aInt64.setValue(-AInt64SerializerDeserializer.getLong(argOut.getByteArray(), 1));
serde.serialize(aInt64, out);
- } else if (argOut.getByteArray()[0] == serFloatTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.AFLOAT);
aFloat.setValue(-AFloatSerializerDeserializer.getFloat(argOut.getByteArray(), 1));
serde.serialize(aFloat, out);
- } else if (argOut.getByteArray()[0] == serDoubleTypeTag) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
serde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ADOUBLE);
aDouble.setValue(-ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(), 1));
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/OrDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/OrDescriptor.java
index 73adb7b..f6ea1f2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/OrDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/OrDescriptor.java
@@ -39,10 +39,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class OrDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new OrDescriptor();
}
@@ -85,7 +84,7 @@
for (int i = 0; i < n; i++) {
argOut.reset();
evals[i].evaluate(tuple);
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
metNull = true;
continue;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
index 29603da..af8ab19 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
@@ -47,15 +47,10 @@
public class PrefixLenDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
- // allowed input types
- private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
- private final static byte SER_DOUBLE_TYPE_TAG = ATypeTag.DOUBLE.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "prefix-len@3",
3);
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new PrefixLenDescriptor();
}
@@ -91,7 +86,7 @@
// length
inputVal.reset();
evalLen.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_INT32_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type Int32 for the first argument, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
@@ -101,18 +96,18 @@
// similarity threshold
inputVal.reset();
evalThreshold.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_DOUBLE_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DOUBLE for the second argument, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
}
- float similarityThreshold = (float) ADoubleSerializerDeserializer.getDouble(
- inputVal.getByteArray(), 1);
+ float similarityThreshold = (float) ADoubleSerializerDeserializer
+ .getDouble(inputVal.getByteArray(), 1);
// similarity name
inputVal.reset();
evalSimilarity.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type STRING for the third argument, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
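
The prefix-len evaluator narrows its tagged double threshold to a float, as the reformatted (float) ADoubleSerializerDeserializer.getDouble(bytes, 1) call shows: one type-tag byte followed by an eight-byte payload. A JDK-only sketch of that shape, assuming a DataOutput-style big-endian payload and an arbitrary placeholder tag byte:

    import java.nio.ByteBuffer;

    // Sketch of a tagged double value: [1 tag byte][8-byte big-endian payload].
    // The tag byte below is a placeholder, not the real DOUBLE type tag.
    public class TaggedDoubleDemo {
        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(9);
            buf.put((byte) 2);      // placeholder type tag
            buf.putDouble(0.8);     // payload, as DataOutput.writeDouble would write it
            byte[] serialized = buf.array();

            // Read the payload starting after the tag byte and narrow to float.
            float similarityThreshold = (float) ByteBuffer.wrap(serialized, 1, 8).getDouble();
            System.out.println(similarityThreshold); // 0.8
        }
    }
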
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/RegExpDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/RegExpDescriptor.java
index 1271465..82071e3 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/RegExpDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/RegExpDescriptor.java
@@ -56,14 +56,9 @@
*/
public class RegExpDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new RegExpDescriptor();
}
@@ -114,11 +109,11 @@
try {
array0.reset();
evalPattern.evaluate(tuple);
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
}
- if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.REG_EXP.getName()
+ ": expects type STRING/NULL for the first input argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]));
@@ -138,19 +133,19 @@
lastPattern.reset();
lastPattern.write(array0.getByteArray(), array0.getStartOffset(), array0.getLength());
// ! object creation !
- DataInputStream di = new DataInputStream(new ByteArrayInputStream(
- lastPattern.getByteArray()));
- AString strPattern = (AString) stringSerde.deserialize(di);
+ DataInputStream di = new DataInputStream(
+ new ByteArrayInputStream(lastPattern.getByteArray()));
+ AString strPattern = stringSerde.deserialize(di);
pattern = Pattern.compile(strPattern.getStringValue());
}
array0.reset();
evalString.evaluate(tuple);
- if (array0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (array0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, dout);
return;
}
- if (array0.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (array0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.REG_EXP.getName()
+ ": expects type STRING/NULL for the second input argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(array0.getByteArray()[0]));
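
The regexp evaluator keeps the previously seen pattern bytes in its lastPattern buffer; the "! object creation !" note marks the point where a new java.util.regex.Pattern is built. A condensed standalone sketch of that caching idea follows; the string comparison here is illustrative, not the evaluator's exact byte-level check:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Recompile the regex only when the pattern string changes.
    public class CachedRegexDemo {
        private String lastPattern;
        private Pattern compiled;

        boolean matches(String pattern, String input) {
            if (compiled == null || !pattern.equals(lastPattern)) {
                compiled = Pattern.compile(pattern); // object creation only on change
                lastPattern = pattern;
            }
            Matcher m = compiled.matcher(input);
            return m.matches();
        }

        public static void main(String[] args) {
            CachedRegexDemo demo = new CachedRegexDemo();
            System.out.println(demo.matches("a+b", "aaab")); // true
            System.out.println(demo.matches("a+b", "abc"));  // false, reuses the compiled pattern
        }
    }
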
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityDescriptor.java
index 1962a93..e66fceb 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityDescriptor.java
@@ -55,14 +55,8 @@
private static final long serialVersionUID = 1L;
private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity@7",
7);
-
- private final static byte SER_DOUBLE_TYPE_TAG = ATypeTag.DOUBLE.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_UNORDEREDLIST_TYPE_TAG = ATypeTag.UNORDEREDLIST.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SimilarityDescriptor();
}
@@ -106,18 +100,18 @@
// similarity threshold
inputVal.reset();
evalThreshold.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_DOUBLE_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DOUBLE for the first argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
}
- float similarityThreshold = (float) ADoubleSerializerDeserializer.getDouble(
- inputVal.getByteArray(), 1);
+ float similarityThreshold = (float) ADoubleSerializerDeserializer
+ .getDouble(inputVal.getByteArray(), 1);
// similarity name
inputVal.reset();
evalSimilarity.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type STRING for the second argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
@@ -127,7 +121,7 @@
inputVal.reset();
evalLen1.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_INT32_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type INT32 for the thrid argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
@@ -136,7 +130,7 @@
inputVal.reset();
evalLen2.evaluate(tuple);
- if (inputVal.getByteArray()[0] != SER_INT32_TYPE_TAG) {
+ if (inputVal.getByteArray()[0] != ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type INT32 for the fourth argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[0]));
@@ -157,15 +151,16 @@
evalTokens1.evaluate(tuple);
byte[] serList = inputVal.getByteArray();
- if (serList[0] != SER_ORDEREDLIST_TYPE_TAG && serList[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (serList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serList[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(FID.getName() + ": not defined for values of type"
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[0]));
}
int lengthTokens1;
- if (serList[0] == SER_ORDEREDLIST_TYPE_TAG) {
- lengthTokens1 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal
- .getByteArray());
+ if (serList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
+ lengthTokens1 = AOrderedListSerializerDeserializer
+ .getNumberOfItems(inputVal.getByteArray());
// read tokens
for (i = 0; i < lengthTokens1; i++) {
int itemOffset;
@@ -177,8 +172,8 @@
tokens1.add(IntegerPointable.getInteger(serList, itemOffset));
}
} else {
- lengthTokens1 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal
- .getByteArray());
+ lengthTokens1 = AUnorderedListSerializerDeserializer
+ .getNumberOfItems(inputVal.getByteArray());
// read tokens
for (i = 0; i < lengthTokens1; i++) {
int itemOffset;
@@ -201,15 +196,16 @@
evalTokens2.evaluate(tuple);
serList = inputVal.getByteArray();
- if (serList[0] != SER_ORDEREDLIST_TYPE_TAG && serList[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (serList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serList[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(FID.getName() + ": not defined for values of type"
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[0]));
}
int lengthTokens2;
- if (serList[0] == SER_ORDEREDLIST_TYPE_TAG) {
- lengthTokens2 = AOrderedListSerializerDeserializer.getNumberOfItems(inputVal
- .getByteArray());
+ if (serList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
+ lengthTokens2 = AOrderedListSerializerDeserializer
+ .getNumberOfItems(inputVal.getByteArray());
// read tokens
for (i = 0; i < lengthTokens2; i++) {
int itemOffset;
@@ -221,8 +217,8 @@
tokens2.add(IntegerPointable.getInteger(serList, itemOffset));
}
} else {
- lengthTokens2 = AUnorderedListSerializerDeserializer.getNumberOfItems(inputVal
- .getByteArray());
+ lengthTokens2 = AUnorderedListSerializerDeserializer
+ .getNumberOfItems(inputVal.getByteArray());
// read tokens
for (i = 0; i < lengthTokens2; i++) {
int itemOffset;
@@ -249,8 +245,8 @@
//
SimilarityMetric.getPartialIntersectSize(tokens1.get(), 0, tokens1.length(), tokens2.get(),
0, tokens2.length(), tokenPrefix, parInter);
- if (similarityFilters.passPositionFilter(parInter.intersectSize, parInter.posXStop,
- length1, parInter.posYStop, length2)) {
+ if (similarityFilters.passPositionFilter(parInter.intersectSize, parInter.posXStop, length1,
+ parInter.posYStop, length2)) {
//
// -- - suffix filter - --
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialAreaDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialAreaDescriptor.java
index 12e2cb0..a3ad867 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialAreaDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialAreaDescriptor.java
@@ -51,6 +51,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SpatialAreaDescriptor();
}
@@ -92,14 +93,14 @@
+ ": polygon must have at least 3 points");
}
area = Math.abs(SpatialUtils.polygonArea(argOut.getByteArray(), numOfPoints));
- out.writeByte(ATypeTag.DOUBLE.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
out.writeDouble(area);
break;
case CIRCLE:
double radius = ADoubleSerializerDeserializer.getDouble(argOut.getByteArray(),
ACircleSerializerDeserializer.getRadiusOffset());
area = SpatialUtils.pi() * radius * radius;
- out.writeByte(ATypeTag.DOUBLE.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
out.writeDouble(area);
break;
case RECTANGLE:
@@ -117,7 +118,7 @@
ARectangleSerializerDeserializer
.getUpperRightCoordinateOffset(Coordinate.Y));
area = (x2 - x1) * (y2 - y1);
- out.writeByte(ATypeTag.DOUBLE.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
out.writeDouble(area);
break;
case NULL:
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialDistanceDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialDistanceDescriptor.java
index ad86c64..70adde4 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialDistanceDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialDistanceDescriptor.java
@@ -48,6 +48,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SpatialDistanceDescriptor();
}
@@ -97,10 +98,9 @@
APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y));
distance = Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2));
} else {
- throw new NotImplementedException(
- AsterixBuiltinFunctions.SPATIAL_DISTANCE.getName()
- + ": does not support the type: " + tag1
- + "; it is only implemented for POINT.");
+ throw new NotImplementedException(AsterixBuiltinFunctions.SPATIAL_DISTANCE.getName()
+ + ": does not support the type: " + tag1
+ + "; it is only implemented for POINT.");
}
} else if (tag0 == ATypeTag.NULL || tag1 == ATypeTag.NULL) {
nullSerde.serialize(ANull.NULL, out);
@@ -109,7 +109,7 @@
+ ": does not support the type: " + tag1
+ "; it is only implemented for POINT.");
}
- out.writeByte(ATypeTag.DOUBLE.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
out.writeDouble(distance);
} catch (HyracksDataException hde) {
throw new AlgebricksException(hde);
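
The POINT/POINT branch above computes plain Euclidean distance before writing the DOUBLE tag byte and the value. The formula on its own, as a runnable check:

    // Euclidean distance between two points, as in the POINT/POINT branch above.
    public class SpatialDistanceDemo {
        static double distance(double x1, double y1, double x2, double y2) {
            return Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2));
        }

        public static void main(String[] args) {
            System.out.println(distance(0.0, 0.0, 3.0, 4.0)); // 5.0
        }
    }
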
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringConcatDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringConcatDescriptor.java
index b3feba7..52734b2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringConcatDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringConcatDescriptor.java
@@ -45,11 +45,8 @@
public class StringConcatDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
-
- private static final byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private static final byte SER_UNORDEREDLIST_TYPE_TAG = ATypeTag.UNORDEREDLIST.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new StringConcatDescriptor();
}
@@ -81,8 +78,8 @@
outInputList.reset();
evalList.evaluate(tuple);
byte[] listBytes = outInputList.getByteArray();
- if (listBytes[0] != SER_ORDEREDLIST_TYPE_TAG
- && listBytes[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (listBytes[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && listBytes[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_CONCAT.getName()
+ ": expects input type ORDEREDLIST/UNORDEREDLIST, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[0]));
@@ -113,7 +110,7 @@
}
utf8Len += UTF8StringUtil.getUTFLength(listBytes, itemOffset);
}
- out.writeByte(ATypeTag.STRING.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
int cbytes = UTF8StringUtil.encodeUTF8Length(utf8Len, tempLengthArray, 0);
out.write(tempLengthArray, 0, cbytes);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringJoinDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringJoinDescriptor.java
index 650beb0..866d6c4 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringJoinDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringJoinDescriptor.java
@@ -42,13 +42,11 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new StringJoinDescriptor();
}
};
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final byte stringTypeTag = ATypeTag.STRING.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -79,14 +77,14 @@
outInputSep.reset();
evalSep.evaluate(tuple);
byte[] serSep = outInputSep.getByteArray();
- if (serOrderedList[0] != SER_ORDEREDLIST_TYPE_TAG
- && serOrderedList[1] != SER_STRING_TYPE_TAG) {
+ if (serOrderedList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serOrderedList[1] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_JOIN.getName()
+ ": expects input type ORDEREDLIST but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serOrderedList[0]));
}
- if (serSep[0] != SER_STRING_TYPE_TAG) {
+ if (serSep[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_JOIN.getName()
+ ": expects STRING type for the seperator but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serSep[0]));
@@ -100,8 +98,8 @@
int sep_meta_len = UTF8StringUtil.getNumBytesToStoreLength(sep_len);
for (int i = 0; i < size; i++) {
- int itemOffset = AOrderedListSerializerDeserializer
- .getItemOffset(serOrderedList, i);
+ int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serOrderedList,
+ i);
int currentSize = UTF8StringUtil.getUTFLength(serOrderedList, itemOffset);
if (i != size - 1 && currentSize != 0) {
@@ -109,12 +107,12 @@
}
utf_8_len += currentSize;
}
- out.writeByte(stringTypeTag);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
int length = UTF8StringUtil.encodeUTF8Length(utf_8_len, tempLengthArray, 0);
out.write(tempLengthArray, 0, length);
for (int i = 0; i < size; i++) {
- int itemOffset = AOrderedListSerializerDeserializer
- .getItemOffset(serOrderedList, i);
+ int itemOffset = AOrderedListSerializerDeserializer.getItemOffset(serOrderedList,
+ i);
utf_8_len = UTF8StringUtil.getUTFLength(serOrderedList, itemOffset);
out.write(serOrderedList,
itemOffset + UTF8StringUtil.getNumBytesToStoreLength(utf_8_len), utf_8_len);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
index 47ecb5b..5a6c1a0 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
@@ -43,7 +43,6 @@
import org.apache.hyracks.util.string.UTF8StringUtil;
public class StringLengthDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -51,8 +50,6 @@
return new StringLengthDescriptor();
}
};
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -80,11 +77,11 @@
outInput.reset();
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
int len = UTF8StringUtil.getUTFLength(outInput.getByteArray(), 1);
result.setValue(len);
int64Serde.serialize(result, out);
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else {
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_LENGTH.getName()
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLowerCaseDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLowerCaseDescriptor.java
index 66212f7..7ae1e89 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLowerCaseDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLowerCaseDescriptor.java
@@ -46,12 +46,11 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new StringLowerCaseDescriptor();
}
};
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -70,8 +69,6 @@
private final UTF8StringBuilder builder = new UTF8StringBuilder();
private final UTF8StringPointable string = new UTF8StringPointable();
- private final byte stt = ATypeTag.STRING.serialize();
-
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
@@ -84,14 +81,14 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
string.set(serString, 1, serString.length);
array.reset();
UTF8StringPointable.lowercase(string, builder, array);
- out.writeByte(stt);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(array.getByteArray(), 0, array.getLength());
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_LOWERCASE.getName()
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
index 654e0b6..f75b8e3 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
@@ -48,11 +48,11 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new StringToCodePointDescriptor();
}
};
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -62,8 +62,7 @@
@Override
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
return new ICopyEvaluator() {
- protected final DataOutput out = output.getDataOutput();
- ;
+ protected final DataOutput out = output.getDataOutput();
protected final ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
protected final ICopyEvaluator stringEval = args[0].createEvaluator(argOut);
protected final AOrderedListType intListType = new AOrderedListType(BuiltinType.AINT64, null);
@@ -83,7 +82,7 @@
stringEval.evaluate(tuple);
byte[] serString = argOut.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
byte[] bytes = argOut.getByteArray();
int len = UTF8StringUtil.getUTFLength(bytes, 1);
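
string-to-codepoint checks for the STRING tag, reads the UTF-8 payload, and emits an ordered list of AINT64 code points (see the AOrderedListType(BuiltinType.AINT64, null) field above). The logical result of that decomposition can be reproduced with plain JDK strings; this does not model the Asterix list serialization:

    import java.util.Arrays;

    // Code-point decomposition of a string, the logical result of string-to-codepoint.
    public class CodePointDemo {
        public static void main(String[] args) {
            int[] codePoints = "h\u00e9llo".codePoints().toArray();
            System.out.println(Arrays.toString(codePoints)); // [104, 233, 108, 108, 111]
        }
    }
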
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringUpperCaseDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringUpperCaseDescriptor.java
index fc32dfd..02e495f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringUpperCaseDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringUpperCaseDescriptor.java
@@ -46,12 +46,11 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new StringUpperCaseDescriptor();
}
};
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
@Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
@@ -66,7 +65,7 @@
private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
private ICopyEvaluator eval = args[0].createEvaluator(outInput);
- private final byte stt = ATypeTag.STRING.serialize();
+ private final byte stt = ATypeTag.SERIALIZED_STRING_TYPE_TAG;
private final GrowableArray array = new GrowableArray();
private final UTF8StringBuilder builder = new UTF8StringBuilder();
@@ -84,14 +83,14 @@
eval.evaluate(tuple);
byte[] serString = outInput.getByteArray();
- if (serString[0] == SER_STRING_TYPE_TAG) {
+ if (serString[0] == ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
string.set(serString, 1, serString.length);
array.reset();
UTF8StringPointable.uppercase(string, builder, array);
out.writeByte(stt);
out.write(array.getByteArray(), 0, array.getLength());
- } else if (serString[0] == SER_NULL_TYPE_TAG)
+ } else if (serString[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG)
nullSerde.serialize(ANull.NULL, out);
else
throw new AlgebricksException(AsterixBuiltinFunctions.STRING_UPPERCASE.getName()
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/Substring2Descriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/Substring2Descriptor.java
index f843051..a3076ce 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/Substring2Descriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/Substring2Descriptor.java
@@ -41,13 +41,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class Substring2Descriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- // allowed input types
- private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new Substring2Descriptor();
}
@@ -66,7 +62,6 @@
private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
private ICopyEvaluator evalString = args[0].createEvaluator(argOut);
private ICopyEvaluator evalStart = args[1].createEvaluator(argOut);
- private final byte stt = ATypeTag.STRING.serialize();
private final GrowableArray array = new GrowableArray();
private final UTF8StringBuilder builder = new UTF8StringBuilder();
@@ -87,7 +82,7 @@
evalString.evaluate(tuple);
byte[] bytes = argOut.getByteArray();
- if (bytes[0] != SER_STRING_TYPE_TAG) {
+ if (bytes[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.SUBSTRING2.getName()
+ ": expects type STRING for the first argument but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut.getByteArray()[0]));
@@ -104,7 +99,7 @@
}
try {
- out.writeByte(stt);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(array.getByteArray(), 0, array.getLength());
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringAfterDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringAfterDescriptor.java
index ff84986..78f5a66 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringAfterDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringAfterDescriptor.java
@@ -39,14 +39,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class SubstringAfterDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- // allowed input types
- private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SubstringAfterDescriptor();
}
@@ -66,8 +61,6 @@
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ICopyEvaluator evalString = args[0].createEvaluator(array0);
private ICopyEvaluator evalPattern = args[1].createEvaluator(array1);
- private final byte stt = ATypeTag.STRING.serialize();
-
private final GrowableArray array = new GrowableArray();
private final UTF8StringBuilder builder = new UTF8StringBuilder();
private final UTF8StringPointable stringPtr = new UTF8StringPointable();
@@ -83,8 +76,10 @@
evalPattern.evaluate(tuple);
byte[] pattern = array1.getByteArray();
- if ((src[0] != SER_STRING_TYPE_TAG && src[0] != SER_NULL_TYPE_TAG)
- || (pattern[0] != SER_STRING_TYPE_TAG && pattern[0] != SER_NULL_TYPE_TAG)) {
+ if ((src[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ && src[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)
+ || (pattern[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ && pattern[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)) {
throw new AlgebricksException(AsterixBuiltinFunctions.SUBSTRING_AFTER.getName()
+ ": expects input type (STRING/NULL, STRING/NULL) but got ("
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(src[0]) + ", "
@@ -96,7 +91,7 @@
array.reset();
try {
UTF8StringPointable.substrAfter(stringPtr, patternPtr, builder, array);
- out.writeByte(stt);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(array.getByteArray(), 0, array.getLength());
} catch (IOException e) {
throw new AlgebricksException(e);
@@ -105,11 +100,11 @@
}
}
- ;
+ ;
}
}
- ;
+ ;
}
@Override
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
index 3871ed7..dd9a91a 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringBeforeDescriptor.java
@@ -39,14 +39,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class SubstringBeforeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- // allowed input types
- private static final byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SubstringBeforeDescriptor();
}
@@ -60,14 +55,11 @@
@Override
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
return new ICopyEvaluator() {
-
private DataOutput out = output.getDataOutput();
private ArrayBackedValueStorage array0 = new ArrayBackedValueStorage();
private ArrayBackedValueStorage array1 = new ArrayBackedValueStorage();
private ICopyEvaluator evalString = args[0].createEvaluator(array0);
private ICopyEvaluator evalPattern = args[1].createEvaluator(array1);
- private final byte stt = ATypeTag.STRING.serialize();
-
private final GrowableArray array = new GrowableArray();
private final UTF8StringBuilder builder = new UTF8StringBuilder();
private final UTF8StringPointable stringPtr = new UTF8StringPointable();
@@ -83,8 +75,10 @@
evalPattern.evaluate(tuple);
byte[] pattern = array1.getByteArray();
- if ((src[0] != SER_STRING_TYPE_TAG && src[0] != SER_NULL_TYPE_TAG)
- || (pattern[0] != SER_STRING_TYPE_TAG && pattern[0] != SER_NULL_TYPE_TAG)) {
+ if ((src[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ && src[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)
+ || (pattern[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ && pattern[0] != ATypeTag.SERIALIZED_NULL_TYPE_TAG)) {
throw new AlgebricksException(AsterixBuiltinFunctions.SUBSTRING_BEFORE.getName()
+ ": expects input type (STRING/NULL, STRING/NULL) but got ("
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(src[0]) + ", "
@@ -96,7 +90,7 @@
array.reset();
try {
UTF8StringPointable.substrBefore(stringPtr, patternPtr, builder, array);
- out.writeByte(stt);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(array.getByteArray(), 0, array.getLength());
} catch (IOException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
index 42d64b3..07b3b64 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
@@ -47,6 +47,7 @@
private static final long serialVersionUID = 1L;
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SubstringDescriptor();
}
@@ -66,7 +67,6 @@
private final ICopyEvaluator evalString = args[0].createEvaluator(argOut);
private final ICopyEvaluator evalStart = args[1].createEvaluator(argOut);
private final ICopyEvaluator evalLen = args[2].createEvaluator(argOut);
- private final byte stt = ATypeTag.STRING.serialize();
private final GrowableArray array = new GrowableArray();
private final UTF8StringBuilder builder = new UTF8StringBuilder();
@@ -91,7 +91,7 @@
start = argOut.getByteArray()[1] - 1;
break;
case INT16:
- start = (int) ShortPointable.getShort(argOut.getByteArray(), 1) - 1;
+ start = ShortPointable.getShort(argOut.getByteArray(), 1) - 1;
break;
case FLOAT:
start = (int) FloatPointable.getFloat(argOut.getByteArray(), 1) - 1;
@@ -122,7 +122,7 @@
len = argOut.getByteArray()[1];
break;
case INT16:
- len = (int) ShortPointable.getShort(argOut.getByteArray(), 1);
+ len = ShortPointable.getShort(argOut.getByteArray(), 1);
break;
case FLOAT:
len = (int) FloatPointable.getFloat(argOut.getByteArray(), 1);
@@ -159,7 +159,7 @@
}
try {
- out.writeByte(stt);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
out.write(array.getByteArray(), 0, array.getLength());
} catch (IOException e) {
throw new AlgebricksException(e);
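
Several of the string evaluators end the same way: write the STRING type tag byte, then the UTF-8 payload built so far. With a plain DataOutputStream the shape of that output looks as follows; the tag value is a placeholder, and the real evaluators also emit a UTF-8 length prefix via UTF8StringUtil, which this sketch omits:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    // Shape of the evaluator output: [string type tag (1 byte)][string payload].
    // The tag byte and the prefix-free payload are simplifications.
    public class TaggedStringWriteDemo {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bos);

            byte placeholderStringTag = 13; // not the real SERIALIZED_STRING_TYPE_TAG
            byte[] payload = "result".getBytes(StandardCharsets.UTF_8);

            out.writeByte(placeholderStringTag);
            out.write(payload, 0, payload.length);

            System.out.println(bos.size() + " bytes written"); // 7 bytes written
        }
    }
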
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/PrintBinaryDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/PrintBinaryDescriptor.java
index e6de4ce..37b414b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/PrintBinaryDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/PrintBinaryDescriptor.java
@@ -44,7 +44,6 @@
public class PrintBinaryDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private static final byte SER_STRING_BYTE = ATypeTag.STRING.serialize();
@Override
public FunctionIdentifier getIdentifier() {
@@ -101,7 +100,7 @@
throw new AlgebricksException(getIdentifier().getName()
+ ": expects format indicator of \"hex\" or \"base64\" in the 2nd argument");
}
- dataOutput.writeByte(SER_STRING_BYTE);
+ dataOutput.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
writer.writeUTF8(stringBuilder.toString(), dataOutput);
} catch (HyracksDataException e) {
throw new AlgebricksException(e);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByIndexEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByIndexEvalFactory.java
index da42249..7bc6aef 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByIndexEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByIndexEvalFactory.java
@@ -50,8 +50,6 @@
private ICopyEvaluatorFactory fieldIndexEvalFactory;
private int nullBitmapSize;
private ARecordType recordType;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
public FieldAccessByIndexEvalFactory(ICopyEvaluatorFactory recordEvalFactory,
ICopyEvaluatorFactory fieldIndexEvalFactory, ARecordType recordType) {
@@ -59,11 +57,11 @@
this.fieldIndexEvalFactory = fieldIndexEvalFactory;
this.recordType = recordType;
this.nullBitmapSize = ARecordType.computeNullBitmapSize(recordType);
-
}
@Override
public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+
return new ICopyEvaluator() {
private DataOutput out = output.getDataOutput();
@@ -81,36 +79,41 @@
private IAType fieldValueType;
private ATypeTag fieldValueTypeTag = ATypeTag.NULL;
+ /*
+ * outInput0: the record
+ * outInput1: the index
+ *
+ * This method writes [field type tag (1 byte)][field data] into the IDataOutputProvider output.
+ */
@Override
public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
try {
outInput0.reset();
eval0.evaluate(tuple);
- outInput1.reset();
- eval1.evaluate(tuple);
byte[] serRecord = outInput0.getByteArray();
- if (serRecord[0] == SER_NULL_TYPE_TAG) {
+ if (serRecord[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (serRecord[0] != SER_RECORD_TYPE_TAG) {
+ if (serRecord[0] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException("Field accessor is not defined for values of type "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serRecord[0]));
}
-
+ outInput1.reset();
+ eval1.evaluate(tuple);
fieldIndex = IntegerPointable.getInteger(outInput1.getByteArray(), 1);
+ fieldValueType = recordType.getFieldTypes()[fieldIndex];
fieldValueOffset = ARecordSerializerDeserializer.getFieldOffsetById(serRecord, fieldIndex,
nullBitmapSize, recordType.isOpen());
if (fieldValueOffset == 0) {
// the field is null, we checked the null bit map
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
- fieldValueType = recordType.getFieldTypes()[fieldIndex];
if (fieldValueType.getTypeTag().equals(ATypeTag.UNION)) {
if (((AUnionType) fieldValueType).isNullableType()) {
fieldValueTypeTag = ((AUnionType) fieldValueType).getNullableType().getTypeTag();
@@ -137,5 +140,4 @@
}
};
}
-
}
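
The reordering in FieldAccessByIndexEvalFactory evaluates and type-checks the record before it evaluates the index argument and resolves the field type, and a field offset of 0 from the null bitmap means the field is null, so the NULL tag is written directly. A schematic of that guard order; lookupFieldOffset and the tag constants are hypothetical stand-ins for the AsterixDB serializer calls:

    // Schematic of the field-access guard order; lookupFieldOffset and the tag
    // constants are hypothetical stand-ins for the AsterixDB serializer calls.
    public class FieldAccessSketch {
        private static final byte NULL_TAG = 0;    // placeholder
        private static final byte RECORD_TAG = 24; // placeholder

        // Pretend offset lookup: 0 means "field is null according to the null bitmap".
        static int lookupFieldOffset(byte[] record, int fieldIndex) {
            return fieldIndex == 1 ? 0 : 5;
        }

        static String access(byte[] record, int fieldIndex) {
            if (record[0] == NULL_TAG) {
                return "NULL record";
            }
            if (record[0] != RECORD_TAG) {
                throw new IllegalArgumentException("field accessor not defined for tag " + record[0]);
            }
            int offset = lookupFieldOffset(record, fieldIndex);
            if (offset == 0) {
                return "NULL field"; // null bit set for this field
            }
            return "field data at offset " + offset;
        }

        public static void main(String[] args) {
            byte[] record = { 24, 1, 2, 3, 4, 5, 6 };
            System.out.println(access(record, 0)); // field data at offset 5
            System.out.println(access(record, 1)); // NULL field
        }
    }
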
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByNameEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByNameEvalFactory.java
index cb33da1..66bedf0 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByNameEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessByNameEvalFactory.java
@@ -45,9 +45,6 @@
private ICopyEvaluatorFactory recordEvalFactory;
private ICopyEvaluatorFactory fldNameEvalFactory;
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
-
public FieldAccessByNameEvalFactory(ICopyEvaluatorFactory recordEvalFactory,
ICopyEvaluatorFactory fldNameEvalFactory) {
this.recordEvalFactory = recordEvalFactory;
@@ -81,12 +78,12 @@
eval1.evaluate(tuple);
byte[] serRecord = outInput0.getByteArray();
- if (serRecord[0] == SER_NULL_TYPE_TAG) {
+ if (serRecord[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (serRecord[0] != SER_RECORD_TYPE_TAG) {
+ if (serRecord[0] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME.getName()
+ ": expects input type NULL or RECORD, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serRecord[0]));
@@ -95,7 +92,7 @@
byte[] serFldName = outInput1.getByteArray();
fieldValueOffset = ARecordSerializerDeserializer.getFieldOffsetByName(serRecord, serFldName);
if (fieldValueOffset < 0) {
- out.writeByte(ATypeTag.NULL.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessUtil.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessUtil.java
index a442ef4..f9bb6fc 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessUtil.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/FieldAccessUtil.java
@@ -46,10 +46,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class FieldAccessUtil {
-
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
-
@SuppressWarnings("unchecked")
private static ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
@@ -72,7 +68,7 @@
}
public static boolean checkType(byte tagId, DataOutput out) throws AlgebricksException {
- if (tagId == SER_NULL_TYPE_TAG) {
+ if (tagId == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerde.serialize(ANull.NULL, out);
} catch (HyracksDataException e) {
@@ -81,7 +77,7 @@
return true;
}
- if (tagId != SER_RECORD_TYPE_TAG) {
+ if (tagId != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException("Field accessor is not defined for values of type "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(tagId));
}
@@ -118,7 +114,7 @@
if (subType.getTypeTag().equals(ATypeTag.UNION)) {
//enforced SubType
subType = ((AUnionType) subType).getNullableType();
- if (subType.getTypeTag().serialize() != SER_RECORD_TYPE_TAG) {
+ if (subType.getTypeTag().serialize() != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException("Field accessor is not defined for values of type " + subTypeTag);
}
if (subType.getTypeTag() == ATypeTag.RECORD) {
@@ -135,7 +131,7 @@
nullBitmapSize, ((ARecordType) subType).isOpen());
if (subFieldOffset == 0) {
// the field is null, we checked the null bit map
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
subType = ((ARecordType) subType).getFieldTypes()[subFieldIndex];
@@ -177,7 +173,7 @@
subFieldOffset = ARecordSerializerDeserializer.getFieldOffsetByName(subRecord,
abvsFields[i].getByteArray());
if (subFieldOffset < 0) {
- out.writeByte(SER_NULL_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
return;
}
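Every hunk above applies the same refactoring: evaluators used to cache their own serialized tag bytes in fields such as SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize(), and now compare against the shared ATypeTag.SERIALIZED_*_TYPE_TAG constants instead. Below is a minimal, self-contained sketch of that pattern; the nested enum only stands in for ATypeTag, and its byte values are invented, not the real tag values.

public final class SerializedTagSketch {
    // Illustrative stand-in for org.apache.asterix.om.types.ATypeTag; byte values are arbitrary.
    enum TypeTag {
        NULL((byte) 1),
        RECORD((byte) 24);

        private final byte tag;

        TypeTag(byte tag) {
            this.tag = tag;
        }

        byte serialize() {
            return tag;
        }

        // One shared constant per tag, analogous to ATypeTag.SERIALIZED_NULL_TYPE_TAG etc.,
        // so evaluators no longer cache ATypeTag.NULL.serialize() in their own fields.
        static final byte SERIALIZED_NULL_TYPE_TAG = NULL.serialize();
        static final byte SERIALIZED_RECORD_TYPE_TAG = RECORD.serialize();
    }

    // Same guard shape as checkType(...) above: byte 0 of a serialized value is its type tag.
    static String describe(byte[] serValue) {
        if (serValue[0] == TypeTag.SERIALIZED_NULL_TYPE_TAG) {
            return "null";
        }
        if (serValue[0] != TypeTag.SERIALIZED_RECORD_TYPE_TAG) {
            throw new IllegalArgumentException("expected NULL or RECORD, got tag " + serValue[0]);
        }
        return "record";
    }

    public static void main(String[] args) {
        System.out.println(describe(new byte[] { TypeTag.SERIALIZED_RECORD_TYPE_TAG })); // record
        System.out.println(describe(new byte[] { TypeTag.SERIALIZED_NULL_TYPE_TAG })); // null
    }
}

Keeping a single constant per tag removes the duplicated static fields and guarantees that every operator compares against the same byte.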
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldValueEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldValueEvalFactory.java
index 8cabee1..501c743 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldValueEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldValueEvalFactory.java
@@ -44,8 +44,6 @@
private ICopyEvaluatorFactory fldNameEvalFactory;
private final ARecordType recordType;
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
public GetRecordFieldValueEvalFactory(ICopyEvaluatorFactory recordEvalFactory,
ICopyEvaluatorFactory fldNameEvalFactory, ARecordType recordType) {
this.recordEvalFactory = recordEvalFactory;
@@ -89,7 +87,7 @@
eval1.evaluate(tuple);
byte[] serFldName = outInput1.getByteArray();
- if (serFldName[0] != SER_STRING_TYPE_TAG) {
+ if (serFldName[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
index d8bafc0..d3f7a79 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
@@ -39,15 +39,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class GetRecordFieldsEvalFactory implements ICopyEvaluatorFactory {
-
private static final long serialVersionUID = 1L;
-
private ICopyEvaluatorFactory recordEvalFactory;
private final ARecordType recordType;
- private final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
-
public GetRecordFieldsEvalFactory(ICopyEvaluatorFactory recordEvalFactory, ARecordType recordType) {
this.recordEvalFactory = recordEvalFactory;
this.recordType = recordType;
@@ -74,7 +69,7 @@
outInput0.reset();
eval0.evaluate(tuple);
- if (outInput0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (outInput0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerde.serialize(ANull.NULL, out);
} catch (HyracksDataException e) {
@@ -82,7 +77,7 @@
}
}
- if (outInput0.getByteArray()[0] != SER_RECORD_TYPE_TAG) {
+ if (outInput0.getByteArray()[0] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException("Field accessor is not defined for values of type "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput0.getByteArray()[0]));
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
index c26daca..d9323a5 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordAddFieldsDescriptor.java
@@ -62,6 +62,7 @@
public class RecordAddFieldsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new RecordAddFieldsDescriptor();
}
@@ -87,10 +88,6 @@
return new ICopyEvaluatorFactory() {
private static final long serialVersionUID = 1L;
- private final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final byte SER_ORDERED_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
-
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<ANull> nullSerDe = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
@@ -145,8 +142,8 @@
eval0.evaluate(tuple);
eval1.evaluate(tuple);
- if (abvs0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || abvs1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (abvs0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || abvs1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerDe.serialize(ANull.NULL, output.getDataOutput());
} catch (HyracksDataException e) {
@@ -156,14 +153,14 @@
}
// Make sure we get a valid record
- if (abvs0.getByteArray()[0] != SER_RECORD_TYPE_TAG) {
+ if (abvs0.getByteArray()[0] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException("Expected an ordederlist of type " + inRecType + " but "
+ "got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(abvs0.getByteArray()[0]));
}
// Make sure we get a valid list
- if (abvs1.getByteArray()[0] != SER_ORDERED_TYPE_TAG) {
+ if (abvs1.getByteArray()[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException("Expected an ordederlist of type " + inListType + " but "
+ "got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(abvs1.getByteArray()[0]));
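The add-fields evaluator above also shows the argument-guard convention shared by the record functions in this patch: if either input is NULL the evaluator serializes a null and returns, otherwise it insists that argument 0 is a RECORD and argument 1 an ORDEREDLIST before doing any work. A hedged sketch of that guard follows, with made-up tag bytes in place of the ATypeTag.SERIALIZED_*_TYPE_TAG constants and a plain writeByte standing in for nullSerDe.serialize(ANull.NULL, out).

import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public final class ArgumentGuardSketch {
    // Hypothetical tag bytes, for illustration only.
    static final byte NULL_TAG = 1;
    static final byte ORDEREDLIST_TAG = 20;
    static final byte RECORD_TAG = 24;

    // Returns true when NULL was propagated and the evaluator should return immediately.
    static boolean guardRecordAndList(byte[] arg0, byte[] arg1, DataOutput out) throws IOException {
        if (arg0[0] == NULL_TAG || arg1[0] == NULL_TAG) {
            out.writeByte(NULL_TAG); // null in, null out
            return true;
        }
        if (arg0[0] != RECORD_TAG) {
            throw new IllegalArgumentException("argument 0: expected RECORD, got tag " + arg0[0]);
        }
        if (arg1[0] != ORDEREDLIST_TAG) {
            throw new IllegalArgumentException("argument 1: expected ORDEREDLIST, got tag " + arg1[0]);
        }
        return false;
    }

    public static void main(String[] args) throws IOException {
        DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
        System.out.println(guardRecordAndList(new byte[] { NULL_TAG },
                new byte[] { ORDEREDLIST_TAG }, out)); // true: null propagated
        System.out.println(guardRecordAndList(new byte[] { RECORD_TAG },
                new byte[] { ORDEREDLIST_TAG }, out)); // false: safe to process the arguments
    }
}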
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
index 2c1a335..afd6fc1 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
@@ -64,12 +64,12 @@
public class RecordMergeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new RecordMergeDescriptor();
}
};
private static final long serialVersionUID = 1L;
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private ARecordType outRecType;
private ARecordType inRecType0;
private ARecordType inRecType1;
@@ -119,8 +119,8 @@
eval0.evaluate(tuple);
eval1.evaluate(tuple);
- if (abvs0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || abvs1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (abvs0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || abvs1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerDe.serialize(ANull.NULL, output.getDataOutput());
} catch (HyracksDataException e) {
@@ -146,7 +146,7 @@
private void mergeFields(ARecordType combinedType, ARecordVisitablePointable leftRecord,
ARecordVisitablePointable rightRecord, boolean openFromParent, int nestedLevel)
- throws IOException, AsterixException, AlgebricksException {
+ throws IOException, AsterixException, AlgebricksException {
if (rbStack.size() < (nestedLevel + 1)) {
rbStack.add(new RecordBuilder());
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsDescriptor.java
index 0add4f0..ae752f9 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsDescriptor.java
@@ -23,7 +23,6 @@
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.AOrderedListType;
import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -31,19 +30,15 @@
import org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
public class RecordRemoveFieldsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private static final byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new RecordRemoveFieldsDescriptor();
}
};
- private RecordRemoveFieldsDescriptor() {
+ private RecordRemoveFieldsDescriptor() {
}
private ARecordType outputRecordType;
@@ -56,6 +51,7 @@
inputListType = (AOrderedListType) inListType;
}
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new RecordRemoveFieldsEvalFactory(args[0], args[1], outputRecordType, inputRecType, inputListType);
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
index a74f055..441f91f 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
@@ -53,10 +53,6 @@
class RecordRemoveFieldsEvalFactory implements ICopyEvaluatorFactory {
private static final long serialVersionUID = 1L;
-
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private static final byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private static final byte SER_RECORD_TYPE_TAG = ATypeTag.RECORD.serialize();
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<ANull> nullSerDe = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(BuiltinType.ANULL);
@@ -95,7 +91,6 @@
private final ArrayBackedValueStorage tabvs = new ArrayBackedValueStorage();
private final Deque<IVisitablePointable> recordPath = new ArrayDeque<>();
-
@Override
public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
outInput0.reset();
@@ -104,7 +99,7 @@
eval0.evaluate(tuple);
eval1.evaluate(tuple);
- if (outInput0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (outInput0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
try {
nullSerDe.serialize(ANull.NULL, output.getDataOutput());
} catch (HyracksDataException e) {
@@ -113,13 +108,13 @@
return;
}
- if (outInput0.getByteArray()[0] != SER_RECORD_TYPE_TAG) {
+ if (outInput0.getByteArray()[0] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.REMOVE_FIELDS.getName()
+ ": expects input type " + inputRecType + ", but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput0.getByteArray()[0]));
}
- if (outInput1.getByteArray()[0] != SER_ORDEREDLIST_TYPE_TAG) {
+ if (outInput1.getByteArray()[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException(AsterixBuiltinFunctions.REMOVE_FIELDS.getName()
+ ": expects input type " + inputListType + ", but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(outInput1.getByteArray()[0]));
@@ -142,8 +137,8 @@
}
private void processRecord(ARecordType requiredType, ARecordVisitablePointable srp,
- AListVisitablePointable inputList, int nestedLevel) throws IOException, AsterixException,
- AlgebricksException {
+ AListVisitablePointable inputList, int nestedLevel)
+ throws IOException, AsterixException, AlgebricksException {
if (rbStack.size() < (nestedLevel + 1)) {
rbStack.add(new RecordBuilder());
}
@@ -173,8 +168,8 @@
private void addKeptFieldToSubRecord(ARecordType requiredType, IVisitablePointable fieldNamePointable,
IVisitablePointable fieldValuePointable, IVisitablePointable fieldTypePointable,
- AListVisitablePointable inputList, int nestedLevel) throws IOException, AsterixException,
- AlgebricksException {
+ AListVisitablePointable inputList, int nestedLevel)
+ throws IOException, AsterixException, AlgebricksException {
runtimeRecordTypeInfo.reset(requiredType);
int pos = runtimeRecordTypeInfo.getFieldIndex(fieldNamePointable.getByteArray(),
@@ -191,7 +186,8 @@
}
} else { // Open field
if (PointableHelper.sameType(ATypeTag.RECORD, fieldTypePointable)) {
- processRecord(null, (ARecordVisitablePointable) fieldValuePointable, inputList, nestedLevel + 1);
+ processRecord(null, (ARecordVisitablePointable) fieldValuePointable, inputList,
+ nestedLevel + 1);
tabvs.reset();
rbStack.get(nestedLevel + 1).write(tabvs.getDataOutput(), true);
rbStack.get(nestedLevel).addField(fieldNamePointable, tabvs);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
index 4c5a748..da00cd2 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
@@ -38,13 +38,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public abstract class AbstractIntervalLogicFuncDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
- // allowed input types
- private final static byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
/* (non-Javadoc)
* @see org.apache.asterix.runtime.base.IScalarFunctionDynamicDescriptor#createEvaluatorFactory(org.apache.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory[])
*/
@@ -81,24 +76,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_INTERVAL_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_INTERVAL_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects input type (INTERVAL, INTERVAL) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName()
+ + ": expects input type (INTERVAL, INTERVAL) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
- if (AIntervalSerializerDeserializer.getIntervalTimeType(argOut0.getByteArray(), 1) != AIntervalSerializerDeserializer
- .getIntervalTimeType(argOut1.getByteArray(), 1)) {
+ if (AIntervalSerializerDeserializer.getIntervalTimeType(argOut0.getByteArray(),
+ 1) != AIntervalSerializerDeserializer.getIntervalTimeType(argOut1.getByteArray(),
+ 1)) {
throw new AlgebricksException(getIdentifier().getName()
+ ": failed to compare intervals with different internal time type.");
}
@@ -107,8 +105,8 @@
AIntervalSerializerDeserializer.getIntervalStart(argOut0.getByteArray(), 1),
AIntervalSerializerDeserializer.getIntervalEnd(argOut0.getByteArray(), 1),
AIntervalSerializerDeserializer.getIntervalStart(argOut1.getByteArray(), 1),
- AIntervalSerializerDeserializer.getIntervalEnd(argOut1.getByteArray(), 1))) ? ABoolean.TRUE
- : ABoolean.FALSE;
+ AIntervalSerializerDeserializer.getIntervalEnd(argOut1.getByteArray(), 1)))
+ ? ABoolean.TRUE : ABoolean.FALSE;
booleanSerde.serialize(res, out);
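The interval-logic hunk above relies on the tagged value layout used throughout this change: byte 0 of a serialized value is its type tag and the payload starts at offset 1, which is why every AIntervalSerializerDeserializer call receives the offset 1 and why the evaluator can reject two intervals whose internal time types differ before comparing them. The toy sketch below reads such a tagged value; the tag byte and payload layout are invented for illustration and are not the real interval encoding.

import java.nio.ByteBuffer;

public final class TaggedIntervalSketch {
    static final byte FAKE_INTERVAL_TAG = 34; // illustrative only

    static long intervalStart(byte[] tagged) {
        if (tagged[0] != FAKE_INTERVAL_TAG) {
            throw new IllegalArgumentException("expected interval tag, got " + tagged[0]);
        }
        // toy payload: [start:long][end:long][internal time type:byte], starting at offset 1
        return ByteBuffer.wrap(tagged, 1, Long.BYTES).getLong();
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(1 + 2 * Long.BYTES + 1);
        buf.put(FAKE_INTERVAL_TAG).putLong(42L).putLong(100L).put((byte) 17);
        System.out.println(intervalStart(buf.array())); // 42
    }
}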
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
index 424d46b..d8d76b6 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustDateTimeForTimeZoneDescriptor.java
@@ -48,12 +48,6 @@
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.ADJUST_DATETIME_FOR_TIMEZONE;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -99,27 +93,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DATETIME/NULL for parameter 0 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type STRING/NULL for parameter 1 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
- int timezone = ATimeParserFactory
- .parseTimezonePart(utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset());
+ int timezone = ATimeParserFactory.parseTimezonePart(utf8Ptr.getByteArray(),
+ utf8Ptr.getCharStartOffset());
if (!calInstance.validateTimeZone(timezone)) {
throw new AlgebricksException(FID.getName() + ": wrong format for a time zone string!");
@@ -134,7 +128,7 @@
calInstance.getExtendStringRepUntilField(chronon, timezone, sbder, Fields.YEAR,
Fields.MILLISECOND, true);
- out.writeByte(SER_STRING_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
utf8Writer.writeUTF8(sbder.toString(), out);
} catch (Exception e1) {
throw new AlgebricksException(e1);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
index 32e16e4..527607e 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AdjustTimeForTimeZoneDescriptor.java
@@ -45,15 +45,8 @@
import org.apache.hyracks.util.string.UTF8StringWriter;
public class AdjustTimeForTimeZoneDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.ADJUST_TIME_FOR_TIMEZONE;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -98,27 +91,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type TIME/NULL for parameter 0 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type STRING/NULL for parameter 1 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
- int timezone = ATimeParserFactory
- .parseTimezonePart(argOut1.getByteArray(), utf8Ptr.getCharStartOffset());
+ int timezone = ATimeParserFactory.parseTimezonePart(argOut1.getByteArray(),
+ utf8Ptr.getCharStartOffset());
if (!calInstance.validateTimeZone(timezone)) {
throw new AlgebricksException(FID.getName() + ": wrong format for a time zone string!");
@@ -133,7 +126,7 @@
calInstance.getExtendStringRepUntilField(chronon, timezone, sbder, Fields.HOUR,
Fields.MILLISECOND, true);
- out.writeByte(SER_STRING_TYPE_TAG);
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
writer.writeUTF8(sbder.toString(), out);
} catch (Exception e1) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
index d161927..5678827 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
@@ -46,15 +46,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class CalendarDuartionFromDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.CALENDAR_DURATION_FROM_DATE;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -104,28 +97,28 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DATE/NULL for parameter 0 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DURATION/NULL for parameter 1 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
- int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
- argOut1.getByteArray(), 1);
- long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
- argOut1.getByteArray(), 1);
+ int yearMonthDurationInMonths = ADurationSerializerDeserializer
+ .getYearMonth(argOut1.getByteArray(), 1);
+ long dayTimeDurationInMs = ADurationSerializerDeserializer
+ .getDayTime(argOut1.getByteArray(), 1);
long startingTimePoint = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
* GregorianCalendarSystem.CHRONON_OF_DAY;
@@ -192,7 +185,8 @@
if (day < 0) {
boolean isLeapYear = calInstanct.isLeapYear(year1);
// need to "borrow" the days in previous month to make the day positive; when month is 1 (Jan), Dec will be borrowed
- day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
+ day += (isLeapYear)
+ ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
: (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[(12 + month1 - 2) % 12]);
month -= 1;
}
@@ -204,17 +198,16 @@
if (negative) {
aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
- -1
- * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
- * GregorianCalendarSystem.CHRONON_OF_HOUR + min
- * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
- * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+ -1 * (day * GregorianCalendarSystem.CHRONON_OF_DAY
+ + hour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + min * GregorianCalendarSystem.CHRONON_OF_MINUTE
+ + sec * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
} else {
- aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
- * GregorianCalendarSystem.CHRONON_OF_DAY + hour
- * GregorianCalendarSystem.CHRONON_OF_HOUR + min
- * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
- * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+ aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month,
+ day * GregorianCalendarSystem.CHRONON_OF_DAY
+ + hour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + min * GregorianCalendarSystem.CHRONON_OF_MINUTE
+ + sec * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
}
}
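The setValue(...) calls reformatted above compose the day-time part of a duration as day * CHRONON_OF_DAY + hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE + sec * CHRONON_OF_SECOND + ms. The small worked sketch below runs that arithmetic under the assumption that the chronon unit is milliseconds; the constant values are assumptions for the example, not read from GregorianCalendarSystem.

public final class DayTimeChrononSketch {
    // Assumed millisecond-based constants, standing in for GregorianCalendarSystem's.
    static final long CHRONON_OF_DAY = 86_400_000L;
    static final long CHRONON_OF_HOUR = 3_600_000L;
    static final long CHRONON_OF_MINUTE = 60_000L;
    static final long CHRONON_OF_SECOND = 1_000L;

    static long dayTimeMillis(long day, long hour, long min, long sec, long ms) {
        return day * CHRONON_OF_DAY + hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE
                + sec * CHRONON_OF_SECOND + ms;
    }

    public static void main(String[] args) {
        // 2 days, 3 hours, 4 minutes, 5 seconds, 6 ms -> 183845006 ms
        System.out.println(dayTimeMillis(2, 3, 4, 5, 6));
    }
}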
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
index 54f80b3..037df2e 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
@@ -62,15 +62,8 @@
* <p/>
*/
public class CalendarDurationFromDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.CALENDAR_DURATION_FROM_DATETIME;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -118,28 +111,28 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DATETIME/NULL for parameter 0 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- if (argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+ if (argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects type DURATION/NULL for parameter 1 but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
- int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(
- argOut1.getByteArray(), 1);
- long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(
- argOut1.getByteArray(), 1);
+ int yearMonthDurationInMonths = ADurationSerializerDeserializer
+ .getYearMonth(argOut1.getByteArray(), 1);
+ long dayTimeDurationInMs = ADurationSerializerDeserializer
+ .getDayTime(argOut1.getByteArray(), 1);
long startingTimePoint = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(),
1);
@@ -206,7 +199,8 @@
if (day < 0) {
boolean isLeapYear = calInstanct.isLeapYear(year1);
// need to "borrow" the days in previous month to make the day positive; when month is 1 (Jan), Dec will be borrowed
- day += (isLeapYear) ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
+ day += (isLeapYear)
+ ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
: (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[(12 + month1 - 2) % 12]);
month -= 1;
}
@@ -218,17 +212,16 @@
if (negative) {
aDuration.setValue(-1 * (year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month),
- -1
- * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour
- * GregorianCalendarSystem.CHRONON_OF_HOUR + min
- * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
- * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
+ -1 * (day * GregorianCalendarSystem.CHRONON_OF_DAY
+ + hour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + min * GregorianCalendarSystem.CHRONON_OF_MINUTE
+ + sec * GregorianCalendarSystem.CHRONON_OF_SECOND + ms));
} else {
- aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month, day
- * GregorianCalendarSystem.CHRONON_OF_DAY + hour
- * GregorianCalendarSystem.CHRONON_OF_HOUR + min
- * GregorianCalendarSystem.CHRONON_OF_MINUTE + sec
- * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
+ aDuration.setValue(year * GregorianCalendarSystem.MONTHS_IN_A_YEAR + month,
+ day * GregorianCalendarSystem.CHRONON_OF_DAY
+ + hour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + min * GregorianCalendarSystem.CHRONON_OF_MINUTE
+ + sec * GregorianCalendarSystem.CHRONON_OF_SECOND + ms);
}
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
index c7f2d65..5927620 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
@@ -44,14 +44,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class DateFromDatetimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DATE_FROM_DATETIME;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -90,19 +84,19 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
- if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ if (argOut.getByteArray()[0] != ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
throw new AlgebricksException(
- FID.getName()
- + ": expects input type DATETIME/NULL but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
- .getByteArray()[0]));
+ FID.getName() + ": expects input type DATETIME/NULL but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut.getByteArray()[0]));
}
- long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
- argOut.getByteArray(), 1);
- int dateChrononInDays = (int) (datetimeChronon / GregorianCalendarSystem.CHRONON_OF_DAY);
+ long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(argOut.getByteArray(),
+ 1);
+ int dateChrononInDays = (int) (datetimeChronon
+ / GregorianCalendarSystem.CHRONON_OF_DAY);
if (dateChrononInDays < 0
&& datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) {
dateChrononInDays -= 1;
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
index 8a82902..d96ffe9 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
@@ -45,10 +45,6 @@
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DATE_FROM_UNIX_TIME_IN_DAYS;
-
- // allowed input types
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -86,7 +82,7 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
aDate.setValue(ATypeHierarchy.getIntegerValue(argOut.getByteArray(), 0));
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
index 02a9fa9..d5f04ab 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
@@ -45,15 +45,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class DatetimeFromDateAndTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DATETIME_FROM_DATE_TIME;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -100,25 +93,23 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
- if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG
- && argOut1.getByteArray()[0] != SER_TIME_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATE_TYPE_TAG
+ && argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
throw new AlgebricksException(
- FID.getName()
- + ": expects input type (DATE/NULL, TIME/NULL) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0
- .getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1
- .getByteArray()[0]) + ").");
+ FID.getName() + ": expects input type (DATE/NULL, TIME/NULL) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ").");
}
- long datetimeChronon = ADateSerializerDeserializer
- .getChronon(argOut0.getByteArray(), 1)
+ long datetimeChronon = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
* GregorianCalendarSystem.CHRONON_OF_DAY
+ ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
index a144172..48e4136 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
@@ -45,15 +45,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class DayOfWeekDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DAY_OF_WEEK;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
-
// Fixed week day anchor: Thursday, 1 January 1970
private final static int ANCHOR_WEEKDAY = 4;
@@ -95,24 +88,23 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
int daysSinceAnchor;
int reminder = 0;
- if (argOut.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
daysSinceAnchor = (int) (ADateTimeSerializerDeserializer.getChronon(
argOut.getByteArray(), 1) / GregorianCalendarSystem.CHRONON_OF_DAY);
reminder = (int) (ADateTimeSerializerDeserializer.getChronon(argOut.getByteArray(),
1) % GregorianCalendarSystem.CHRONON_OF_DAY);
- } else if (argOut.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ } else if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
daysSinceAnchor = ADateSerializerDeserializer.getChronon(argOut.getByteArray(), 1);
} else {
throw new AlgebricksException(
- FID.getName()
- + ": expects input type DATETIME/DATE/NULL but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
- .getByteArray()[0]));
+ FID.getName() + ": expects input type DATETIME/DATE/NULL but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut.getByteArray()[0]));
}
// adjust the day before 1970-01-01
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
index e29f5d2..1a98f3c 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
@@ -46,11 +46,6 @@
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier GREATER_THAN_FID = AsterixBuiltinFunctions.DAY_TIME_DURATION_GREATER_THAN;
public final static FunctionIdentifier LESS_THAN_FID = AsterixBuiltinFunctions.DAY_TIME_DURATION_LESS_THAN;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
private final boolean isGreaterThan;
private DayTimeDurationComparatorDescriptor(boolean isGreaterThan) {
@@ -105,29 +100,31 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects type NULL/DURATION, NULL/DURATION but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + " and "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName()
+ + ": expects type NULL/DURATION, NULL/DURATION but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + " and " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0]));
}
if ((ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1) != 0)
|| (ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1) != 0)) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": only year-month durations are allowed.");
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": only year-month durations are allowed.");
}
- if (ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) > ADurationSerializerDeserializer
- .getDayTime(argOut1.getByteArray(), 1)) {
+ if (ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(),
+ 1) > ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1)) {
boolSerde.serialize(isGreaterThan ? ABoolean.TRUE : ABoolean.FALSE, out);
} else {
boolSerde.serialize(isGreaterThan ? ABoolean.FALSE : ABoolean.TRUE, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
index 1333515..c1493bd 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
@@ -42,14 +42,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class DurationEqualDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_EQUAL;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -90,25 +84,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
- throw new AlgebricksException(FID.getName()
- + ": expects type NULL/DURATION, NULL/DURATION but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + " and "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ throw new AlgebricksException(
+ FID.getName() + ": expects type NULL/DURATION, NULL/DURATION but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + " and " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0]));
}
- if ((ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) == ADurationSerializerDeserializer
- .getDayTime(argOut1.getByteArray(), 1))
- && (ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1) == ADurationSerializerDeserializer
- .getYearMonth(argOut1.getByteArray(), 1))) {
+ if ((ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(),
+ 1) == ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1))
+ && (ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(),
+ 1) == ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(),
+ 1))) {
boolSerde.serialize(ABoolean.TRUE, out);
} else {
boolSerde.serialize(ABoolean.FALSE, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
index ce00130..b69a257 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
@@ -44,14 +44,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class DurationFromIntervalDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_FROM_INTERVAL;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -91,10 +85,10 @@
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
- } else if (argOut.getByteArray()[0] != SER_INTERVAL_TYPE_TAG) {
+ } else if (argOut.getByteArray()[0] != ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects INTERVAL/NULL as the input but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut.getByteArray()[0]));
@@ -102,10 +96,10 @@
long chrononStart = AIntervalSerializerDeserializer.getIntervalStart(argOut.getByteArray(),
1);
long chrononEnd = AIntervalSerializerDeserializer.getIntervalEnd(argOut.getByteArray(), 1);
- byte intervalTypeTag = AIntervalSerializerDeserializer.getIntervalTimeType(
- argOut.getByteArray(), 1);
+ byte intervalTypeTag = AIntervalSerializerDeserializer
+ .getIntervalTimeType(argOut.getByteArray(), 1);
- if (intervalTypeTag == ATypeTag.DATE.serialize()) {
+ if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
chrononStart *= GregorianCalendarSystem.CHRONON_OF_DAY;
chrononEnd *= GregorianCalendarSystem.CHRONON_OF_DAY;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
index f3b4616..f66b56b 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
@@ -45,10 +45,6 @@
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_FROM_MONTHS;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -87,7 +83,7 @@
eval0.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
index 3f59dbb..2c8ebfd 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
@@ -43,14 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class GetDayTimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.GET_DAY_TIME_DURATION;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,18 +83,18 @@
eval0.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName() + ": expects NULL/DURATION, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- aDayTimeDuration.setMilliseconds(ADurationSerializerDeserializer.getDayTime(
- argOut0.getByteArray(), 1));
+ aDayTimeDuration.setMilliseconds(
+ ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1));
dayTimeDurationSerde.serialize(aDayTimeDuration, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
index 5678e10..8e7f0c6 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
@@ -43,12 +43,7 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class GetOverlappingIntervalDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
- private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
public IFunctionDescriptor createFunctionDescriptor() {
@@ -56,6 +51,7 @@
}
};
+ @Override
public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
return new ICopyEvaluatorFactory() {
@@ -88,22 +84,21 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_INTERVAL_TYPE_TAG
+ } else if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG
&& argOut0.getByteArray()[0] == argOut1.getByteArray()[0]) {
- byte type0 = AIntervalSerializerDeserializer.getIntervalTimeType(
- argOut0.getByteArray(), 1);
- byte type1 = AIntervalSerializerDeserializer.getIntervalTimeType(
- argOut1.getByteArray(), 1);
+ byte type0 = AIntervalSerializerDeserializer.getIntervalTimeType(argOut0.getByteArray(),
+ 1);
+ byte type1 = AIntervalSerializerDeserializer.getIntervalTimeType(argOut1.getByteArray(),
+ 1);
if (type0 != type1) {
- throw new AlgebricksException(
- getIdentifier().getName()
- + ": expecting two (nullable) interval values with the same internal time type but got interval of "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(type0)
- + " and interval of "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(type1));
+ throw new AlgebricksException(getIdentifier().getName()
+ + ": expecting two (nullable) interval values with the same internal time type but got interval of "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(type0)
+ + " and interval of "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(type1));
}
long start0 = AIntervalSerializerDeserializer.getIntervalStart(argOut0.getByteArray(),
@@ -124,11 +119,13 @@
nullSerde.serialize(ANull.NULL, out);
}
} else {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expecting two (nullable) interval values but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + " and "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[1]));
+ throw new AlgebricksException(
+ getIdentifier().getName()
+ + ": expecting two (nullable) interval values but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + " and " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut0.getByteArray()[1]));
}
} catch (HyracksDataException hex) {
throw new AlgebricksException(hex);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
index f788879..84fd408 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
@@ -43,14 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class GetYearMonthDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.GET_YEAR_MONTH_DURATION;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,18 +83,18 @@
eval0.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName() + ": expects NULL/DURATION, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- aYearMonthDuration.setMonths(ADurationSerializerDeserializer.getYearMonth(
- argOut0.getByteArray(), 1));
+ aYearMonthDuration
+ .setMonths(ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1));
yearMonthDurationSerde.serialize(aYearMonthDuration, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
index ecaf1bd..a16c59a 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
@@ -43,14 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class MillisecondsFromDayTimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.MILLISECONDS_FROM_DAY_TIME_DURATION;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DAY_TIME_DURATION_TYPE_TAG = ATypeTag.DAYTIMEDURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,18 +83,19 @@
eval0.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DAY_TIME_DURATION_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DAY_TIME_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/DAY-TIME-DURATION, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- aInt64.setValue(ADayTimeDurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1));
+ aInt64.setValue(
+ ADayTimeDurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1));
int64Serde.serialize(aInt64, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
index be1f0ee..e7c7863 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
@@ -43,14 +43,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class MonthsFromYearMonthDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.MONTHS_FROM_YEAR_MONTH_DURATION;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_YEARMONTHDURATION_TYPE_TAG = ATypeTag.YEARMONTHDURATION.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,19 +83,19 @@
eval0.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_YEARMONTHDURATION_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_YEAR_MONTH_DURATION_TYPE_TAG) {
throw new AlgebricksException(FID.getName()
+ ": expects NULL/YEAR-MONTH-DURATION, but got "
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
- aInt64.setValue(AYearMonthDurationSerializerDeserializer.getYearMonth(
- argOut0.getByteArray(), 1));
+ aInt64.setValue(
+ AYearMonthDurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1));
int64Serde.serialize(aInt64, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
index 2c05989..917f7c3 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
@@ -112,8 +112,8 @@
if (type0 == ATypeTag.INTERVAL) {
intervalStart = AIntervalSerializerDeserializer.getIntervalStart(argOut0.getByteArray(), 1);
intervalEnd = AIntervalSerializerDeserializer.getIntervalEnd(argOut0.getByteArray(), 1);
- intervalTypeTag = AIntervalSerializerDeserializer.getIntervalTimeType(
- argOut0.getByteArray(), 1);
+ intervalTypeTag = AIntervalSerializerDeserializer
+ .getIntervalTimeType(argOut0.getByteArray(), 1);
} else if (type0 == ATypeTag.NULL) {
try {
nullSerde.serialize(ANull.NULL, out);
@@ -133,10 +133,10 @@
ATypeTag type1 = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]);
if (intervalTypeTag != type1.serialize()) {
- if (intervalTypeTag != ATypeTag.NULL.serialize() && type1 != ATypeTag.NULL)
- throw new AlgebricksException(getIdentifier().getName()
- + ": expecting compatible type to " + type0 + "(" + intervalTypeTag
- + ") for the second argument but got " + type1);
+ if (intervalTypeTag != ATypeTag.SERIALIZED_NULL_TYPE_TAG && type1 != ATypeTag.NULL)
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expecting compatible type to " + type0 + "("
+ + intervalTypeTag + ") for the second argument but got " + type1);
}
long anchorTime = 0;
@@ -159,9 +159,9 @@
}
return;
default:
- throw new AlgebricksException(getIdentifier().getName()
- + ": expecting compatible type to " + type0 + "(" + intervalTypeTag
- + ") for the second argument but got " + type1);
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expecting compatible type to " + type0 + "("
+ + intervalTypeTag + ") for the second argument but got " + type1);
}
argOut2.reset();
@@ -174,8 +174,8 @@
long firstBinIndex;
switch (type2) {
case YEARMONTHDURATION:
- yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(
- argOut2.getByteArray(), 1);
+ yearMonth = AYearMonthDurationSerializerDeserializer
+ .getYearMonth(argOut2.getByteArray(), 1);
int yearStart = GREG_CAL.getYear(anchorTime);
int monthStart = GREG_CAL.getMonthOfYear(anchorTime, yearStart);
@@ -188,13 +188,13 @@
+ ((totalMonths < 0 && totalMonths % yearMonth != 0) ? -1 : 0);
if (firstBinIndex > Integer.MAX_VALUE) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": Overflowing time value to be binned!");
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": Overflowing time value to be binned!");
}
if (firstBinIndex < Integer.MIN_VALUE) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": Underflowing time value to be binned!");
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": Underflowing time value to be binned!");
}
break;
@@ -216,10 +216,9 @@
return;
default:
- throw new AlgebricksException(
- getIdentifier().getName()
- + ": expecting YEARMONTHDURATION/DAYTIMEDURATION for the thrid argument but got "
- + type2);
+ throw new AlgebricksException(getIdentifier().getName()
+ + ": expecting YEARMONTHDURATION/DAYTIMEDURATION for the thrid argument but got "
+ + type2);
}
long binStartChronon, binEndChronon;
@@ -228,25 +227,25 @@
listBuilder.reset(intListType);
try {
- if (intervalTypeTag == ATypeTag.DATE.serialize()) {
+ if (intervalTypeTag == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
binOffset = 0;
do {
- binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset),
- false);
- binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * ((int) (firstBinIndex + binOffset) + 1), dayTime
- * ((firstBinIndex + binOffset) + 1), false);
- binStartChronon = binStartChronon
- / GregorianCalendarSystem.CHRONON_OF_DAY
- + ((binStartChronon < 0 && binStartChronon
- % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
- binEndChronon = binEndChronon
- / GregorianCalendarSystem.CHRONON_OF_DAY
- + ((binEndChronon < 0 && binEndChronon
- % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1 : 0);
+ binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * (int) (firstBinIndex + binOffset),
+ dayTime * (firstBinIndex + binOffset), false);
+ binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * ((int) (firstBinIndex + binOffset) + 1),
+ dayTime * ((firstBinIndex + binOffset) + 1), false);
+ binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY
+ + ((binStartChronon < 0
+ && binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0)
+ ? -1 : 0);
+ binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY
+ + ((binEndChronon < 0
+ && binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
+ : 0);
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
@@ -254,7 +253,7 @@
binOffset++;
} while (binEndChronon < intervalEnd);
- } else if (intervalTypeTag == ATypeTag.TIME.serialize()) {
+ } else if (intervalTypeTag == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
if (yearMonth != 0) {
throw new AlgebricksException(getIdentifier().getName()
+ ": cannot create year-month bin for a time value");
@@ -262,53 +261,52 @@
binOffset = 0;
- binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset),
- true);
- binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * ((int) (firstBinIndex + binOffset) + 1), dayTime
- * ((firstBinIndex + binOffset) + 1), true);
+ binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * (int) (firstBinIndex + binOffset),
+ dayTime * (firstBinIndex + binOffset), true);
+ binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * ((int) (firstBinIndex + binOffset) + 1),
+ dayTime * ((firstBinIndex + binOffset) + 1), true);
if (binStartChronon < 0 || binStartChronon >= GregorianCalendarSystem.CHRONON_OF_DAY) {
// avoid the case where a time bin is before 00:00:00 or no earlier than 24:00:00
- throw new AlgebricksException(
- getIdentifier().getName()
- + ": reaches a bin with the end earlier than the start; probably the window is beyond the time scope. Maybe use DATETIME?");
+ throw new AlgebricksException(getIdentifier().getName()
+ + ": reaches a bin with the end earlier than the start; probably the window is beyond the time scope. Maybe use DATETIME?");
}
- while (!((binStartChronon < intervalStart && binEndChronon <= intervalStart) || (binStartChronon >= intervalEnd && binEndChronon > intervalEnd))) {
+ while (!((binStartChronon < intervalStart && binEndChronon <= intervalStart)
+ || (binStartChronon >= intervalEnd && binEndChronon > intervalEnd))) {
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
listBuilder.addItem(listStorage);
binOffset++;
- binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset),
- true);
- binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * ((int) (firstBinIndex + binOffset) + 1), dayTime
- * ((firstBinIndex + binOffset) + 1), true);
+ binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * (int) (firstBinIndex + binOffset),
+ dayTime * (firstBinIndex + binOffset), true);
+ binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * ((int) (firstBinIndex + binOffset) + 1),
+ dayTime * ((firstBinIndex + binOffset) + 1), true);
if (binStartChronon == GregorianCalendarSystem.CHRONON_OF_DAY) {
break;
}
if (binEndChronon < binStartChronon) {
- throw new AlgebricksException(
- getIdentifier().getName()
- + ": reaches a bin with the end earlier than the start; probably the window is beyond the time scope. Maybe use DATETIME?");
+ throw new AlgebricksException(getIdentifier().getName()
+ + ": reaches a bin with the end earlier than the start; probably the window is beyond the time scope. Maybe use DATETIME?");
}
}
- } else if (intervalTypeTag == ATypeTag.DATETIME.serialize()) {
+ } else if (intervalTypeTag == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
binOffset = 0;
do {
- binStartChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * (int) (firstBinIndex + binOffset), dayTime * (firstBinIndex + binOffset),
- false);
- binEndChronon = DurationArithmeticOperations.addDuration(anchorTime, yearMonth
- * ((int) (firstBinIndex + binOffset) + 1), dayTime
- * ((firstBinIndex + binOffset) + 1), false);
+ binStartChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * (int) (firstBinIndex + binOffset),
+ dayTime * (firstBinIndex + binOffset), false);
+ binEndChronon = DurationArithmeticOperations.addDuration(anchorTime,
+ yearMonth * ((int) (firstBinIndex + binOffset) + 1),
+ dayTime * ((firstBinIndex + binOffset) + 1), false);
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
listStorage.reset();
intervalSerde.serialize(aInterval, listStorage.getDataOutput());
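Illustrative sketch (not part of this patch): the reformatted arithmetic above places bin i at [anchor + i * binSize, anchor + (i + 1) * binSize), with i starting at firstBinIndex. Assuming addDuration reduces to plain chronon addition when the year-month component is zero, the day-time-only case looks roughly like the helper below (names are for illustration only).

    // Illustrative sketch of the bin-boundary arithmetic used above for day-time bins.
    final class OverlapBinsSketch {
        // anchor and dayTime are chronons (milliseconds); binIndex = firstBinIndex + binOffset.
        static long binStart(long anchor, long dayTime, long binIndex) {
            return anchor + dayTime * binIndex;
        }

        static long binEnd(long anchor, long dayTime, long binIndex) {
            return anchor + dayTime * (binIndex + 1);
        }

        // Bins keep being emitted while they still overlap [intervalStart, intervalEnd),
        // roughly matching the while-condition rewrapped above.
        static boolean overlaps(long binStart, long binEnd, long intervalStart, long intervalEnd) {
            return !(binEnd <= intervalStart || binStart >= intervalEnd);
        }
    }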
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
index b1fbf7c..d4c93ff 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
@@ -51,14 +51,9 @@
* Multiple format strings can be used by separating them using <b>|(bar)</b>, and the parsing will be successful only when the format string has the <b>exact</b> match with the given data string. This means that a time string like <it>08:23:12 AM</it> will not be valid for the format string <it>h:m:s</it> as there is no AM/PM format character in the format string.
*/
public class ParseDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PARSE_DATE;
-
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -101,20 +96,21 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects two strings but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects two strings but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int start0 = utf8Ptr.getCharStartOffset();
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
index 2a7be96..bdd0783 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
@@ -46,14 +46,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ParseDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PARSE_DATETIME;
-
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -96,26 +91,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects two strings but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects two strings but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
- utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength()-1);
+ utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int start0 = utf8Ptr.getCharStartOffset();
int length0 = utf8Ptr.getUTF8Length();
- utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength()-1);
+ utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int start1 = utf8Ptr.getCharStartOffset();
int length1 = utf8Ptr.getUTF8Length();
long chronon = 0;
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
index 007ddca..db6e8b3 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
@@ -46,12 +46,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class ParseTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PARSE_TIME;
-
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@@ -96,26 +92,27 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_STRING_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects two strings but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects two strings but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
- utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength()-1);
+ utf8Ptr.set(argOut0.getByteArray(), 1, argOut0.getLength() - 1);
int start0 = utf8Ptr.getCharStartOffset();
int length0 = utf8Ptr.getUTF8Length();
- utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() -1);
+ utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
int start1 = utf8Ptr.getCharStartOffset();
int length1 = utf8Ptr.getUTF8Length();
long chronon = 0;
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateDescriptor.java
index d731fff..d15534d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateDescriptor.java
@@ -46,13 +46,8 @@
import org.apache.hyracks.util.string.UTF8StringWriter;
public class PrintDateDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PRINT_DATE;
-
- private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@@ -94,20 +89,21 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DATE_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects (DATE, STRING) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATE_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects (DATE, STRING) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
long chronon = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
@@ -118,7 +114,7 @@
DT_UTILS.printDateTime(chronon, 0, argOut1.getByteArray(), 1 + offset, formatLength, sbder,
DateTimeParseMode.DATE_ONLY);
- out.writeByte(ATypeTag.STRING.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
utf8Writer.writeUTF8(sbder.toString(), out);
} catch (IOException ex) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateTimeDescriptor.java
index a31e1e51..ba35a42 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintDateTimeDescriptor.java
@@ -45,13 +45,8 @@
import org.apache.hyracks.util.string.UTF8StringWriter;
public class PrintDateTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PRINT_DATETIME;
-
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@@ -94,20 +89,21 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DATETIME_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects (DATETIME, STRING) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DATETIME_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects (DATETIME, STRING) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
long chronon = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
utf8Ptr.set(argOut1.getByteArray(), 1, argOut1.getLength() - 1);
@@ -116,7 +112,7 @@
DT_UTILS.printDateTime(chronon, 0, utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset(),
formatLength, sbder, DateTimeParseMode.DATETIME);
- out.writeByte(ATypeTag.STRING.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
utf8Writer.writeUTF8(sbder.toString(), out);
} catch (IOException ex) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintTimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintTimeDescriptor.java
index 961c44d..f152401 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintTimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/PrintTimeDescriptor.java
@@ -45,13 +45,8 @@
import org.apache.hyracks.util.string.UTF8StringWriter;
public class PrintTimeDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.PRINT_TIME;
-
- private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
- private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
private final static DateTimeFormatUtils DT_UTILS = DateTimeFormatUtils.getInstance();
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@@ -94,20 +89,21 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_TIME_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_STRING_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects (TIME, STRING) but got ("
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + ", "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0])
- + ")");
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_TIME_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": expects (TIME, STRING) but got ("
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + ", " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0])
+ + ")");
}
long chronon = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
@@ -117,7 +113,7 @@
DT_UTILS.printDateTime(chronon, 0, utf8Ptr.getByteArray(), utf8Ptr.getCharStartOffset(),
formatLength, sbder, DateTimeParseMode.TIME_ONLY);
- out.writeByte(ATypeTag.STRING.serialize());
+ out.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
writer.writeUTF8(sbder.toString(), out);
} catch (IOException ex) {
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
index 741eeb1..384cfd7 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
@@ -47,11 +47,6 @@
private static final long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.TIME_FROM_DATETIME;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -93,18 +88,17 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
- if (argOut.getByteArray()[0] != SER_DATETIME_TYPE_TAG) {
+ if (argOut.getByteArray()[0] != ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
throw new AlgebricksException(
- FID.getName()
- + ": expects input type DATETIME/NULL but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut
- .getByteArray()[0]));
+ FID.getName() + ": expects input type DATETIME/NULL but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut.getByteArray()[0]));
}
- long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(
- argOut.getByteArray(), 1);
+ long datetimeChronon = ADateTimeSerializerDeserializer.getChronon(argOut.getByteArray(),
+ 1);
int timeChronon = (int) (datetimeChronon % GregorianCalendarSystem.CHRONON_OF_DAY);
if (timeChronon < 0) {
timeChronon += GregorianCalendarSystem.CHRONON_OF_DAY;
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
index 6170726..3744873 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
@@ -42,13 +42,8 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class TimeFromUnixTimeInMsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier FID = AsterixBuiltinFunctions.TIME_FROM_UNIX_TIME_IN_MS;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
@@ -89,7 +84,7 @@
argOut.reset();
eval.evaluate(tuple);
try {
- if (argOut.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
} else {
aTime.setValue(ATypeHierarchy.getIntegerValue(argOut.getByteArray(), 0));
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
index f23fa79..f75665d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
@@ -42,15 +42,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class YearMonthDurationComparatorDecriptor extends AbstractScalarFunctionDynamicDescriptor {
-
private final static long serialVersionUID = 1L;
public final static FunctionIdentifier GREATER_THAN_FID = AsterixBuiltinFunctions.YEAR_MONTH_DURATION_GREATER_THAN;
public final static FunctionIdentifier LESS_THAN_FID = AsterixBuiltinFunctions.YEAR_MONTH_DURATION_LESS_THAN;
-
- // allowed input types
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
- private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
-
private final boolean isGreaterThan;
private YearMonthDurationComparatorDecriptor(boolean isGreaterThan) {
@@ -105,29 +99,31 @@
eval1.evaluate(tuple);
try {
- if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
- || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+ if (argOut0.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG
+ || argOut1.getByteArray()[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
- || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": expects type NULL/DURATION, NULL/DURATION but got "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
- + " and "
- + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+ if (argOut0.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG
+ || argOut1.getByteArray()[0] != ATypeTag.SERIALIZED_DURATION_TYPE_TAG) {
+ throw new AlgebricksException(
+ getIdentifier().getName()
+ + ": expects type NULL/DURATION, NULL/DURATION but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(
+ argOut0.getByteArray()[0])
+ + " and " + EnumDeserializer.ATYPETAGDESERIALIZER
+ .deserialize(argOut1.getByteArray()[0]));
}
if ((ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) != 0)
|| (ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1) != 0)) {
- throw new AlgebricksException(getIdentifier().getName()
- + ": only year-month durations are allowed.");
+ throw new AlgebricksException(
+ getIdentifier().getName() + ": only year-month durations are allowed.");
}
- if (ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1) > ADurationSerializerDeserializer
- .getYearMonth(argOut1.getByteArray(), 1)) {
+ if (ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(),
+ 1) > ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1)) {
boolSerde.serialize(isGreaterThan ? ABoolean.TRUE : ABoolean.FALSE, out);
} else {
boolSerde.serialize(isGreaterThan ? ABoolean.FALSE : ABoolean.TRUE, out);
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMInvertedIndexUpsertOperatorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMInvertedIndexUpsertOperatorDescriptor.java
new file mode 100644
index 0000000..3db3de2
--- /dev/null
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMInvertedIndexUpsertOperatorDescriptor.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.operators;
+
+import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import org.apache.hyracks.storage.common.IStorageManagerInterface;
+
+public class AsterixLSMInvertedIndexUpsertOperatorDescriptor
+ extends AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ private final int[] prevFieldPermutation;
+
+ public AsterixLSMInvertedIndexUpsertOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IFileSplitProvider fileSplitProvider,
+ IIndexLifecycleManagerProvider lifecycleManagerProvider, ITypeTraits[] tokenTypeTraits,
+ IBinaryComparatorFactory[] tokenComparatorFactories, ITypeTraits[] invListsTypeTraits,
+ IBinaryComparatorFactory[] invListComparatorFactories, IBinaryTokenizerFactory tokenizerFactory,
+ int[] fieldPermutation, IIndexDataflowHelperFactory dataflowHelperFactory,
+ ITupleFilterFactory tupleFilterFactory, IModificationOperationCallbackFactory modificationOpCallbackFactory,
+ String indexName, int[] prevFieldPermutation) {
+ super(spec, recDesc, storageManager, fileSplitProvider, lifecycleManagerProvider, tokenTypeTraits,
+ tokenComparatorFactories, invListsTypeTraits, invListComparatorFactories, tokenizerFactory,
+ fieldPermutation, IndexOperation.UPSERT, dataflowHelperFactory, tupleFilterFactory,
+ modificationOpCallbackFactory, indexName);
+ this.prevFieldPermutation = prevFieldPermutation;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new AsterixLSMSecondaryUpsertOperatorNodePushable(this, ctx, partition, fieldPermutation,
+ recordDescProvider, prevFieldPermutation);
+ }
+}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
new file mode 100644
index 0000000..f35f4d6
--- /dev/null
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
@@ -0,0 +1,275 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.operators;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.common.dataflow.AsterixLSMIndexUtil;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.om.pointables.nonvisitor.ARecordPointable;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.INullWriter;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
+import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
+import org.apache.hyracks.storage.am.common.api.IIndexCursor;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback.Operation;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
+import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
+import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree;
+import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMIndexInsertUpdateDeleteOperatorNodePushable;
+import org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
+
+public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertUpdateDeleteOperatorNodePushable {
+
+ private PermutingFrameTupleReference key;
+ private MultiComparator keySearchCmp;
+ private ArrayTupleBuilder nullTupleBuilder;
+ private INullWriter nullWriter;
+ private ArrayTupleBuilder tb;
+ private DataOutput dos;
+ private LSMBTree lsmIndex;
+ private RangePredicate searchPred;
+ private IIndexCursor cursor;
+ private ITupleReference prevTuple;
+ private int numOfPrimaryKeys;
+ boolean isFiltered = false;
+ private ArrayTupleReference prevTupleWithFilter = new ArrayTupleReference();
+ private ArrayTupleBuilder prevRecWithPKWithFilterValue;
+ private ARecordType recordType;
+ private int presetFieldIndex = -1;
+ private ARecordPointable recPointable;
+ private DataOutput prevDos;
+
+ public AsterixLSMPrimaryUpsertOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+ int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, int numOfPrimaryKeys,
+ ARecordType recordType, int filterFieldIndex) throws HyracksDataException {
+ super(opDesc, ctx, partition, fieldPermutation, recordDescProvider, IndexOperation.UPSERT);
+ // initialize nullWriter
+ this.nullWriter = opDesc.getNullWriterFactory().createNullWriter();
+ // The search key should only contain the primary key fields and must not use the full field permutation.
+ this.key = new PermutingFrameTupleReference();
+ int[] searchKeyPermutations = new int[numOfPrimaryKeys];
+ for (int i = 0; i < searchKeyPermutations.length; i++) {
+ searchKeyPermutations[i] = fieldPermutation[i];
+ }
+ key.setFieldPermutation(searchKeyPermutations);
+ this.numOfPrimaryKeys = numOfPrimaryKeys;
+ if (fieldPermutation.length > numOfPrimaryKeys + 1) {
+ isFiltered = true;
+ this.recordType = recordType;
+ this.presetFieldIndex = filterFieldIndex;
+ this.recPointable = (ARecordPointable) ARecordPointable.FACTORY.createPointable();
+ this.prevRecWithPKWithFilterValue = new ArrayTupleBuilder(fieldPermutation.length);
+ this.prevDos = prevRecWithPKWithFilterValue.getDataOutput();
+ }
+ }
+
+ // The field permutation holds [pk locations, record location, optional: filter location]; together with
+ // the index itself, that should be all the information this operator needs.
+ // We reuse the primary index opTracker and the secondary indexes' callbacks for insert/delete, since the
+ // lock has already been obtained through the searchForUpsert operation.
+
+ @Override
+ public void open() throws HyracksDataException {
+ RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
+ accessor = new FrameTupleAccessor(inputRecDesc);
+ writeBuffer = new VSizeFrame(ctx);
+ writer.open();
+ indexHelper.open();
+ lsmIndex = (LSMBTree) indexHelper.getIndexInstance();
+
+ try {
+ nullTupleBuilder = new ArrayTupleBuilder(1);
+ DataOutput out = nullTupleBuilder.getDataOutput();
+ try {
+ nullWriter.writeNull(out);
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ nullTupleBuilder.addFieldEndOffset();
+ searchPred = createSearchPredicate();
+ tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
+ dos = tb.getDataOutput();
+ appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
+ modCallback = opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(
+ indexHelper.getResourcePath(), indexHelper.getResourceID(), lsmIndex, ctx);
+
+ indexAccessor = lsmIndex.createAccessor(modCallback, opDesc.getSearchOpCallbackFactory()
+ .createSearchOperationCallback(indexHelper.getResourceID(), ctx));
+ cursor = createCursor();
+ frameTuple = new FrameTupleReference();
+ IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+ .getApplicationContext().getApplicationObject();
+ AsterixLSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager());
+ } catch (Exception e) {
+ indexHelper.close();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ private void resetSearchPredicate(int tupleIndex) {
+ key.reset(accessor, tupleIndex);
+ }
+
+ protected void writeOutput(int tupleIndex) throws Exception {
+ tb.reset();
+ frameTuple.reset(accessor, tupleIndex);
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
+ if (prevTuple != null) {
+ dos.write(prevTuple.getFieldData(numOfPrimaryKeys), prevTuple.getFieldStart(numOfPrimaryKeys),
+ prevTuple.getFieldLength(numOfPrimaryKeys));
+ tb.addFieldEndOffset();
+ // if with filters, append the filter
+ if (isFiltered) {
+ dos.write(prevTuple.getFieldData(numOfPrimaryKeys + 1), prevTuple.getFieldStart(numOfPrimaryKeys + 1),
+ prevTuple.getFieldLength(numOfPrimaryKeys + 1));
+ tb.addFieldEndOffset();
+ }
+ } else {
+ addNullField();
+ // if with filters, append null
+ if (isFiltered) {
+ addNullField();
+ }
+ }
+ FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+ }
+
+ private void addNullField() throws IOException {
+ dos.write(nullTupleBuilder.getByteArray());
+ tb.addFieldEndOffset();
+ }
+
+ //TODO: use tryDelete/tryInsert in order to prevent deadlocks
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+ LSMTreeIndexAccessor lsmAccessor = (LSMTreeIndexAccessor) indexAccessor;
+ int tupleCount = accessor.getTupleCount();
+
+ try {
+ for (int i = 0; i < tupleCount; i++) {
+ tuple.reset(accessor, i);
+ resetSearchPredicate(i);
+ cursor.reset();
+ lsmAccessor.search(cursor, searchPred);
+ if (cursor.hasNext()) {
+ cursor.next();
+ prevTuple = cursor.getTuple();
+ cursor.reset();
+ modCallback.setOp(Operation.DELETE);
+ if (isFiltered) {
+ prevTuple = getPrevTupleWithFilter(prevTuple);
+ }
+ if (i == 0) {
+ lsmAccessor.delete(prevTuple);
+ } else {
+ lsmAccessor.forceDelete(prevTuple);
+ }
+ } else {
+ prevTuple = null;
+ }
+ modCallback.setOp(Operation.INSERT);
+ if (prevTuple == null && i == 0) {
+ lsmAccessor.insert(tuple);
+ } else {
+ lsmAccessor.forceInsert(tuple);
+ }
+ writeOutput(i);
+ }
+ if (tupleCount > 0) {
+ // All tuples have to move forward to maintain the correctness of the transaction pipeline
+ appender.write(writer, true);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ private ITupleReference getPrevTupleWithFilter(ITupleReference prevTuple) throws IOException, AsterixException {
+ prevRecWithPKWithFilterValue.reset();
+ for (int i = 0; i < prevTuple.getFieldCount(); i++) {
+ prevDos.write(prevTuple.getFieldData(i), prevTuple.getFieldStart(i), prevTuple.getFieldLength(i));
+ prevRecWithPKWithFilterValue.addFieldEndOffset();
+ }
+ recPointable.set(prevTuple.getFieldData(numOfPrimaryKeys), prevTuple.getFieldStart(numOfPrimaryKeys),
+ prevTuple.getFieldLength(numOfPrimaryKeys));
+ // append the filter field (type tag + value) extracted from the previous record
+ prevDos.write(recPointable.getClosedFieldType(recordType, presetFieldIndex).getTypeTag().serialize());
+ prevDos.write(recPointable.getByteArray(), recPointable.getClosedFieldOffset(recordType, presetFieldIndex),
+ recPointable.getClosedFieldSize(recordType, presetFieldIndex));
+ prevRecWithPKWithFilterValue.addFieldEndOffset();
+ // prepare the tuple
+ prevTupleWithFilter.reset(prevRecWithPKWithFilterValue.getFieldEndOffsets(),
+ prevRecWithPKWithFilterValue.getByteArray());
+ return prevTupleWithFilter;
+ }
+
+ private RangePredicate createSearchPredicate() {
+ keySearchCmp = BTreeUtils.getSearchMultiComparator(lsmIndex.getComparatorFactories(), key);
+ return new RangePredicate(key, key, true, true, keySearchCmp, keySearchCmp, null, null);
+ }
+
+ protected IIndexCursor createCursor() {
+ return indexAccessor.createSearchCursor(false);
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ cursor.close();
+ writer.close();
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ } finally {
+ indexHelper.close();
+ }
+ }
+
+ @Override
+ public void fail() throws HyracksDataException {
+ writer.fail();
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ writer.flush();
+ }
+}
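Illustrative sketch (not part of this patch): the pushable above performs, per tuple, a primary-key search (which is expected to lock the key via the search callback), a delete of any previous entry, an insert of the new record, and then forwards the new fields plus the previous record (or null, and an optional filter field) so the downstream secondary-index upserts can compare old and new keys. Stripped of the frame and serde plumbing, the flow is roughly the hypothetical sketch below (PrimaryIndex and Record are stand-ins, not types from this patch).

    // Condensed per-tuple flow of AsterixLSMPrimaryUpsertOperatorNodePushable.nextFrame().
    final class PrimaryUpsertSketch {
        interface Record {
        }

        interface PrimaryIndex {
            Record searchByPrimaryKey(Record newRecord); // assumed to lock the key via the search callback
            void delete(Record previous);                // maps to lsmAccessor.delete()/forceDelete()
            void insert(Record newRecord);               // maps to lsmAccessor.insert()/forceInsert()
        }

        // Returns the previous record (or null) so the caller can forward it downstream,
        // as writeOutput() does for the secondary-index upsert operators.
        static Record upsert(PrimaryIndex index, Record newRecord) {
            Record previous = index.searchByPrimaryKey(newRecord);
            if (previous != null) {
                index.delete(previous); // delete-if-found ...
            }
            index.insert(newRecord);    // ... then insert the new record
            return previous;
        }
    }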
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
new file mode 100644
index 0000000..65dc83f
--- /dev/null
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.operators;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback.Operation;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
+import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMIndexInsertUpdateDeleteOperatorNodePushable;
+
+/**
+ * This operator node is used for secondary indexes with upsert operations.
+ * It works in the following way:
+ * For each incoming tuple:
+ * -If the old secondary keys equal the new secondary keys
+ * --do nothing
+ * -else
+ * --If the old secondary keys are null
+ * ---do nothing
+ * --else
+ * ---delete the old secondary keys
+ * --If the new secondary keys are null
+ * ---do nothing
+ * --else
+ * ---insert the new secondary keys
+ */
+public class AsterixLSMSecondaryUpsertOperatorNodePushable extends LSMIndexInsertUpdateDeleteOperatorNodePushable {
+
+ private final PermutingFrameTupleReference prevValueTuple = new PermutingFrameTupleReference();
+ private int numberOfFields;
+ private boolean isNewNull = false;
+ private boolean isPrevValueNull = false;
+
+ public AsterixLSMSecondaryUpsertOperatorNodePushable(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
+ int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider,
+ int[] prevValuePermutation) {
+ super(opDesc, ctx, partition, fieldPermutation, recordDescProvider, IndexOperation.UPSERT);
+ this.prevValueTuple.setFieldPermutation(prevValuePermutation);
+ this.numberOfFields = prevValuePermutation.length;
+ }
+
+ public static boolean equals(byte[] a, int aOffset, int aLength, byte[] b, int bOffset, int bLength) {
+ if (aLength != bLength) {
+ return false;
+ }
+ for (int i = 0; i < aLength; i++) {
+ if (a[aOffset + i] != b[bOffset + i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public static boolean equalTuples(PermutingFrameTupleReference t1, PermutingFrameTupleReference t2, int numOfFields)
+ throws HyracksDataException {
+ byte[] t1Data = t1.getFieldData(0);
+ byte[] t2Data = t2.getFieldData(0);
+ for (int i = 0; i < numOfFields; i++) {
+ if (!equals(t1Data, t1.getFieldStart(i), t1.getFieldLength(i), t2Data, t2.getFieldStart(i),
+ t2.getFieldLength(i))) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean isNull(PermutingFrameTupleReference t1) {
+ return t1.getFieldData(0)[t1.getFieldStart(0)] == ATypeTag.SERIALIZED_NULL_TYPE_TAG;
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+ ILSMIndexAccessor lsmAccessor = (ILSMIndexAccessor) indexAccessor;
+ int tupleCount = accessor.getTupleCount();
+ for (int i = 0; i < tupleCount; i++) {
+ try {
+ // if both previous value and new value are null, then we skip
+ tuple.reset(accessor, i);
+ prevValueTuple.reset(accessor, i);
+ isNewNull = isNull(tuple);
+ isPrevValueNull = isNull(prevValueTuple);
+ if (isNewNull && isPrevValueNull) {
+ continue;
+ }
+ // At least one of the two is not null.
+ // If they are equal, then we skip.
+ if (equalTuples(tuple, prevValueTuple, numberOfFields)) {
+ continue;
+ }
+ if (!isPrevValueNull) {
+ // the previous value is not null, so delete the old entry
+ modCallback.setOp(Operation.DELETE);
+ lsmAccessor.forceDelete(prevValueTuple);
+ }
+ if (!isNewNull) {
+ // the new value is not null, so insert the new entry
+ modCallback.setOp(Operation.INSERT);
+ lsmAccessor.forceInsert(tuple);
+ }
+
+ } catch (HyracksDataException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ // No partial flushing was necessary. Forward entire frame.
+ writeBuffer.ensureFrameSize(buffer.capacity());
+ FrameUtils.copyAndFlip(buffer, writeBuffer.getBuffer());
+ FrameUtils.flushFrame(writeBuffer.getBuffer(), writer);
+ }
+}
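
The per-tuple logic in nextFrame above can be read independently of the Hyracks frame machinery. The following standalone sketch mirrors that four-way decision using plain byte arrays and Java null in place of the serialized null type tag; the class, enum, and method names are illustrative only and are not part of the operator's API.

    import java.util.Arrays;

    public final class SecondaryUpsertDecision {

        public enum Action { SKIP, DELETE_ONLY, INSERT_ONLY, DELETE_THEN_INSERT }

        // Mirrors the per-tuple logic above: skip when both key sets are null or
        // byte-wise equal, otherwise delete the old keys and/or insert the new ones.
        public static Action decide(byte[] oldKeys, byte[] newKeys) {
            boolean oldNull = (oldKeys == null);
            boolean newNull = (newKeys == null);
            if (oldNull && newNull) {
                return Action.SKIP;
            }
            if (!oldNull && !newNull && Arrays.equals(oldKeys, newKeys)) {
                return Action.SKIP;
            }
            if (oldNull) {
                return Action.INSERT_ONLY;
            }
            if (newNull) {
                return Action.DELETE_ONLY;
            }
            return Action.DELETE_THEN_INSERT;
        }

        public static void main(String[] args) {
            System.out.println(decide(new byte[] { 1 }, new byte[] { 1 })); // SKIP
            System.out.println(decide(new byte[] { 1 }, new byte[] { 2 })); // DELETE_THEN_INSERT
            System.out.println(decide(null, new byte[] { 2 }));             // INSERT_ONLY
        }
    }
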
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMTreeUpsertOperatorDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMTreeUpsertOperatorDescriptor.java
new file mode 100644
index 0000000..803e15d
--- /dev/null
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMTreeUpsertOperatorDescriptor.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.runtime.operators;
+
+import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.dataflow.value.INullWriterFactory;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.common.IStorageManagerInterface;
+
+public class AsterixLSMTreeUpsertOperatorDescriptor extends AsterixLSMTreeInsertDeleteOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ private final int[] prevValuePermutation;
+ private ARecordType type;
+ private int filterIndex = -1;
+
+ public AsterixLSMTreeUpsertOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
+ IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] bloomFilterKeyFields, int[] fieldPermutation,
+ IIndexDataflowHelperFactory dataflowHelperFactory, ITupleFilterFactory tupleFilterFactory,
+ boolean isPrimary, String indexName, INullWriterFactory nullWriterFactory,
+ IModificationOperationCallbackFactory modificationOpCallbackProvider,
+ ISearchOperationCallbackFactory searchOpCallbackProvider, int[] prevValuePermutation) {
+ super(spec, recDesc, storageManager, lifecycleManagerProvider, fileSplitProvider, typeTraits,
+ comparatorFactories, bloomFilterKeyFields, fieldPermutation, IndexOperation.UPSERT,
+ dataflowHelperFactory, tupleFilterFactory, isPrimary, indexName, nullWriterFactory,
+ modificationOpCallbackProvider, searchOpCallbackProvider);
+ this.prevValuePermutation = prevValuePermutation;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return isPrimary()
+ ? new AsterixLSMPrimaryUpsertOperatorNodePushable(this, ctx, partition, fieldPermutation,
+ recordDescProvider, comparatorFactories.length, type, filterIndex)
+ : new AsterixLSMSecondaryUpsertOperatorNodePushable(this, ctx, partition, fieldPermutation,
+ recordDescProvider, prevValuePermutation);
+ }
+
+ public void setType(ARecordType type) {
+ this.type = type;
+ }
+
+ public void setFilterIndex(int filterIndex) {
+ this.filterIndex = filterIndex;
+ }
+}
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/SubsetCollectionDescriptor.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/SubsetCollectionDescriptor.java
index bb87cf4..88d9a4e 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/SubsetCollectionDescriptor.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/SubsetCollectionDescriptor.java
@@ -47,14 +47,9 @@
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class SubsetCollectionDescriptor extends AbstractUnnestingFunctionDynamicDescriptor {
-
private static final long serialVersionUID = 1L;
-
- private final static byte SER_ORDEREDLIST_TYPE_TAG = ATypeTag.ORDEREDLIST.serialize();
- private final static byte SER_UNORDEREDLIST_TYPE_TAG = ATypeTag.UNORDEREDLIST.serialize();
- private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
-
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ @Override
public IFunctionDescriptor createFunctionDescriptor() {
return new SubsetCollectionDescriptor();
}
@@ -105,16 +100,17 @@
byte[] serList = inputVal.getByteArray();
- if (serList[0] == SER_NULL_TYPE_TAG) {
+ if (serList[0] == ATypeTag.SERIALIZED_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
return;
}
- if (serList[0] != SER_ORDEREDLIST_TYPE_TAG && serList[0] != SER_UNORDEREDLIST_TYPE_TAG) {
+ if (serList[0] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
+ && serList[0] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
throw new AlgebricksException("Subset-collection is not defined for values of type"
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(serList[0]));
}
- if (serList[0] == SER_ORDEREDLIST_TYPE_TAG)
+ if (serList[0] == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG)
numItemsMax = AOrderedListSerializerDeserializer.getNumberOfItems(serList);
else
numItemsMax = AUnorderedListSerializerDeserializer.getNumberOfItems(serList);
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/AbstractIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/AbstractIndexModificationOperationCallback.java
index a07a109..3b5630f 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/AbstractIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/AbstractIndexModificationOperationCallback.java
@@ -26,12 +26,16 @@
import org.apache.asterix.common.transactions.ITransactionSubsystem;
import org.apache.asterix.common.transactions.LogRecord;
import org.apache.asterix.common.transactions.LogType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback.Operation;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.tuples.SimpleTupleWriter;
public abstract class AbstractIndexModificationOperationCallback extends AbstractOperationCallback {
+ private static final byte INSERT_OP = (byte) IndexOperation.INSERT.ordinal();
+ private static final byte DELETE_OP = (byte) IndexOperation.DELETE.ordinal();
protected final long resourceId;
protected final byte resourceType;
protected final IndexOperation indexOp;
@@ -72,4 +76,15 @@
logRecord.computeAndSetLogSize();
txnSubsystem.getLogManager().log(logRecord);
}
+
+ public void setOp(Operation op) throws HyracksDataException {
+ switch (op) {
+ case DELETE:
+ logRecord.setNewOp(DELETE_OP);
+ break;
+ case INSERT:
+ logRecord.setNewOp(INSERT_OP);
+ break;
+ }
+ }
 }
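
The new setOp hook lets one callback instance tag successive log records with different operation codes, which the secondary upsert pushable relies on when it turns a single incoming tuple into a delete followed by an insert (see the calls to modCallback.setOp above). A compact, self-contained sketch of that pattern, with a stub log record standing in for the real LogRecord:

    public final class SetOpSketch {

        enum Op { INSERT, DELETE }

        // stand-in for the transactional log record; only the op byte is modeled here
        static final class StubLogRecord {
            byte newOp;
        }

        private static final byte INSERT_OP = (byte) Op.INSERT.ordinal();
        private static final byte DELETE_OP = (byte) Op.DELETE.ordinal();

        private final StubLogRecord logRecord = new StubLogRecord();

        // mirrors the shape of AbstractIndexModificationOperationCallback.setOp(...)
        void setOp(Op op) {
            logRecord.newOp = (op == Op.DELETE) ? DELETE_OP : INSERT_OP;
        }

        public static void main(String[] args) {
            SetOpSketch callback = new SetOpSketch();
            callback.setOp(Op.DELETE); // the upsert first logs the delete of the old entry
            System.out.println("op byte: " + callback.logRecord.newOp);
            callback.setOp(Op.INSERT); // then logs the insert of the new entry
            System.out.println("op byte: " + callback.logRecord.newOp);
        }
    }
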
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallback.java
new file mode 100644
index 0000000..49cea94
--- /dev/null
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallback.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.transaction.management.opcallbacks;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.transactions.AbstractOperationCallback;
+import org.apache.asterix.common.transactions.ILockManager;
+import org.apache.asterix.common.transactions.ITransactionContext;
+import org.apache.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
+
+public class LockThenSearchOperationCallback extends AbstractOperationCallback implements ISearchOperationCallback {
+
+ public LockThenSearchOperationCallback(int datasetId, int[] entityIdFields, ILockManager lockManager,
+ ITransactionContext txnCtx) {
+ super(datasetId, entityIdFields, txnCtx, lockManager);
+ }
+
+ @Override
+ public boolean proceed(ITupleReference tuple) throws HyracksDataException {
+ return true;
+ }
+
+ @Override
+ public void reconcile(ITupleReference tuple) throws HyracksDataException {
+ }
+
+ @Override
+ public void cancel(ITupleReference tuple) throws HyracksDataException {
+ }
+
+ @Override
+ public void complete(ITupleReference tuple) throws HyracksDataException {
+ }
+
+ @Override
+ public void before(ITupleReference tuple) throws HyracksDataException {
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
+ try {
+ lockManager.lock(datasetId, pkHash, LockMode.X, txnCtx);
+ } catch (ACIDException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+}
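
This callback is what implements the "lock the primary keys before doing the search" behavior from the commit message: before() acquires an exclusive lock on the primary-key hash, so the upsert's search, delete, and insert all run under the same lock. The sketch below illustrates that ordering with a plain Java lock table; ILockManager, LockMode.X, and the commit-time unlock are replaced by simplified stand-ins.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.locks.ReentrantLock;

    public final class LockThenSearchSketch {

        // stand-in lock table keyed by primary-key hash (the real code uses ILockManager with X mode)
        private final Map<Integer, ReentrantLock> lockTable = new ConcurrentHashMap<>();

        // called before the primary-index search, mirroring LockThenSearchOperationCallback.before()
        void before(int pkHash) {
            lockTable.computeIfAbsent(pkHash, h -> new ReentrantLock()).lock();
        }

        // called once the upsert (delete + insert) for this key has committed
        void release(int pkHash) {
            ReentrantLock lock = lockTable.get(pkHash);
            if (lock != null && lock.isHeldByCurrentThread()) {
                lock.unlock();
            }
        }

        public static void main(String[] args) {
            LockThenSearchSketch callback = new LockThenSearchSketch();
            int pkHash = "key-42".hashCode();
            callback.before(pkHash);      // exclusive access to this key
            try {
                // ... search, then delete-if-found + insert would happen here ...
            } finally {
                callback.release(pkHash); // the real code releases via UPSERT_ENTITY_COMMIT handling
            }
        }
    }
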
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallbackFactory.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallbackFactory.java
new file mode 100644
index 0000000..6bfb6cd
--- /dev/null
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/LockThenSearchOperationCallbackFactory.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.transaction.management.opcallbacks;
+
+import org.apache.asterix.common.context.ITransactionSubsystemProvider;
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.transactions.AbstractOperationCallbackFactory;
+import org.apache.asterix.common.transactions.ITransactionContext;
+import org.apache.asterix.common.transactions.ITransactionSubsystem;
+import org.apache.asterix.common.transactions.JobId;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
+import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
+
+public class LockThenSearchOperationCallbackFactory extends AbstractOperationCallbackFactory
+ implements ISearchOperationCallbackFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public LockThenSearchOperationCallbackFactory(JobId jobId, int datasetId, int[] entityIdFields,
+ ITransactionSubsystemProvider txnSubsystemProvider, byte resourceType) {
+ super(jobId, datasetId, entityIdFields, txnSubsystemProvider, resourceType);
+ }
+
+ @Override
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx)
+ throws HyracksDataException {
+ ITransactionSubsystem txnSubsystem = txnSubsystemProvider.getTransactionSubsystem(ctx);
+ try {
+ ITransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId, false);
+ return new LockThenSearchOperationCallback(datasetId, primaryKeyFields, txnSubsystem.getLockManager(),
+ txnCtx);
+ } catch (ACIDException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+}
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
index 944f07e..b2477cd 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
@@ -31,8 +31,8 @@
/**
* Assumes LSM-BTrees as primary indexes. Implements try/locking and unlocking on primary keys.
*/
-public class PrimaryIndexInstantSearchOperationCallback extends AbstractOperationCallback implements
- ISearchOperationCallback {
+public class PrimaryIndexInstantSearchOperationCallback extends AbstractOperationCallback
+ implements ISearchOperationCallback {
public PrimaryIndexInstantSearchOperationCallback(int datasetId, int[] entityIdFields, ILockManager lockManager,
ITransactionContext txnCtx) {
@@ -73,4 +73,8 @@
throw new HyracksDataException(e);
}
}
+
+ @Override
+ public void before(ITupleReference tuple) throws HyracksDataException {
+ }
}
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
index 5d6349d..3c34153 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexModificationOperationCallback.java
@@ -33,8 +33,8 @@
* Assumes LSM-BTrees as primary indexes.
* Performs locking on primary keys, and also logs before/after images.
*/
-public class PrimaryIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback implements
- IModificationOperationCallback {
+public class PrimaryIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback
+ implements IModificationOperationCallback {
public PrimaryIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields, ITransactionContext txnCtx,
ILockManager lockManager, ITransactionSubsystem txnSubsystem, long resourceId, byte resourceType,
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
index 2d22879..9532f9e 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
@@ -39,6 +39,11 @@
}
@Override
+ public void before(ITupleReference tuple) throws HyracksDataException {
+ //no op
+ }
+
+ @Override
public boolean proceed(ITupleReference tuple) throws HyracksDataException {
try {
return lockManager.tryLock(datasetId, -1, LockMode.S, txnCtx);
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
index cd0c41f..250e28d 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
@@ -33,8 +33,8 @@
* We assume that the modification of the corresponding primary index has already taken an appropriate lock.
* This callback performs logging of the before and/or after images for secondary indexes.
*/
-public class SecondaryIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback implements
- IModificationOperationCallback {
+public class SecondaryIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback
+ implements IModificationOperationCallback {
protected final IndexOperation oldOp;
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
index 59924d3..ac5f4d4 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
@@ -27,14 +27,19 @@
/**
* Secondary index searches perform no locking at all.
*/
-public class SecondaryIndexSearchOperationCallback extends AbstractOperationCallback implements
- ISearchOperationCallback {
+public class SecondaryIndexSearchOperationCallback extends AbstractOperationCallback
+ implements ISearchOperationCallback {
public SecondaryIndexSearchOperationCallback() {
super(-1, null, null, null);
}
@Override
+ public void before(ITupleReference tuple) throws HyracksDataException {
+ // Do nothing
+ }
+
+ @Override
public boolean proceed(ITupleReference tuple) throws HyracksDataException {
return true;
}
@@ -53,5 +58,4 @@
public void complete(ITupleReference tuple) throws HyracksDataException {
// Do nothing.
}
-
}
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/TempDatasetIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/TempDatasetIndexModificationOperationCallback.java
index b27daea..69aad24 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/TempDatasetIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/TempDatasetIndexModificationOperationCallback.java
@@ -34,8 +34,8 @@
* The "before" and "found" method in this callback is empty so that no locking is requested for accessing a temporary
* dataset and no write-ahead log is written for update operations.
*/
-public class TempDatasetIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback implements
- IModificationOperationCallback {
+public class TempDatasetIndexModificationOperationCallback extends AbstractIndexModificationOperationCallback
+ implements IModificationOperationCallback {
public TempDatasetIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields,
ITransactionContext txnCtx, ILockManager lockManager, ITransactionSubsystem txnSubsystem, long resourceId,
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallback.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallback.java
new file mode 100644
index 0000000..dfc622a
--- /dev/null
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallback.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.transaction.management.opcallbacks;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.transactions.ILockManager;
+import org.apache.asterix.common.transactions.ITransactionContext;
+import org.apache.asterix.common.transactions.ITransactionSubsystem;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+
+public class UpsertOperationCallback extends AbstractIndexModificationOperationCallback
+ implements IModificationOperationCallback {
+
+ public UpsertOperationCallback(int datasetId, int[] primaryKeyFields, ITransactionContext txnCtx,
+ ILockManager lockManager, ITransactionSubsystem txnSubsystem, long resourceId, byte resourceType,
+ IndexOperation indexOp) {
+ super(datasetId, primaryKeyFields, txnCtx, lockManager, txnSubsystem, resourceId, resourceType, indexOp);
+ }
+
+ @Override
+ public void before(ITupleReference tuple) throws HyracksDataException {
+ // Do nothing, as the lock has already been acquired by the preceding search
+ }
+
+ @Override
+ public void found(ITupleReference before, ITupleReference after) throws HyracksDataException {
+ try {
+ int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields);
+ log(pkHash, after);
+ } catch (ACIDException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+}
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallbackFactory.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallbackFactory.java
new file mode 100644
index 0000000..0c83ab5
--- /dev/null
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/UpsertOperationCallbackFactory.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.transaction.management.opcallbacks;
+
+import org.apache.asterix.common.context.ITransactionSubsystemProvider;
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.transactions.AbstractOperationCallback;
+import org.apache.asterix.common.transactions.AbstractOperationCallbackFactory;
+import org.apache.asterix.common.transactions.ITransactionContext;
+import org.apache.asterix.common.transactions.ITransactionSubsystem;
+import org.apache.asterix.common.transactions.JobId;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
+import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
+import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
+
+public class UpsertOperationCallbackFactory extends AbstractOperationCallbackFactory
+ implements IModificationOperationCallbackFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final IndexOperation indexOp;
+
+ public UpsertOperationCallbackFactory(JobId jobId, int datasetId, int[] primaryKeyFields,
+ ITransactionSubsystemProvider txnSubsystemProvider, IndexOperation indexOp, byte resourceType) {
+ super(jobId, datasetId, primaryKeyFields, txnSubsystemProvider, resourceType);
+ this.indexOp = indexOp;
+ }
+
+ @Override
+ public IModificationOperationCallback createModificationOperationCallback(String resourceName, long resourceId,
+ Object resource, IHyracksTaskContext ctx) throws HyracksDataException {
+
+ ITransactionSubsystem txnSubsystem = txnSubsystemProvider.getTransactionSubsystem(ctx);
+ IIndexLifecycleManager indexLifeCycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
+ .getDatasetLifecycleManager();
+ ILSMIndex index = (ILSMIndex) indexLifeCycleManager.getIndex(resourceName);
+ if (index == null) {
+ throw new HyracksDataException("Index(id:" + resourceId + ") is not registered.");
+ }
+
+ try {
+ ITransactionContext txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(jobId, false);
+ IModificationOperationCallback modCallback = new UpsertOperationCallback(datasetId, primaryKeyFields,
+ txnCtx, txnSubsystem.getLockManager(), txnSubsystem, resourceId, resourceType, indexOp);
+ txnCtx.registerIndexAndCallback(resourceId, index, (AbstractOperationCallback) modCallback, true);
+ return modCallback;
+ } catch (ACIDException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+}
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
index 45ba9bd..6060dd7 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
@@ -254,14 +254,20 @@
LogRecord logRecord = logBufferTailReader.next();
while (logRecord != null) {
if (logRecord.getLogSource() == LogSource.LOCAL) {
- if (logRecord.getLogType() == LogType.ENTITY_COMMIT) {
+ if (logRecord.getLogType() == LogType.ENTITY_COMMIT
+ || logRecord.getLogType() == LogType.UPSERT_ENTITY_COMMIT) {
reusableJobId.setId(logRecord.getJobId());
txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(reusableJobId, false);
reusableDsId.setId(logRecord.getDatasetId());
txnSubsystem.getLockManager().unlock(reusableDsId, logRecord.getPKHashValue(), LockMode.ANY,
txnCtx);
txnCtx.notifyOptracker(false);
- } else if (logRecord.getLogType() == LogType.JOB_COMMIT || logRecord.getLogType() == LogType.ABORT) {
+ if (logRecord.getLogType() == LogType.UPSERT_ENTITY_COMMIT) {
+ // Since an upsert consists of a delete and an insert, the op tracker must be notified twice
+ txnCtx.notifyOptracker(false);
+ }
+ } else if (logRecord.getLogType() == LogType.JOB_COMMIT
+ || logRecord.getLogType() == LogType.ABORT) {
reusableJobId.setId(logRecord.getJobId());
txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(reusableJobId, false);
txnCtx.notifyOptracker(true);
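
The second notifyOptracker call encodes the comment above: an upsert is two index modifications (a delete plus an insert), so a single UPSERT_ENTITY_COMMIT has to balance two pending operations before the component can be considered idle. A toy counter illustrating that accounting (an assumed simplification; the real operation-tracker API is not shown here):

    import java.util.concurrent.atomic.AtomicInteger;

    public final class OpTrackerAccountingSketch {

        // toy stand-in for the operation tracker's pending-operation count
        private final AtomicInteger pendingOps = new AtomicInteger();

        void begin()    { pendingOps.incrementAndGet(); } // an index modification starts
        void complete() { pendingOps.decrementAndGet(); } // a commit notification arrives

        public static void main(String[] args) {
            OpTrackerAccountingSketch tracker = new OpTrackerAccountingSketch();
            tracker.begin();    // delete half of the upsert
            tracker.begin();    // insert half of the upsert
            // a single UPSERT_ENTITY_COMMIT therefore has to complete both halves:
            tracker.complete();
            tracker.complete();
            System.out.println("pending operations: " + tracker.pendingOps.get()); // 0
        }
    }
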
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/RecoveryManager.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/RecoveryManager.java
index 2f824df..11dc282 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/RecoveryManager.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/RecoveryManager.java
@@ -126,6 +126,7 @@
* of the operation, or the system can be recovered concurrently. This kind of concurrency is
* not supported, yet.
*/
+ @Override
public SystemState getSystemState() throws ACIDException {
//read checkpoint file
CheckpointObject checkpointObject = null;
@@ -180,6 +181,7 @@
}
//This method is used only when replication is disabled. Therefore, there is no need to check logs node ids
+ @Override
public void startRecovery(boolean synchronous) throws IOException, ACIDException {
//delete any recovery files from previous failed recovery attempts
deleteRecoveryTemporaryFiles();
@@ -244,6 +246,7 @@
jobCommitLogCount++;
break;
case LogType.ENTITY_COMMIT:
+ case LogType.UPSERT_ENTITY_COMMIT:
jobId = logRecord.getJobId();
if (!jobId2WinnerEntitiesMap.containsKey(jobId)) {
jobEntityWinners = new JobEntityCommits(jobId);
@@ -376,6 +379,7 @@
case LogType.ENTITY_COMMIT:
case LogType.ABORT:
case LogType.FLUSH:
+ case LogType.UPSERT_ENTITY_COMMIT:
//do nothing
break;
default:
@@ -452,6 +456,7 @@
updateLogCount++;
break;
case LogType.JOB_COMMIT:
+ case LogType.UPSERT_ENTITY_COMMIT:
winnerJobSet.add(Integer.valueOf(logRecord.getJobId()));
jobId2WinnerEntitiesMap.remove(Integer.valueOf(logRecord.getJobId()));
jobCommitLogCount++;
@@ -588,6 +593,7 @@
case LogType.ENTITY_COMMIT:
case LogType.ABORT:
case LogType.FLUSH:
+ case LogType.UPSERT_ENTITY_COMMIT:
//do nothing
break;
@@ -737,6 +743,7 @@
return minMCTFirstLSN;
}
+ @Override
public long getMinFirstLSN() throws HyracksDataException {
long minFirstLSN = getLocalMinFirstLSN();
@@ -749,6 +756,7 @@
return minFirstLSN;
}
+ @Override
public long getLocalMinFirstLSN() throws HyracksDataException {
IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
.getDatasetLifecycleManager();
@@ -969,6 +977,7 @@
}
break;
case LogType.ENTITY_COMMIT:
+ case LogType.UPSERT_ENTITY_COMMIT:
jobLoserEntity2LSNsMap.remove(tempKeyTxnId);
entityCommitLogCount++;
if (IS_DEBUG_MODE) {
@@ -1003,6 +1012,8 @@
while (iter.hasNext()) {
Map.Entry<TxnId, List<Long>> loserEntity2LSNsMap = iter.next();
undoLSNSet = loserEntity2LSNsMap.getValue();
+ // The reversal below is important: an upsert's log records must be undone in reverse order.
+ Collections.reverse(undoLSNSet);
for (long undoLSN : undoLSNSet) {
//here, all the log records are UPDATE type. So, we don't need to check the type again.
//read the corresponding log record to be undone.
@@ -1637,4 +1648,4 @@
}
return size;
}
-}
+}
\ No newline at end of file
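
The added Collections.reverse call matters because a single upsert can leave two update log records for the same entity (the delete of the old record followed by the insert of the new one); undoing them in log order would restore the wrong image. A minimal illustration of the newest-first ordering, with a hypothetical undo(lsn) standing in for the recovery manager's actual log replay:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public final class UndoOrderSketch {

        public static void main(String[] args) {
            // LSNs of the loser entity's update records, as collected in log order
            List<Long> undoLSNs = Arrays.asList(100L, 105L, 110L);

            // undo must run newest-first, so the list is reversed before replay
            Collections.reverse(undoLSNs);

            for (long lsn : undoLSNs) {
                undo(lsn); // hypothetical per-record undo; the real code reads and undoes the log record
            }
        }

        private static void undo(long lsn) {
            System.out.println("undoing log record at LSN " + lsn);
        }
    }
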
diff --git a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
index 57ced9c..2112097 100644
--- a/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
+++ b/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/transaction/TransactionManager.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.common.transactions.LogRecord;
+import org.apache.asterix.common.utils.TransactionUtil;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
/**
@@ -58,7 +59,7 @@
try {
if (txnCtx.isWriteTxn()) {
LogRecord logRecord = ((TransactionContext) txnCtx).getLogRecord();
- logRecord.formJobTerminateLogRecord(txnCtx, false);
+ TransactionUtil.formJobTerminateLogRecord(txnCtx, logRecord, false);
txnSubsystem.getLogManager().log(logRecord);
txnSubsystem.getRecoveryManager().rollbackTransaction(txnCtx);
}
@@ -107,7 +108,7 @@
try {
if (txnCtx.isWriteTxn()) {
LogRecord logRecord = ((TransactionContext) txnCtx).getLogRecord();
- logRecord.formJobTerminateLogRecord(txnCtx, true);
+ TransactionUtil.formJobTerminateLogRecord(txnCtx, logRecord, true);
txnSubsystem.getLogManager().log(logRecord);
}
} catch (Exception ae) {
@@ -123,8 +124,8 @@
}
@Override
- public void completedTransaction(ITransactionContext txnContext, DatasetId datasetId, int PKHashVal, boolean success)
- throws ACIDException {
+ public void completedTransaction(ITransactionContext txnContext, DatasetId datasetId, int PKHashVal,
+ boolean success) throws ACIDException {
if (!success) {
abortTransaction(txnContext, datasetId, PKHashVal);
} else {