Merged asterix_stabilization r249:r266, and performed the promised cleanup of how index-creation jobspecs are built.
git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix-fix-issue-9@267 eaa15691-b419-025a-1212-ee371bd00084
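
In outline, the cleanup splits what used to be a single IndexOperations.buildCreateIndexJobSpec() call into two separately runnable jobspecs: a creation jobspec that physically creates the empty secondary index (now run from the DDL translator right after the index is added to the metadata), and a loading jobspec that bulk-loads it from the primary index (used by the DML path in APIFramework). A minimal sketch of the new call pattern follows; the local variable names and surrounding context (stmt, hcc, metadata, runJob) are illustrative only, not part of the change:

    // Phase 1 (DDL): build and run a jobspec that creates the empty secondary index on disk.
    CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(
            stmt.getIndexName().getValue(), stmt.getDatasetName().getValue(),
            stmt.getFieldExprs(), stmt.getIndexType());
    JobSpecification createSpec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadata);
    runJob(hcc, createSpec);

    // Phase 2 (load/insert path): build and run a jobspec that bulk-loads the secondary
    // index from the primary index.
    JobSpecification loadSpec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadata);
    runJob(hcc, loadSpec);

Both entry points delegate to SecondaryIndexCreator.createIndexCreator(cis, metadata, physicalOptimizationConfig), which dispatches on the index type (BTREE, RTREE, KEYWORD) and returns a creator whose buildCreationJobSpec() / buildLoadingJobSpec() methods produce the respective jobspecs.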
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index c9507d9..7f6769e 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -233,7 +233,7 @@
}
case CREATE_INDEX: {
CompiledCreateIndexStatement cis = (CompiledCreateIndexStatement) stmt;
- JobSpecification jobSpec = IndexOperations.buildCreateIndexJobSpec(cis, metadata);
+ JobSpecification jobSpec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadata);
dmlJobs.add(new Job(jobSpec));
break;
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
index 422e732..5e891c8 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
@@ -83,6 +83,7 @@
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
+import edu.uci.ics.asterix.translator.DmlTranslator.CompiledCreateIndexStatement;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
@@ -215,6 +216,9 @@
MetadataManager.INSTANCE.addDataset(mdTxnCtx,
new Dataset(compiledDeclarations.getDataverseName(), datasetName, itemTypeName,
datasetDetails, dsType));
+ if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
+ runCreateDatasetJob(hcc, datasetName);
+ }
break;
}
@@ -240,7 +244,8 @@
MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(compiledDeclarations.getDataverseName(),
datasetName, indexName, stmtCreateIndex.getIndexType(),
stmtCreateIndex.getFieldExprs(), false));
- }
+ runCreateIndexJob(hcc, stmtCreateIndex);
+ }
break;
}
case TYPE_DECL: {
@@ -475,6 +480,26 @@
}
}
+ private void runCreateDatasetJob(IHyracksClientConnection hcc, String datasetName) throws AsterixException,
+ AlgebricksException, Exception {
+ runJob(hcc, DatasetOperations.createDatasetJobSpec(datasetName, compiledDeclarations));
+ }
+
+ private void runCreateIndexJob(IHyracksClientConnection hcc, CreateIndexStatement stmtCreateIndex) throws Exception {
+ // TODO: Eventually CreateIndexStatement and CompiledCreateIndexStatement should be replaced by the corresponding metadata entity.
+ // For now we must still convert to a CompiledCreateIndexStatement here.
+ CompiledCreateIndexStatement createIndexStmt = new CompiledCreateIndexStatement(stmtCreateIndex.getIndexName()
+ .getValue(), stmtCreateIndex.getDatasetName().getValue(), stmtCreateIndex.getFieldExprs(),
+ stmtCreateIndex.getIndexType());
+ JobSpecification spec = IndexOperations.buildSecondaryIndexCreationJobSpec(createIndexStmt,
+ compiledDeclarations);
+ if (spec == null) {
+ throw new AsterixException("Failed to create job spec for creating index '"
+ + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
+ }
+ runJob(hcc, spec);
+ }
+
private void compileDatasetDropStatement(IHyracksClientConnection hcc, MetadataTransactionContext mdTxnCtx,
String datasetName) throws Exception {
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(datasetName);
@@ -495,7 +520,7 @@
private void compileIndexDropStatement(IHyracksClientConnection hcc, MetadataTransactionContext mdTxnCtx,
String datasetName, String indexName) throws Exception {
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(datasetName, indexName);
- runJob(hcc, IndexOperations.createSecondaryIndexDropJobSpec(cds, compiledDeclarations));
+ runJob(hcc, IndexOperations.buildDropSecondaryIndexJobSpec(cds, compiledDeclarations));
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, compiledDeclarations.getDataverseName(), datasetName, indexName);
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index fcc0158..2b341db 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -69,6 +69,7 @@
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -137,6 +138,36 @@
return specs;
}
+ // TODO: Lots of common code in this file. Refactor everything after merging in asterix-fix-issue-9.
+ public static JobSpecification createDatasetJobSpec(String datasetName,
+ AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+ AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(datasetName);
+ if (compiledDatasetDecl == null) {
+ throw new AsterixException("Could not find dataset " + datasetName);
+ }
+ JobSpecification spec = new JobSpecification();
+ IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+ compiledDatasetDecl, metadata.getFormat().getBinaryComparatorFactoryProvider());
+ ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(compiledDatasetDecl, metadata);
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+ .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, datasetName);
+ FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < fs.length; i++) {
+ sb.append(stringOf(fs[i]) + " ");
+ }
+ LOGGER.info("CREATING File Splits: " + sb.toString());
+ IIndexRegistryProvider<IIndex> indexRegistryProvider = AsterixIndexRegistryProvider.INSTANCE;
+ IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
+ TreeIndexCreateOperatorDescriptor indexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+ storageManager, indexRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories,
+ new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
+ splitsAndConstraint.second);
+ spec.addRoot(indexCreateOp);
+ return spec;
+ }
+
public static Job createLoadDatasetJobSpec(CompiledLoadFromFileStatement loadStmt,
AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
String datasetName = loadStmt.getDatasetName();
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
index e788e0f..103e429 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
@@ -26,13 +26,19 @@
private static final PhysicalOptimizationConfig physicalOptimizationConfig = OptimizationConfUtil
.getPhysicalOptimizationConfig();
- public static JobSpecification buildCreateIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
+ public static JobSpecification buildSecondaryIndexCreationJobSpec(CompiledCreateIndexStatement createIndexStmt,
AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
- SecondaryIndexCreator secondaryIndexCreator = SecondaryIndexCreator.createIndexCreator(createIndexStmt.getIndexType(), physicalOptimizationConfig);
- return secondaryIndexCreator.createJobSpec(createIndexStmt, metadata);
+ SecondaryIndexCreator secondaryIndexCreator = SecondaryIndexCreator.createIndexCreator(createIndexStmt, metadata, physicalOptimizationConfig);
+ return secondaryIndexCreator.buildCreationJobSpec();
}
-
- public static JobSpecification createSecondaryIndexDropJobSpec(CompiledIndexDropStatement deleteStmt,
+
+ public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
+ AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+ SecondaryIndexCreator secondaryIndexCreator = SecondaryIndexCreator.createIndexCreator(createIndexStmt, metadata, physicalOptimizationConfig);
+ return secondaryIndexCreator.buildLoadingJobSpec();
+ }
+
+ public static JobSpecification buildDropSecondaryIndexJobSpec(CompiledIndexDropStatement deleteStmt,
AqlCompiledMetadataDeclarations datasetDecls) throws AlgebricksException, MetadataException {
String datasetName = deleteStmt.getDatasetName();
String indexName = deleteStmt.getIndexName();
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
index 0e4b633..83d7886 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeCreator.java
@@ -1,8 +1,8 @@
package edu.uci.ics.asterix.file;
+import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
+import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.declared.AqlCompiledMetadataDeclarations;
-import edu.uci.ics.asterix.translator.DmlTranslator.CompiledCreateIndexStatement;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
@@ -16,6 +16,8 @@
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
public class SecondaryBTreeCreator extends SecondaryIndexCreator {
@@ -24,9 +26,21 @@
}
@Override
- public JobSpecification createJobSpec(CompiledCreateIndexStatement createIndexStmt,
- AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
- init(createIndexStmt, metadata);
+ public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+ JobSpecification spec = new JobSpecification();
+ TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+ AsterixStorageManagerInterface.INSTANCE, AsterixIndexRegistryProvider.INSTANCE,
+ secondaryFileSplitProvider, secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories,
+ new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+ secondaryPartitionConstraint);
+ spec.addRoot(secondaryIndexCreateOp);
+ spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+ return spec;
+ }
+
+ @Override
+ public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
JobSpecification spec = new JobSpecification();
// Create dummy key provider for feeding the primary index scan.
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
index d9585fd..b63e019 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexCreator.java
@@ -4,7 +4,6 @@
import java.util.List;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
@@ -68,6 +67,8 @@
protected ISerializerDeserializer payloadSerde;
protected IFileSplitProvider primaryFileSplitProvider;
protected AlgebricksPartitionConstraint primaryPartitionConstraint;
+ protected IFileSplitProvider secondaryFileSplitProvider;
+ protected AlgebricksPartitionConstraint secondaryPartitionConstraint;
protected String secondaryIndexName;
protected boolean anySecondaryKeyIsNullable = false;
@@ -77,30 +78,37 @@
protected RecordDescriptor secondaryRecDesc;
protected IEvaluatorFactory[] evalFactories;
- // Prevent public construction.
+ // Prevent public construction. Should be created via createIndexCreator().
protected SecondaryIndexCreator(PhysicalOptimizationConfig physOptConf) {
this.physOptConf = physOptConf;
}
- public static SecondaryIndexCreator createIndexCreator(IndexType indexType, PhysicalOptimizationConfig physOptConf) throws AsterixException {
- switch (indexType) {
+ public static SecondaryIndexCreator createIndexCreator(CompiledCreateIndexStatement createIndexStmt, AqlCompiledMetadataDeclarations metadata, PhysicalOptimizationConfig physOptConf) throws AsterixException, AlgebricksException {
+ SecondaryIndexCreator indexCreator = null;
+ switch (createIndexStmt.getIndexType()) {
case BTREE: {
- return new SecondaryBTreeCreator(physOptConf);
+ indexCreator = new SecondaryBTreeCreator(physOptConf);
+ break;
}
case RTREE: {
- return new SecondaryRTreeCreator(physOptConf);
+ indexCreator = new SecondaryRTreeCreator(physOptConf);
+ break;
}
case KEYWORD: {
- return new SecondaryInvertedIndexCreator(physOptConf);
+ indexCreator = new SecondaryInvertedIndexCreator(physOptConf);
+ break;
}
default: {
- throw new AsterixException("Unknown Index Type: " + indexType);
+ throw new AsterixException("Unknown Index Type: " + createIndexStmt.getIndexType());
}
}
+ indexCreator.init(createIndexStmt, metadata);
+ return indexCreator;
}
- public abstract JobSpecification createJobSpec(CompiledCreateIndexStatement createIndexStmt,
- AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException;
+ public abstract JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException;
+
+ public abstract JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException;
protected void init(CompiledCreateIndexStatement createIndexStmt, AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
this.metadata = metadata;
@@ -118,10 +126,14 @@
.getSerializerDeserializer(itemType);
numPrimaryKeys = DatasetUtils.getPartitioningFunctions(datasetDecl).size();
numSecondaryKeys = createIndexStmt.getKeyFields().size();
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndConstraint = metadata
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadata
.splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, datasetName);
- primaryFileSplitProvider = splitProviderAndConstraint.first;
- primaryPartitionConstraint = splitProviderAndConstraint.second;
+ primaryFileSplitProvider = primarySplitsAndConstraint.first;
+ primaryPartitionConstraint = primarySplitsAndConstraint.second;
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadata
+ .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, secondaryIndexName);
+ secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+ secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
// Must be called in this order.
setPrimaryRecDescAndComparators();
setSecondaryRecDescAndComparators(createIndexStmt.getKeyFields());
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
index 4f0d3a5..df2ef13 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexCreator.java
@@ -1,58 +1,10 @@
package edu.uci.ics.asterix.file;
-import java.io.DataOutput;
-import java.util.List;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
-import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryTokenizerFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlCompiledDatasetDecl;
-import edu.uci.ics.asterix.metadata.declared.AqlCompiledIndexDecl;
-import edu.uci.ics.asterix.metadata.declared.AqlCompiledMetadataDeclarations;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.translator.DmlTranslator.CompiledCreateIndexStatement;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-@SuppressWarnings("rawtypes")
public class SecondaryInvertedIndexCreator extends SecondaryIndexCreator {
protected SecondaryInvertedIndexCreator(PhysicalOptimizationConfig physOptConf) {
@@ -60,10 +12,16 @@
}
@Override
+ public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
// TODO: This code has been completely rewritten in the asterix-fuzzy branch. No tests currently rely
// on this code, so I didn't do any cleanup here.
- public JobSpecification createJobSpec(CompiledCreateIndexStatement createIndexStmt,
- AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+ public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
+ /*
JobSpecification spec = new JobSpecification();
String primaryIndexName = createIndexStmt.getDatasetName();
@@ -305,6 +263,7 @@
// ---------- END CONNECT THE OPERATORS
spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
return spec;
+ */
+ return null;
}
-
}
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
index 4010fdc..b2db19a 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeCreator.java
@@ -2,16 +2,17 @@
import java.util.List;
+import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
+import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
import edu.uci.ics.asterix.metadata.declared.AqlCompiledIndexDecl;
-import edu.uci.ics.asterix.metadata.declared.AqlCompiledMetadataDeclarations;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.translator.DmlTranslator.CompiledCreateIndexStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
@@ -28,6 +29,8 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
@SuppressWarnings("rawtypes")
@@ -41,6 +44,20 @@
}
@Override
+ public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+ JobSpecification spec = new JobSpecification();
+ TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+ AsterixStorageManagerInterface.INSTANCE, AsterixIndexRegistryProvider.INSTANCE,
+ secondaryFileSplitProvider, secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories,
+ new RTreeDataflowHelperFactory(valueProviderFactories), NoOpOperationCallbackProvider.INSTANCE);
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+ secondaryPartitionConstraint);
+ spec.addRoot(secondaryIndexCreateOp);
+ spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+ return spec;
+ }
+
+ @Override
protected void setSecondaryRecDescAndComparators(List<String> secondaryKeyFields) throws AlgebricksException,
AsterixException {
int numSecondaryKeys = secondaryKeyFields.size();
@@ -84,9 +101,7 @@
}
@Override
- public JobSpecification createJobSpec(CompiledCreateIndexStatement createIndexStmt,
- AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
- init(createIndexStmt, metadata);
+ public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
JobSpecification spec = new JobSpecification();
// Create dummy key provider for feeding the primary index scan.
@@ -122,5 +137,4 @@
spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
return spec;
}
-
}
diff --git a/asterix-app/src/test/resources/runtimets/ignore.txt b/asterix-app/src/test/resources/runtimets/ignore.txt
index 0964e88..2b456ca 100644
--- a/asterix-app/src/test/resources/runtimets/ignore.txt
+++ b/asterix-app/src/test/resources/runtimets/ignore.txt
@@ -15,6 +15,4 @@
fuzzyjoin
failure/q1_pricing_summary_report_failure.aql
open-closed
-dml/insert-into-empty-dataset-with-index_02.aql
-dml/insert-into-empty-dataset-with-index_01.aql
dml/load-from-hdfs.aql
diff --git a/asterix-app/src/test/resources/runtimets/only.txt b/asterix-app/src/test/resources/runtimets/only.txt
index b399dbe..e69de29 100644
--- a/asterix-app/src/test/resources/runtimets/only.txt
+++ b/asterix-app/src/test/resources/runtimets/only.txt
@@ -1 +0,0 @@
-dml/insert-into-empty-dataset-with-index_01.aql
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
similarity index 93%
rename from asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql
rename to asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
index 2be5372..00b60cb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
@@ -43,7 +43,8 @@
}
);
-write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index_01.adm";
+write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index.adm";
for $c in dataset('LineID')
+where $c.l_suppkey < 100 and $c.l_linenumber<5
order by $c.l_orderkey, $c.l_linenumber
return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql
deleted file mode 100644
index 947b4d4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Test case Name : insert-into-empty-dataset-with-index.aql
- * Description : Check that we can insert into an empty dataset and its empty secondary indexes
- * Expected Result : Success
- * Date : May 2 2012
- */
-
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
-
-create type LineIDType as closed {
- l_orderkey: int32,
- l_linenumber: int32,
- l_suppkey: int32
-}
-
-create dataset LineID(LineIDType)
- partitioned by key l_orderkey, l_linenumber;
-
-create index idx_LineID_partkey on LineID(l_linenumber);
-create index idx_LineID_suppkey on LineID(l_suppkey);
-
-insert into dataset LineID (
-let $x:=1
-let $y:=2
-let $z:=3
-return {
- "l_orderkey": $x,
- "l_linenumber": $y,
- "l_suppkey": $z
-}
-);
-
-insert into dataset LineID (
-let $x:=2
-let $y:=3
-let $z:=4
-return {
- "l_orderkey": $x,
- "l_linenumber": $y,
- "l_suppkey": $z
-}
-);
-
-write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index_02.adm";
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_01.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
similarity index 100%
rename from asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_01.adm
rename to asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm
deleted file mode 100644
index 1b6c344..0000000
--- a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm
+++ /dev/null
@@ -1,2 +0,0 @@
-{ "l_orderkey": 1, "l_linenumber": 2, "l_suppkey": 3 }
-{ "l_orderkey": 2, "l_linenumber": 3, "l_suppkey": 4 }
diff --git a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
index 85f9fb1..a80aadc 100644
--- a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -74,7 +74,7 @@
} catch (ACIDException ae) {
throw new RuntimeException(" could not obtain context for invalid transaction id " + transactionId);
}
- return new TreeIndexInsertUpdateDeleteOperatorNodePushable(txnContext, this, ctx, opCallbackProvider,
- partition, fieldPermutation, recordDescProvider, op);
+ return new TreeIndexInsertUpdateDeleteOperatorNodePushable(txnContext, this, ctx, partition, fieldPermutation,
+ recordDescProvider, op);
}
}
diff --git a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
index 92d4158..0749693 100644
--- a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -36,7 +36,6 @@
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
@@ -63,12 +62,11 @@
* been introduced.
*/
public TreeIndexInsertUpdateDeleteOperatorNodePushable(TransactionContext txnContext,
- AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
- IOperationCallbackProvider opCallbackProvider, int partition, int[] fieldPermutation,
+ AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
IRecordDescriptorProvider recordDescProvider, IndexOp op) {
boolean createIfNotExists = (op == IndexOp.INSERT);
treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
- opDesc, ctx, opCallbackProvider, partition, createIfNotExists);
+ opDesc, ctx, partition);
this.recordDescProvider = recordDescProvider;
this.op = op;
tuple.setFieldPermutation(fieldPermutation);
@@ -103,7 +101,7 @@
writeBuffer = treeIndexHelper.getHyracksTaskContext().allocateFrame();
writer.open();
try {
- treeIndexHelper.init();
+ treeIndexHelper.init(false);
ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
indexAccessor = treeIndex.createAccessor();
initializeTransactionSupport();
@@ -181,7 +179,20 @@
@Override
public void fail() throws HyracksDataException {
- writer.fail();
+ try {
+ writer.fail();
+ } finally {
+ txnContext.addCloseableResource(new ICloseable() {
+ @Override
+ public void close(TransactionContext txnContext) throws ACIDException {
+ try {
+ treeIndexHelper.deinit();
+ } catch (Exception e) {
+ throw new ACIDException(txnContext, "could not de-initialize " + treeIndexHelper, e);
+ }
+ }
+ });
+ }
}
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
index 3a21bda..b7f0f75 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/utils/DatasetUtils.java
@@ -12,7 +12,6 @@
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
import edu.uci.ics.hyracks.algebricks.runtime.base.IEvaluatorFactory;