Clean up storage exceptions
Change-Id: I020d2b4b1f4ae48fc2df0b720e70a1ce95867d34
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1619
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Yingyi Bu <buyingyi@gmail.com>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index b962ce8..d2379c5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1650,7 +1650,7 @@
<test-case FilePath="dml">
<compilation-unit name="insert-duplicated-keys-from-query">
<output-dir compare="Text">insert-duplicated-keys-from-query</output-dir>
- <expected-error>Failed to insert key since key already exists</expected-error>
+ <expected-error>Inserting duplicate keys into the primary storage</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="dml">
@@ -1833,7 +1833,7 @@
<test-case FilePath="dml">
<compilation-unit name="insert-duplicated-keys">
<output-dir compare="Text">insert-duplicated-keys</output-dir>
- <expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
+ <expected-error>Inserting duplicate keys into the primary storage</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="dml">
@@ -6742,7 +6742,7 @@
<test-case FilePath="load">
<compilation-unit name="duplicate-key-error">
<output-dir compare="Text">none</output-dir>
- <expected-error>Input stream given to BTree bulk load has duplicates</expected-error>
+ <expected-error>Loading duplicate keys into the primary storage</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="load">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 3c075e4..6f8b094 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -1719,7 +1719,7 @@
<test-case FilePath="dml">
<compilation-unit name="insert-duplicated-keys">
<output-dir compare="Text">insert-duplicated-keys</output-dir>
- <expected-error>Failed to insert key since key already exists</expected-error>
+ <expected-error>Inserting duplicate keys into the primary storage</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="dml">
@@ -7785,7 +7785,7 @@
<test-case FilePath="load">
<compilation-unit name="duplicate-key-error">
<output-dir compare="Text">none</output-dir>
- <expected-error>Input stream given to BTree bulk load has duplicates</expected-error>
+ <expected-error>Loading duplicate keys into the primary storage</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="load">
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
index e16ce78..17da26a 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicy.java
@@ -28,11 +28,10 @@
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -51,8 +50,7 @@
}
@Override
- public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
- throws HyracksDataException, IndexException {
+ public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
// This merge policy will only look at primary indexes in order to evaluate if a merge operation is needed. If it decides that
// a merge operation is needed, then it will merge *all* the indexes that belong to the dataset. The criteria to decide if a merge
// is needed is the same as the one that is used in the prefix merge policy:
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
index a3f277a..cdb40d8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFileIndexAccessor.java
@@ -43,7 +43,6 @@
import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelper;
import org.apache.hyracks.storage.am.lsm.btree.impls.ExternalBTree;
@@ -97,7 +96,7 @@
fileIndexSearchCursor = fileIndexAccessor.createSearchCursor(false);
}
- public void lookup(int fileId, ExternalFile file) throws HyracksDataException, IndexException {
+ public void lookup(int fileId, ExternalFile file) throws HyracksDataException {
// Set search parameters
currentFileNumber.setValue(fileId);
searchKeyTupleBuilder.reset();
@@ -130,8 +129,9 @@
.getStringValue());
file.setSize(((AInt64) externalFileRecord.getValueByPos(FilesIndexDescription.EXTERNAL_FILE_SIZE_FIELD_INDEX))
.getLongValue());
- file.setLastModefiedTime(new Date(((ADateTime) externalFileRecord
- .getValueByPos(FilesIndexDescription.EXTERNAL_FILE_MOD_DATE_FIELD_INDEX)).getChrononTime()));
+ file.setLastModefiedTime(new Date(
+ ((ADateTime) externalFileRecord.getValueByPos(FilesIndexDescription.EXTERNAL_FILE_MOD_DATE_FIELD_INDEX))
+ .getChrononTime()));
}
public void close() throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
index 2ddb646..1559469 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
@@ -41,28 +40,23 @@
IIndexDataflowHelperFactory filesIndexDataflowHelperFactory, IndexInfoOperatorDescriptor fileIndexesInfo,
List<IIndexDataflowHelperFactory> indexesDataflowHelperFactories,
List<IndexInfoOperatorDescriptor> indexesInfos) {
- super(spec, filesIndexDataflowHelperFactory, fileIndexesInfo, indexesDataflowHelperFactories,
- indexesInfos);
+ super(spec, filesIndexDataflowHelperFactory, fileIndexesInfo, indexesDataflowHelperFactories, indexesInfos);
}
@Override
protected void performOpOnIndex(IIndexDataflowHelperFactory indexDataflowHelperFactory, IHyracksTaskContext ctx,
- IndexInfoOperatorDescriptor fileIndexInfo, int partition) {
- try {
- FileReference resourecePath =
- IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition, ctx.getIOManager());
- LOGGER.warn("performing the operation on " + resourecePath.getFile().getAbsolutePath());
- // Get DataflowHelper
- IIndexDataflowHelper indexHelper =
- indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx, partition);
- // Get index
- IIndex index = indexHelper.getIndexInstance();
- // commit transaction
- ((ITwoPCIndex) index).commitTransaction();
- LOGGER.warn("operation on " + resourecePath.getFile().getAbsolutePath() + " Succeded");
- } catch (HyracksDataException | IndexException e) {
- throw new IllegalStateException(e);
- }
+ IndexInfoOperatorDescriptor fileIndexInfo, int partition) throws HyracksDataException {
+            FileReference resourcePath =
+                    IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition, ctx.getIOManager());
+            LOGGER.warn("performing the operation on " + resourcePath.getFile().getAbsolutePath());
+            // Get DataflowHelper
+            IIndexDataflowHelper indexHelper =
+                    indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx, partition);
+            // Get index
+            IIndex index = indexHelper.getIndexInstance();
+            // commit transaction
+            ((ITwoPCIndex) index).commitTransaction();
+            LOGGER.warn("operation on " + resourcePath.getFile().getAbsolutePath() + " Succeeded");
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
index 2bb986d..556fb51 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalFilesIndexOperatorDescriptor.java
@@ -18,7 +18,6 @@
*/
package org.apache.asterix.external.operators;
-import java.io.IOException;
import java.util.List;
import org.apache.asterix.external.indexing.ExternalFile;
@@ -39,7 +38,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -61,9 +59,9 @@
private boolean createNewIndex;
private List<ExternalFile> files;
- public ExternalFilesIndexOperatorDescriptor(IOperatorDescriptorRegistry spec,
- IStorageManager storageManager, IIndexLifecycleManagerProvider lifecycleManagerProvider,
- IFileSplitProvider fileSplitProvider, IIndexDataflowHelperFactory dataflowHelperFactory,
+ public ExternalFilesIndexOperatorDescriptor(IOperatorDescriptorRegistry spec, IStorageManager storageManager,
+ IIndexLifecycleManagerProvider lifecycleManagerProvider, IFileSplitProvider fileSplitProvider,
+ IIndexDataflowHelperFactory dataflowHelperFactory,
ILocalResourceFactoryProvider localResourceFactoryProvider, List<ExternalFile> files,
boolean createNewIndex, IMetadataPageManagerFactory metadataPageManagerFactory) {
super(spec, 0, 0, null, storageManager, lifecycleManagerProvider, fileSplitProvider,
@@ -79,8 +77,8 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
- final IIndexDataflowHelper indexHelper = getIndexDataflowHelperFactory().createIndexDataflowHelper(this, ctx,
- partition);
+ final IIndexDataflowHelper indexHelper =
+ getIndexDataflowHelperFactory().createIndexDataflowHelper(this, ctx, partition);
return new AbstractOperatorNodePushable() {
@SuppressWarnings("incomplete-switch")
@@ -95,15 +93,13 @@
try {
IIndex index = indexHelper.getIndexInstance();
// Create bulk loader
- IIndexBulkLoader bulkLoader = index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false,
- files.size(), false);
+ IIndexBulkLoader bulkLoader =
+ index.createBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
// Load files
for (ExternalFile file : files) {
bulkLoader.add(filesTupleTranslator.getTupleFromFile(file));
}
bulkLoader.end();
- } catch (IndexException | IOException e) {
- throw new HyracksDataException(e);
} finally {
indexHelper.close();
}
@@ -114,8 +110,8 @@
IIndex index = indexHelper.getIndexInstance();
LSMTwoPCBTreeBulkLoader bulkLoader = null;
try {
- bulkLoader = (LSMTwoPCBTreeBulkLoader) ((ExternalBTree) index).createTransactionBulkLoader(
- BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
+ bulkLoader = (LSMTwoPCBTreeBulkLoader) ((ExternalBTree) index)
+ .createTransactionBulkLoader(BTree.DEFAULT_FILL_FACTOR, false, files.size(), false);
// Load files
// The files must be ordered according to their numbers
for (ExternalFile file : files) {
@@ -130,11 +126,11 @@
}
}
bulkLoader.end();
- } catch (IndexException | IOException e) {
+ } catch (Exception e) {
if (bulkLoader != null) {
bulkLoader.abort();
}
- throw new HyracksDataException(e);
+ throw HyracksDataException.create(e);
} finally {
indexHelper.close();
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
index c95a4a7..d3bd2bb 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalIndexBulkModifyOperatorNodePushable.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.IndexBulkLoadOperatorNodePushable;
import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
@@ -38,8 +37,8 @@
private final FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
private final int[] deletedFiles;
- private ArrayTupleBuilder buddyBTreeTupleBuilder = new ArrayTupleBuilder(
- filesIndexDescription.FILE_BUDDY_BTREE_RECORD_DESCRIPTOR.getFieldCount());
+ private ArrayTupleBuilder buddyBTreeTupleBuilder =
+ new ArrayTupleBuilder(filesIndexDescription.FILE_BUDDY_BTREE_RECORD_DESCRIPTOR.getFieldCount());
private AMutableInt32 fileNumber = new AMutableInt32(0);
private ArrayTupleReference deleteTuple = new ArrayTupleReference();
@@ -81,11 +80,7 @@
int tupleCount = accessor.getTupleCount();
for (int i = 0; i < tupleCount; i++) {
tuple.reset(accessor, i);
- try {
- bulkLoader.add(tuple);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ bulkLoader.add(tuple);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 7a04ce8..8080fcb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -19,13 +19,14 @@
package org.apache.asterix.metadata;
-import java.io.IOException;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.apache.asterix.common.api.IAppRuntimeContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
@@ -101,6 +102,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -112,9 +114,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -123,7 +122,7 @@
public class MetadataNode implements IMetadataNode {
private static final long serialVersionUID = 1L;
-
+ private static final Logger LOGGER = Logger.getLogger(MetadataNode.class.getName());
private static final DatasetId METADATA_DATASET_ID =
new ImmutableDatasetId(MetadataPrimaryIndexes.PROPERTIES_METADATA.getDatasetId());
@@ -142,13 +141,13 @@
super();
}
- public void initialize(IAppRuntimeContext runtimeContext,
- MetadataTupleTranslatorProvider tupleTranslatorProvider, List<IMetadataExtension> metadataExtensions) {
+ public void initialize(IAppRuntimeContext runtimeContext, MetadataTupleTranslatorProvider tupleTranslatorProvider,
+ List<IMetadataExtension> metadataExtensions) {
this.tupleTranslatorProvider = tupleTranslatorProvider;
this.transactionSubsystem = runtimeContext.getTransactionSubsystem();
this.datasetLifecycleManager = runtimeContext.getDatasetLifecycleManager();
- this.metadataStoragePartition = ((IPropertiesProvider) runtimeContext).getMetadataProperties()
- .getMetadataPartition().getPartitionId();
+ this.metadataStoragePartition =
+ ((IPropertiesProvider) runtimeContext).getMetadataProperties().getMetadataPartition().getPartitionId();
if (metadataExtensions != null) {
extensionDatasets = new HashMap<>();
for (IMetadataExtension metadataExtension : metadataExtensions) {
@@ -174,11 +173,11 @@
@Override
public void abortTransaction(JobId jobId) throws RemoteException, ACIDException {
try {
- ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
- false);
+ ITransactionContext txnCtx =
+ transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
transactionSubsystem.getTransactionManager().abortTransaction(txnCtx, DatasetId.NULL, -1);
} catch (ACIDException e) {
- e.printStackTrace();
+ LOGGER.log(Level.WARNING, "Exception aborting transaction", e);
throw e;
}
}
@@ -210,9 +209,7 @@
try {
ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(entity);
insertTupleIntoIndex(jobId, index, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException(entity.toString() + " already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
@@ -231,9 +228,7 @@
try {
ITupleReference tuple = tupleTranslator.getTupleFromMetadataEntity(entity);
deleteTupleFromIndex(jobId, index, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException(entity.toString() + " already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
@@ -247,15 +242,17 @@
* @param index
* @return
* @throws MetadataException
+ * @throws RemoteException
*/
private <T> List<T> getEntities(JobId jobId, ITupleReference searchKey,
- IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index) throws MetadataException {
+ IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index)
+ throws MetadataException, RemoteException {
try {
IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<>(tupleTranslator);
List<T> results = new ArrayList<>();
searchIndex(jobId, index, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -288,8 +285,8 @@
@Override
public <T extends IExtensionMetadataEntity> List<T> getEntities(JobId jobId, IExtensionMetadataSearchKey searchKey)
throws MetadataException, RemoteException {
- ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets
- .get(searchKey.getDatasetId());
+ ExtensionMetadataDataset<T> index =
+ (ExtensionMetadataDataset<T>) extensionDatasets.get(searchKey.getDatasetId());
if (index == null) {
throw new MetadataException("Metadata Extension Index: " + searchKey.getDatasetId() + " was not found");
}
@@ -303,10 +300,14 @@
DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(dataverse);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException(
- "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException(
+ "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -328,10 +329,14 @@
addIndex(jobId, primaryIndex);
}
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
- + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A dataset with this name " + dataset.getDatasetName()
+ + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -342,9 +347,13 @@
IndexTupleTranslator tupleWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, true);
ITupleReference tuple = tupleWriter.getTupleFromMetadataEntity(index);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("An index with name '" + index.getIndexName() + "' already exists.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -355,9 +364,13 @@
NodeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(node);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODE_DATASET, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A node with name '" + node.getNodeName() + "' already exists.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -368,10 +381,14 @@
NodeGroupTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeGroupTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(nodeGroup);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.",
- e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException(
+ "A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -379,13 +396,18 @@
@Override
public void addDatatype(JobId jobId, Datatype datatype) throws MetadataException, RemoteException {
try {
- DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
- true);
+ DatatypeTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(datatype);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A datatype with name '" + datatype.getDatatypeName() + "' already exists.",
+ e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -395,19 +417,25 @@
try {
// Insert into the 'function' dataset.
FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(true);
+
ITupleReference functionTuple = tupleReaderWriter.getTupleFromMetadataEntity(function);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A function with this name " + function.getName() + " and arity "
- + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A function with this name " + function.getName() + " and arity "
+ + function.getArity() + " already exists in dataverse '" + function.getDataverseName() + "'.",
+ e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
private void insertTupleIntoIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
- throws ACIDException, HyracksDataException, IndexException {
+ throws ACIDException, HyracksDataException {
long resourceID = metadataIndex.getResourceId();
String resourceName = metadataIndex.getFile().getRelativePath();
ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.get(resourceName);
@@ -415,13 +443,13 @@
datasetLifecycleManager.open(resourceName);
// prepare a Callback for logging
- IModificationOperationCallback modCallback = createIndexModificationCallback(jobId, resourceID,
- metadataIndex, lsmIndex, Operation.INSERT);
+ IModificationOperationCallback modCallback =
+ createIndexModificationCallback(jobId, resourceID, metadataIndex, lsmIndex, Operation.INSERT);
ILSMIndexAccessor indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
- ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
- false);
+ ITransactionContext txnCtx =
+ transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
txnCtx.setWriteTxn(true);
txnCtx.registerIndexAndCallback(resourceID, lsmIndex, (AbstractOperationCallback) modCallback,
metadataIndex.isPrimaryIndex());
@@ -520,9 +548,15 @@
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.",
+ e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -530,12 +564,7 @@
@Override
public void dropDataset(JobId jobId, String dataverseName, String datasetName)
throws MetadataException, RemoteException {
- Dataset dataset;
- try {
- dataset = getDataset(jobId, dataverseName, datasetName);
- } catch (Exception e) {
- throw new MetadataException(e);
- }
+ Dataset dataset = getDataset(jobId, dataverseName, datasetName);
if (dataset == null) {
throw new MetadataException("Cannot drop dataset '" + datasetName + "' because it doesn't exist.");
}
@@ -568,14 +597,17 @@
}
}
}
- } catch (TreeIndexException tie) {
+ } catch (HyracksDataException hde) {
// ignore this exception and continue deleting all relevant
// artifacts.
+ if (!hde.getComponent().equals(ErrorCode.HYRACKS)
+ || hde.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException(hde);
+ }
} finally {
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
}
-
- } catch (Exception e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
@@ -591,10 +623,15 @@
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException(
- "Cannot drop index '" + datasetName + "." + indexName + "' because it doesn't exist.", e);
- } catch (Exception e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException(
+ "Cannot drop index '" + datasetName + "." + indexName + "' because it doesn't exist.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -602,11 +639,7 @@
@Override
public void dropNodegroup(JobId jobId, String nodeGroupName) throws MetadataException, RemoteException {
List<String> datasetNames;
- try {
- datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(jobId, nodeGroupName);
- } catch (Exception e) {
- throw new MetadataException(e);
- }
+ datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(jobId, nodeGroupName);
if (!datasetNames.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("Nodegroup '" + nodeGroupName
@@ -624,9 +657,15 @@
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop nodegroup '" + nodeGroupName + "' because it doesn't exist", e);
- } catch (Exception e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop nodegroup '" + nodeGroupName + "' because it doesn't exist",
+ e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -655,14 +694,20 @@
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
- } catch (Exception e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
- private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName) throws MetadataException {
+ private void forceDropDatatype(JobId jobId, String dataverseName, String datatypeName)
+ throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, datatypeName);
// Searches the index for the tuple to be deleted. Acquires an S
@@ -671,27 +716,32 @@
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
private void deleteTupleFromIndex(JobId jobId, IMetadataIndex metadataIndex, ITupleReference tuple)
- throws ACIDException, HyracksDataException, IndexException {
+ throws ACIDException, HyracksDataException {
long resourceID = metadataIndex.getResourceId();
String resourceName = metadataIndex.getFile().getRelativePath();
ILSMIndex lsmIndex = (ILSMIndex) datasetLifecycleManager.get(resourceName);
try {
datasetLifecycleManager.open(resourceName);
// prepare a Callback for logging
- IModificationOperationCallback modCallback = createIndexModificationCallback(jobId, resourceID,
- metadataIndex, lsmIndex, Operation.DELETE);
+ IModificationOperationCallback modCallback =
+ createIndexModificationCallback(jobId, resourceID, metadataIndex, lsmIndex, Operation.DELETE);
ILSMIndexAccessor indexAccessor = lsmIndex.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
- ITransactionContext txnCtx = transactionSubsystem.getTransactionManager().getTransactionContext(jobId,
- false);
+ ITransactionContext txnCtx =
+ transactionSubsystem.getTransactionManager().getTransactionContext(jobId, false);
txnCtx.setWriteTxn(true);
txnCtx.registerIndexAndCallback(resourceID, lsmIndex, (AbstractOperationCallback) modCallback,
metadataIndex.isPrimaryIndex());
@@ -712,7 +762,7 @@
List<Dataverse> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATAVERSE_DATASET, null, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -729,7 +779,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -744,7 +794,7 @@
List<Dataset> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -758,7 +808,7 @@
List<Feed> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -773,21 +823,22 @@
List<Library> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
- private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName) throws MetadataException {
+ private List<Datatype> getDataverseDatatypes(JobId jobId, String dataverseName)
+ throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName);
- DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
- false);
+ DatatypeTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<Datatype> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -805,12 +856,12 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
- public List<Dataset> getAllDatasets(JobId jobId) throws MetadataException {
+ public List<Dataset> getAllDatasets(JobId jobId) throws MetadataException, RemoteException {
try {
ITupleReference searchKey = null;
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -818,26 +869,27 @@
List<Dataset> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
- public List<Datatype> getAllDatatypes(JobId jobId) throws MetadataException {
+ public List<Datatype> getAllDatatypes(JobId jobId) throws MetadataException, RemoteException {
try {
ITupleReference searchKey = null;
- DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
- false);
+ DatatypeTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<Datatype> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
- private void confirmDataverseCanBeDeleted(JobId jobId, String dataverseName) throws MetadataException {
+ private void confirmDataverseCanBeDeleted(JobId jobId, String dataverseName)
+ throws MetadataException, RemoteException {
//If a dataset from a DIFFERENT dataverse
//uses a type from this dataverse
//throw an error
@@ -860,7 +912,7 @@
}
private void confirmDatatypeIsUnusedByDatasets(JobId jobId, String dataverseName, String datatypeName)
- throws MetadataException {
+ throws MetadataException, RemoteException {
//If any dataset uses this type, throw an error
List<Dataset> datasets = getAllDatasets(jobId);
for (Dataset set : datasets) {
@@ -920,7 +972,7 @@
}
public List<String> getDatasetNamesPartitionedOnThisNodeGroup(JobId jobId, String nodegroup)
- throws MetadataException {
+ throws MetadataException, RemoteException {
//this needs to scan the datasets and return the datasets that use this nodegroup
List<String> nodeGroupDatasets = new ArrayList<>();
List<Dataset> datasets = getAllDatasets(jobId);
@@ -938,8 +990,8 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
- IndexTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this,
- false);
+ IndexTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, false);
IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<Index> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, searchKey, valueExtractor, results);
@@ -947,7 +999,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -957,13 +1009,13 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName);
- IndexTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getIndexTupleTranslator(jobId, this,
- false);
+ IndexTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getIndexTupleTranslator(jobId, this, false);
IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<Index> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.INDEX_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -973,8 +1025,8 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, datatypeName);
- DatatypeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this,
- false);
+ DatatypeTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getDataTypeTupleTranslator(jobId, this, false);
IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<Datatype> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATATYPE_DATASET, searchKey, valueExtractor, results);
@@ -982,7 +1034,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -999,7 +1051,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1018,7 +1070,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1039,29 +1091,34 @@
"" + functionSignature.getArity());
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'function' dataset.
- ITupleReference functionTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET,
- searchKey);
+ ITupleReference functionTuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("There is no function with the name " + functionSignature.getName()
- + " and arity " + functionSignature.getArity(), e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("There is no function with the name " + functionSignature.getName()
+ + " and arity " + functionSignature.getArity(), e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
private ITupleReference getTupleToBeDeleted(JobId jobId, IMetadataIndex metadataIndex, ITupleReference searchKey)
- throws MetadataException, IndexException, IOException {
+ throws MetadataException, HyracksDataException, RemoteException {
IValueExtractor<ITupleReference> valueExtractor = new TupleCopyValueExtractor(metadataIndex.getTypeTraits());
List<ITupleReference> results = new ArrayList<>();
searchIndex(jobId, metadataIndex, searchKey, valueExtractor, results);
if (results.isEmpty()) {
// TODO: Temporarily a TreeIndexException to make it get caught by
// caller in the appropriate catch block.
- throw new TreeIndexException("Could not find entry to be deleted.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
// There should be exactly one result returned from the search.
return results.get(0);
@@ -1076,8 +1133,8 @@
String resourceName = index.getFile().toString();
IIndex indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
- IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
RangePredicate rangePred = null;
@@ -1086,9 +1143,8 @@
try {
while (rangeCursor.hasNext()) {
rangeCursor.next();
- sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
- new ISerializerDeserializer[] { SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING) }));
+ sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
}
} finally {
rangeCursor.close();
@@ -1098,8 +1154,8 @@
index = MetadataPrimaryIndexes.DATASET_DATASET;
indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
- indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ indexAccessor =
+ indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
rangePred = null;
@@ -1108,12 +1164,9 @@
try {
while (rangeCursor.hasNext()) {
rangeCursor.next();
- sb.append(TupleUtils.printTuple(rangeCursor.getTuple(),
- new ISerializerDeserializer[] {
- SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING),
- SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING) }));
+ sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
}
} finally {
rangeCursor.close();
@@ -1123,8 +1176,8 @@
index = MetadataPrimaryIndexes.INDEX_DATASET;
indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
- indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ indexAccessor =
+ indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
rangePred = null;
@@ -1136,14 +1189,14 @@
sb.append(TupleUtils.printTuple(rangeCursor.getTuple(), new ISerializerDeserializer[] {
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING),
- SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING) }));
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING) }));
}
} finally {
rangeCursor.close();
}
datasetLifecycleManager.close(resourceName);
} catch (Exception e) {
+ // Debugging method
e.printStackTrace();
}
return sb.toString();
@@ -1151,7 +1204,7 @@
private <ResultType> void searchIndex(JobId jobId, IMetadataIndex index, ITupleReference searchKey,
IValueExtractor<ResultType> valueExtractor, List<ResultType> results)
- throws MetadataException, IndexException, IOException {
+ throws MetadataException, HyracksDataException, RemoteException {
IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
if (index.getFile() == null) {
throw new MetadataException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
@@ -1159,8 +1212,8 @@
String resourceName = index.getFile().getRelativePath();
IIndex indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
- IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
ITreeIndexCursor rangeCursor = (ITreeIndexCursor) indexAccessor.createSearchCursor(false);
IBinaryComparator[] searchCmps = null;
@@ -1198,8 +1251,8 @@
IIndex indexInstance = datasetLifecycleManager.get(resourceName);
datasetLifecycleManager.open(resourceName);
try {
- IIndexAccessor indexAccessor = indexInstance.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ indexInstance.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -1226,7 +1279,7 @@
datasetLifecycleManager.close(resourceName);
}
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
@@ -1237,8 +1290,8 @@
// Hyracks version.
public static ITupleReference createTuple(String... fields) {
@SuppressWarnings("unchecked")
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
AMutableString aString = new AMutableString("");
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
for (String s : fields) {
@@ -1266,7 +1319,7 @@
List<Function> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1275,17 +1328,18 @@
public void addAdapter(JobId jobId, DatasourceAdapter adapter) throws MetadataException, RemoteException {
try {
// Insert into the 'Adapter' dataset.
- DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getAdapterTupleTranslator(true);
+ DatasourceAdapterTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getAdapterTupleTranslator(true);
ITupleReference adapterTuple = tupleReaderWriter.getTupleFromMetadataEntity(adapter);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, adapterTuple);
-
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException(
- "A adapter with this name " + adapter.getAdapterIdentifier().getName()
- + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.",
- e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A adapter with this name " + adapter.getAdapterIdentifier().getName()
+ + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1302,15 +1356,20 @@
ITupleReference searchKey = createTuple(dataverseName, adapterName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'Adapter' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET,
- searchKey);
+ ITupleReference datasetTuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, datasetTuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop adapter '" + adapterName, e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop adapter '" + adapterName + " since it doesn't exist", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
@@ -1321,8 +1380,8 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, adapterName);
- DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getAdapterTupleTranslator(false);
+ DatasourceAdapterTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getAdapterTupleTranslator(false);
List<DatasourceAdapter> results = new ArrayList<>();
IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
@@ -1330,7 +1389,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1340,15 +1399,18 @@
throws MetadataException, RemoteException {
try {
// Insert into the 'CompactionPolicy' dataset.
- CompactionPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getCompactionPolicyTupleTranslator(true);
+ CompactionPolicyTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getCompactionPolicyTupleTranslator(true);
ITupleReference compactionPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(compactionPolicy);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, compactionPolicyTuple);
-
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("A compcation policy with this name " + compactionPolicy.getPolicyName()
- + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A compcation policy with this name " + compactionPolicy.getPolicyName()
+ + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1358,8 +1420,8 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverse, policyName);
- CompactionPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getCompactionPolicyTupleTranslator(false);
+ CompactionPolicyTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getCompactionPolicyTupleTranslator(false);
List<CompactionPolicy> results = new ArrayList<>();
IValueExtractor<CompactionPolicy> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
searchIndex(jobId, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, searchKey, valueExtractor, results);
@@ -1367,7 +1429,7 @@
return results.get(0);
}
return null;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1377,13 +1439,13 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName);
- DatasourceAdapterTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getAdapterTupleTranslator(false);
+ DatasourceAdapterTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getAdapterTupleTranslator(false);
IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<DatasourceAdapter> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1396,10 +1458,14 @@
ITupleReference libraryTuple = tupleReaderWriter.getTupleFromMetadataEntity(library);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, libraryTuple);
- } catch (TreeIndexException e) {
- throw new MetadataException("A library with this name " + library.getDataverseName()
- + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A library with this name " + library.getDataverseName()
+ + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1416,15 +1482,20 @@
ITupleReference searchKey = createTuple(dataverseName, libraryName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'Adapter' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET,
- searchKey);
+ ITupleReference datasetTuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.LIBRARY_DATASET, datasetTuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop library '" + libraryName, e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop library '" + libraryName, e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
@@ -1443,7 +1514,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1460,11 +1531,14 @@
FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(true);
ITupleReference feedPolicyTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedPolicy);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, feedPolicyTuple);
-
- } catch (TreeIndexException e) {
- throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
- + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A feed policy with this name " + feedPolicy.getPolicyName()
+ + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1482,7 +1556,7 @@
return results.get(0);
}
return null;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1493,14 +1567,14 @@
FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(true);
ITupleReference feedConnTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedConnection);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, feedConnTuple);
- } catch (IndexException | ACIDException | IOException e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
@Override
public List<FeedConnection> getFeedConnections(JobId jobId, String dataverseName, String feedName)
- throws MetadataException {
+ throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName);
FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
@@ -1508,14 +1582,14 @@
IValueExtractor<FeedConnection> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
searchIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@Override
public FeedConnection getFeedConnection(JobId jobId, String dataverseName, String feedName, String datasetName)
- throws MetadataException {
+ throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
@@ -1526,20 +1600,20 @@
return results.get(0);
}
return null;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@Override
public void dropFeedConnection(JobId jobId, String dataverseName, String feedName, String datasetName)
- throws MetadataException {
+ throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
- ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET,
- searchKey);
+ ITupleReference tuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, tuple);
- } catch (IndexException | IOException | ACIDException e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1551,11 +1625,14 @@
FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(true);
ITupleReference feedTuple = tupleReaderWriter.getTupleFromMetadataEntity(feed);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple);
-
- } catch (TreeIndexException e) {
- throw new MetadataException("A feed with this name " + feed.getFeedName() + " already exists in dataverse '"
- + feed.getDataverseName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("A feed with this name " + feed.getFeedName()
+ + " already exists in dataverse '" + feed.getDataverseName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1572,7 +1649,7 @@
return results.get(0);
}
return null;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1587,9 +1664,14 @@
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_DATASET, tuple);
// TODO: Change this to be a BTree specific exception, e.g.,
// BTreeKeyDoesNotExistException.
- } catch (TreeIndexException e) {
- throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1601,9 +1683,14 @@
ITupleReference searchKey = createTuple(dataverseName, policyName);
ITupleReference tuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, tuple);
- } catch (TreeIndexException e) {
- throw new MetadataException("Unknown feed policy " + policyName, e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Unknown feed policy " + policyName, e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1618,7 +1705,7 @@
List<FeedPolicyEntity> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1627,15 +1714,19 @@
public void addExternalFile(JobId jobId, ExternalFile externalFile) throws MetadataException, RemoteException {
try {
// Insert into the 'externalFiles' dataset.
- ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getExternalFileTupleTranslator(true);
+ ExternalFileTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getExternalFileTupleTranslator(true);
ITupleReference externalFileTuple = tupleReaderWriter.getTupleFromMetadataEntity(externalFile);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, externalFileTuple);
- } catch (TreeIndexDuplicateKeyException e) {
- throw new MetadataException("An externalFile with this number " + externalFile.getFileNumber()
- + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
- + externalFile.getDataverseName() + "'.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
+ throw new MetadataException("An externalFile with this number " + externalFile.getFileNumber()
+ + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
+ + externalFile.getDataverseName() + "'.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1644,13 +1735,13 @@
public List<ExternalFile> getExternalFiles(JobId jobId, Dataset dataset) throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
- ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getExternalFileTupleTranslator(false);
+ ExternalFileTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getExternalFileTupleTranslator(false);
IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<ExternalFile> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
return results;
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1663,12 +1754,17 @@
ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'ExternalFile' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET,
- searchKey);
+ ITupleReference datasetTuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, datasetTuple);
- } catch (TreeIndexException e) {
- throw new MetadataException("Couldn't drop externalFile.", e);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException e) {
+ if (e.getComponent().equals(ErrorCode.HYRACKS)
+ && e.getErrorCode() == ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw new MetadataException("Couldn't drop externalFile.", e);
+ } else {
+ throw new MetadataException(e);
+ }
+ } catch (ACIDException e) {
throw new MetadataException(e);
}
}
@@ -1687,10 +1783,10 @@
@SuppressWarnings("unchecked")
public ITupleReference createExternalFileSearchTuple(String dataverseName, String datasetName, int fileNumber)
throws HyracksDataException {
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- ISerializerDeserializer<AInt32> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AInt32> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
AMutableString aString = new AMutableString("");
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(3);
@@ -1719,8 +1815,8 @@
throws MetadataException, RemoteException {
try {
ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
- ExternalFileTupleTranslator tupleReaderWriter = tupleTranslatorProvider
- .getExternalFileTupleTranslator(false);
+ ExternalFileTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getExternalFileTupleTranslator(false);
IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<ExternalFile> results = new ArrayList<>();
searchIndex(jobId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, searchKey, valueExtractor, results);
@@ -1728,7 +1824,7 @@
return null;
}
return results.get(0);
- } catch (IndexException | IOException e) {
+ } catch (HyracksDataException e) {
throw new MetadataException(e);
}
}
@@ -1742,15 +1838,15 @@
searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'dataset' dataset.
- ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET,
- searchKey);
+ ITupleReference datasetTuple =
+ getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
// Previous tuple was deleted
// Insert into the 'dataset' dataset.
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(true);
datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
- } catch (ACIDException | IndexException | IOException e) {
+ } catch (HyracksDataException | ACIDException e) {
throw new MetadataException(e);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index d8ccbde..9a9f18d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -21,8 +21,10 @@
import java.io.IOException;
import java.io.Serializable;
+import java.rmi.RemoteException;
import org.apache.asterix.metadata.MetadataException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -45,7 +47,8 @@
* @throws MetadataException
* @throws IOException
*/
- public T getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException;
+ public T getMetadataEntityFromTuple(ITupleReference tuple)
+ throws MetadataException, HyracksDataException, RemoteException;
/**
* Serializes the given metadata entity of type T into an appropriate tuple
@@ -53,7 +56,7 @@
*
* @param metadataEntity
* Metadata entity to be written into a tuple.
- * @throws IOException
+ * @throws HyracksDataException
*/
- public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, IOException;
+ public ITupleReference getTupleFromMetadataEntity(T metadataEntity) throws MetadataException, HyracksDataException;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
index e90e29b..c8db613 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
@@ -20,6 +20,7 @@
package org.apache.asterix.metadata.api;
import java.io.IOException;
+import java.rmi.RemoteException;
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.metadata.MetadataException;
@@ -46,5 +47,6 @@
* @throws HyracksDataException
* @throws IOException
*/
- public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException;
+ public T getValue(JobId jobId, ITupleReference tuple)
+ throws MetadataException, HyracksDataException, RemoteException;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
index 4598e1e..2ca7215 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.MetadataException;
@@ -32,6 +31,7 @@
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -56,7 +56,7 @@
}
@Override
- public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws IOException {
+ public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws HyracksDataException {
byte[] serRecord = tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -82,7 +82,7 @@
@Override
public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy)
- throws IOException, MetadataException {
+ throws HyracksDataException, MetadataException {
tupleBuilder.reset();
aString.setValue(compactionPolicy.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 138cad7..c3c5023 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -23,7 +23,6 @@
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
@@ -83,8 +82,8 @@
public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
+ protected final ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
protected final transient AMutableInt32 aInt32;
protected final transient ISerializerDeserializer<AInt32> aInt32Serde;
protected final transient ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage();
@@ -97,7 +96,7 @@
}
@Override
- public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -107,32 +106,38 @@
return createDatasetFromARecord(datasetRecord);
}
- protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws IOException {
+ protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws HyracksDataException {
- String dataverseName = ((AString) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
- String datasetName = ((AString) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX)).getStringValue();
- String typeName = ((AString) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX)).getStringValue();
+ String dataverseName =
+ ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX))
+ .getStringValue();
+ String datasetName =
+ ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX))
+ .getStringValue();
+ String typeName =
+ ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX))
+ .getStringValue();
String typeDataverseName = ((AString) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue();
DatasetType datasetType = DatasetType.valueOf(
((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX))
.getStringValue());
IDatasetDetails datasetDetails = null;
- int datasetId = ((AInt32) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX)).getIntegerValue();
- int pendingOp = ((AInt32) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
- String nodeGroupName = ((AString) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
+ int datasetId =
+ ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX))
+ .getIntegerValue();
+ int pendingOp =
+ ((AInt32) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX))
+ .getIntegerValue();
+ String nodeGroupName =
+ ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX))
+ .getStringValue();
String compactionPolicy = ((AString) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
IACursor cursor = ((AOrderedList) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
.getCursor();
- Map<String, String> compactionPolicyProperties = new LinkedHashMap<String, String>();
+ Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
String key;
String value;
while (cursor.next()) {
@@ -154,14 +159,14 @@
cursor = ((AOrderedList) datasetDetailsRecord
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX))
.getCursor();
- List<List<String>> partitioningKey = new ArrayList<List<String>>();
- List<IAType> partitioningKeyType = new ArrayList<IAType>();
+ List<List<String>> partitioningKey = new ArrayList<>();
+ List<IAType> partitioningKeyType = new ArrayList<>();
AOrderedList fieldNameList;
while (cursor.next()) {
fieldNameList = (AOrderedList) cursor.get();
IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
- List<String> nestedFieldName = new ArrayList<String>();
+ List<String> nestedFieldName = new ArrayList<>();
while (nestedFieldNameCursor.next()) {
nestedFieldName.add(((AString) nestedFieldNameCursor.get()).getStringValue());
}
@@ -175,10 +180,10 @@
// Check if there is a filter field.
List<String> filterField = null;
- int filterFieldPos = datasetDetailsRecord.getType()
- .getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
+ int filterFieldPos =
+ datasetDetailsRecord.getType().getFieldIndex(InternalDatasetDetails.FILTER_FIELD_NAME);
if (filterFieldPos >= 0) {
- filterField = new ArrayList<String>();
+ filterField = new ArrayList<>();
cursor = ((AOrderedList) datasetDetailsRecord.getValueByPos(filterFieldPos)).getCursor();
while (cursor.next()) {
filterField.add(((AString) cursor.get()).getStringValue());
@@ -218,7 +223,7 @@
cursor = ((AOrderedList) datasetDetailsRecord
.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
.getCursor();
- Map<String, String> properties = new HashMap<String, String>();
+ Map<String, String> properties = new HashMap<>();
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
@@ -233,10 +238,9 @@
.getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX)))
.getChrononTime());
// State
- TransactionState state = TransactionState
- .values()[((AInt32) datasetDetailsRecord.getValueByPos(
- MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX))
- .getIntegerValue()];
+ TransactionState state = TransactionState.values()[((AInt32) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX))
+ .getIntegerValue()];
datasetDetails = new ExternalDatasetDetails(adapter, properties, timestamp, state);
}
@@ -245,11 +249,11 @@
String metaTypeDataverseName = null;
String metaTypeName = null;
- int metaTypeDataverseNameIndex = datasetRecord.getType()
- .getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+ int metaTypeDataverseNameIndex =
+ datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
if (metaTypeDataverseNameIndex >= 0) {
- metaTypeDataverseName = ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex))
- .getStringValue();
+ metaTypeDataverseName =
+ ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
int metaTypeNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_NAME);
metaTypeName = ((AString) datasetRecord.getValueByPos(metaTypeNameIndex)).getStringValue();
}
@@ -260,7 +264,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws HyracksDataException, MetadataException {
OrderedListBuilder listBuilder = new OrderedListBuilder();
ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
// write the key in the first 2 fields of the tuple
@@ -423,11 +427,11 @@
}
protected Map<String, String> getDatasetHints(ARecord datasetRecord) {
- Map<String, String> hints = new HashMap<String, String>();
+ Map<String, String> hints = new HashMap<>();
String key;
String value;
- AUnorderedList list = (AUnorderedList) datasetRecord
- .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
+ AUnorderedList list =
+ (AUnorderedList) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_HINTS_FIELD_INDEX);
IACursor cursor = list.getCursor();
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
@@ -444,8 +448,8 @@
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
propertyRecordBuilder.reset(MetadataRecordTypes.DATASET_HINTS_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 44de381..7f4e28d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.Calendar;
import org.apache.asterix.external.api.IDataSourceAdapter;
@@ -35,6 +34,7 @@
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<DatasourceAdapter> {
@@ -57,7 +57,8 @@
}
@Override
- public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException {
+ public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple)
+ throws MetadataException, HyracksDataException {
byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -84,7 +85,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter)
+ throws HyracksDataException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(adapter.getAdapterIdentifier().getNamespace());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index b81ec29..d202e1a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -23,7 +23,6 @@
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.IOException;
import java.rmi.RemoteException;
import java.util.Calendar;
@@ -53,10 +52,10 @@
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
/**
* Translates a Datatype metadata entity to an ITupleReference and vice versa.
@@ -77,8 +76,8 @@
};
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.DATATYPE_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATATYPE_RECORDTYPE);
private final MetadataNode metadataNode;
private final JobId jobId;
@@ -89,7 +88,8 @@
}
@Override
- public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+ public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple)
+ throws MetadataException, HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -142,8 +142,8 @@
boolean isNullable = ((ABoolean) field
.getValueByPos(MetadataRecordTypes.FIELD_ARECORD_ISNULLABLE_FIELD_INDEX)).getBoolean()
.booleanValue();
- fieldTypes[fieldId] = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId,
- dataverseName, fieldTypeName, isNullable);
+ fieldTypes[fieldId] = BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dataverseName,
+ fieldTypeName, isNullable);
fieldId++;
}
return new Datatype(dataverseName, datatypeName,
@@ -163,8 +163,8 @@
.getValueByPos(MetadataRecordTypes.DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX))
.getStringValue();
return new Datatype(dataverseName, datatypeName,
- new AOrderedListType(BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId,
- dataverseName, orderedlistTypeName, false), datatypeName),
+ new AOrderedListType(BuiltinTypeMap.getTypeFromTypeName(metadataNode, jobId, dataverseName,
+ orderedlistTypeName, false), datatypeName),
isAnonymous);
}
default:
@@ -175,7 +175,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Datatype dataType)
+ throws HyracksDataException, MetadataException {
// write the key in the first two fields of the tuple
tupleBuilder.reset();
aString.setValue(dataType.getDataverseName());
@@ -368,8 +369,13 @@
} catch (MetadataException e) {
// The nested record type may have been inserted by a previous DDL statement or by
// a previous nested type.
- if (!(e.getCause() instanceof TreeIndexDuplicateKeyException)) {
- throw new HyracksDataException(e);
+ if (!(e.getCause() instanceof HyracksDataException)) {
+ throw HyracksDataException.create(e);
+ } else {
+ HyracksDataException hde = (HyracksDataException) e.getCause();
+ if (!hde.getComponent().equals(ErrorCode.HYRACKS) || hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw hde;
+ }
}
} catch (RemoteException e) {
// TODO: This should not be a HyracksDataException. Can't
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index ac4fde2..89aaad3 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.Calendar;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
@@ -36,6 +35,7 @@
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -52,8 +52,8 @@
protected ISerializerDeserializer<AInt32> aInt32Serde;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
@SuppressWarnings("unchecked")
protected DataverseTupleTranslator(boolean getTuple) {
@@ -63,7 +63,7 @@
}
@Override
- public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -76,7 +76,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataverse instance) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Dataverse instance)
+ throws HyracksDataException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
index 66b6af3..ea04f1d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
@@ -21,7 +21,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.Date;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
@@ -40,6 +39,7 @@
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public class ExternalFileTupleTranslator extends AbstractTupleTranslator<ExternalFile> {
@@ -58,14 +58,14 @@
protected transient AMutableInt64 aInt64 = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ protected ISerializerDeserializer<AInt32> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADateTime> dateTimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ protected ISerializerDeserializer<ADateTime> dateTimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> longSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected ISerializerDeserializer<AInt64> longSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(MetadataRecordTypes.EXTERNAL_FILE_RECORDTYPE);
@@ -75,7 +75,8 @@
}
@Override
- public ExternalFile getMetadataEntityFromTuple(ITupleReference tuple) throws MetadataException, IOException {
+ public ExternalFile getMetadataEntityFromTuple(ITupleReference tuple)
+ throws MetadataException, HyracksDataException {
byte[] serRecord = tuple.getFieldData(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = tuple.getFieldStart(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = tuple.getFieldLength(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -106,7 +107,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile) throws MetadataException, IOException {
+ public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile)
+ throws MetadataException, HyracksDataException {
// write the key in the first 3 fields of the tuple
tupleBuilder.reset();
// dataverse name
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
index 0adcda5..420e4fc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
@@ -19,6 +19,12 @@
package org.apache.asterix.metadata.entitytupletranslators;
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.List;
+
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.UnorderedListBuilder;
import org.apache.asterix.common.functions.FunctionSignature;
@@ -27,20 +33,18 @@
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.FeedConnection;
-import org.apache.asterix.om.base.*;
+import org.apache.asterix.om.base.AMissing;
+import org.apache.asterix.om.base.ANull;
+import org.apache.asterix.om.base.ARecord;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.AUnorderedList;
+import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
public class FeedConnectionTupleTranslator extends AbstractTupleTranslator<FeedConnection> {
public static final int FEED_CONN_DATAVERSE_NAME_FIELD_INDEX = 0;
@@ -57,7 +61,8 @@
}
@Override
- public FeedConnection getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+ public FeedConnection getMetadataEntityFromTuple(ITupleReference frameTuple)
+ throws MetadataException, HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -68,12 +73,14 @@
}
private FeedConnection createFeedConnFromRecord(ARecord feedConnRecord) {
- String dataverseName = ((AString) feedConnRecord
- .getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ String dataverseName =
+ ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX))
+ .getStringValue();
String feedName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_FEED_NAME_FIELD_INDEX))
.getStringValue();
- String datasetName = ((AString) feedConnRecord
- .getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX)).getStringValue();
+ String datasetName =
+ ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX))
+ .getStringValue();
String outputType = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_OUTPUT_TYPE_INDEX))
.getStringValue();
String policyName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX))
@@ -98,7 +105,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(FeedConnection me) throws MetadataException, IOException {
+ public ITupleReference getTupleFromMetadataEntity(FeedConnection me)
+ throws MetadataException, HyracksDataException {
tupleBuilder.reset();
// key: dataverse
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index c82a073..7b631b6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -23,7 +23,6 @@
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
@@ -73,7 +72,7 @@
}
@Override
- public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -95,7 +94,7 @@
IACursor cursor = ((AUnorderedList) feedPolicyRecord
.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
- Map<String, String> policyParamters = new HashMap<String, String>();
+ Map<String, String> policyParamters = new HashMap<>();
String key;
String value;
while (cursor.next()) {
@@ -111,7 +110,7 @@
@Override
public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy)
- throws IOException, MetadataException {
+ throws HyracksDataException, MetadataException {
// write the key in the first three fields of the tuple
ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
index 4503e09..4737d79 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -23,7 +23,6 @@
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
@@ -62,15 +61,15 @@
public static final int FEED_PAYLOAD_TUPLE_FIELD_INDEX = 2;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
protected FeedTupleTranslator(boolean getTuple) {
super(getTuple, MetadataPrimaryIndexes.FEED_DATASET.getFieldCount());
}
@Override
- public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -82,15 +81,16 @@
private Feed createFeedFromARecord(ARecord feedRecord) {
Feed feed;
- String dataverseName = ((AString) feedRecord
- .getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ String dataverseName =
+ ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
+ .getStringValue();
String feedName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX))
.getStringValue();
- AUnorderedList feedConfig = (AUnorderedList) feedRecord
- .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX);
- String adapterName = ((AString) feedRecord
- .getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_INDEX)).getStringValue();
+ AUnorderedList feedConfig =
+ (AUnorderedList) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX);
+ String adapterName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_NAME_INDEX))
+ .getStringValue();
IACursor cursor = feedConfig.getCursor();
@@ -109,7 +109,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Feed feed) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Feed feed) throws HyracksDataException, MetadataException {
// write the key in the first two fields of the tuple
tupleBuilder.reset();
aString.setValue(feed.getDataverseName());
@@ -182,8 +182,8 @@
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index 1487f02..64c4035 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -38,6 +37,7 @@
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AOrderedListType;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -57,15 +57,15 @@
public static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.FUNCTION_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FUNCTION_RECORDTYPE);
protected FunctionTupleTranslator(boolean getTuple) {
super(getTuple, MetadataPrimaryIndexes.FUNCTION_DATASET.getFieldCount());
}
@Override
- public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -87,7 +87,7 @@
IACursor cursor = ((AOrderedList) functionRecord
.getValueByPos(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_PARAM_LIST_FIELD_INDEX)).getCursor();
- List<String> params = new ArrayList<String>();
+ List<String> params = new ArrayList<>();
while (cursor.next()) {
params.add(((AString) cursor.get()).getStringValue());
}
@@ -110,7 +110,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Function function) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Function function)
+ throws HyracksDataException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(function.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index 6446e67..f29c7f7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -22,7 +22,7 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
+import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
@@ -54,6 +54,7 @@
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -103,7 +104,7 @@
}
@Override
- public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, IOException {
+ public Index getMetadataEntityFromTuple(ITupleReference frameTuple) throws MetadataException, HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -114,14 +115,13 @@
.getStringValue();
String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX))
.getStringValue();
- String indexName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX))
- .getStringValue();
+ String indexName =
+ ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
IndexType indexStructure = IndexType
.valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX))
.getStringValue());
IACursor fieldNameCursor =
- ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX))
- .getCursor();
+ ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
List<List<String>> searchKey = new ArrayList<>();
AOrderedList fieldNameList;
while (fieldNameCursor.next()) {
@@ -152,8 +152,8 @@
}
Boolean isPrimaryIndex =
((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
- int pendingOp = ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX))
- .getIntegerValue();
+ int pendingOp =
+ ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
// Check if there is a gram length as well.
int gramLength = -1;
int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
@@ -177,22 +177,26 @@
// index key type information is not persisted, thus we extract type information from the record metadata
if (searchKeyType.isEmpty()) {
- Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
- String datatypeName = dSet.getItemTypeName();
- String datatypeDataverseName = dSet.getItemTypeDataverseName();
- ARecordType recordDt =
- (ARecordType) metadataNode.getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
- String metatypeName = dSet.getMetaItemTypeName();
- String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
- ARecordType metaDt = null;
- if (metatypeName != null && metatypeDataverseName != null) {
- metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName)
- .getDatatype();
- }
try {
- searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
- } catch (AlgebricksException e) {
- throw new MetadataException(e);
+ Dataset dSet = metadataNode.getDataset(jobId, dvName, dsName);
+ String datatypeName = dSet.getItemTypeName();
+ String datatypeDataverseName = dSet.getItemTypeDataverseName();
+ ARecordType recordDt = (ARecordType) metadataNode
+ .getDatatype(jobId, datatypeDataverseName, datatypeName).getDatatype();
+ String metatypeName = dSet.getMetaItemTypeName();
+ String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
+ ARecordType metaDt = null;
+ if (metatypeName != null && metatypeDataverseName != null) {
+ metaDt = (ARecordType) metadataNode.getDatatype(jobId, metatypeDataverseName, metatypeName)
+ .getDatatype();
+ }
+ try {
+ searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
+ } catch (AlgebricksException e) {
+ throw new MetadataException(e);
+ }
+ } catch (RemoteException re) {
+ throw HyracksDataException.create(re);
}
}
return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType,
@@ -200,7 +204,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Index instance) throws IOException {
+ public ITupleReference getTupleFromMetadataEntity(Index instance) throws HyracksDataException {
// write the key in the first 3 fields of the tuple
tupleBuilder.reset();
aString.setValue(instance.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
index dfb8ced..1d8cff8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.Calendar;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
@@ -33,6 +32,7 @@
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -49,15 +49,15 @@
public static final int LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
protected LibraryTupleTranslator(boolean getTuple) {
super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET.getFieldCount());
}
@Override
- public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -79,7 +79,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Library library) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Library library) throws HyracksDataException, MetadataException {
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
aString.setValue(library.getDataverseName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index 6e865d05..c2cdf0d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -22,7 +22,6 @@
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
@@ -39,6 +38,7 @@
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -56,28 +56,28 @@
private transient UnorderedListBuilder listBuilder = new UnorderedListBuilder();
private transient ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
- private List<String> nodeNames;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.NODEGROUP_RECORDTYPE);
+ private ISerializerDeserializer<ARecord> recordSerDes =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.NODEGROUP_RECORDTYPE);
protected NodeGroupTupleTranslator(boolean getTuple) {
super(getTuple, MetadataPrimaryIndexes.NODEGROUP_DATASET.getFieldCount());
}
@Override
- public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
byte[] serRecord = frameTuple.getFieldData(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
int recordStartOffset = frameTuple.getFieldStart(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
int recordLength = frameTuple.getFieldLength(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
DataInput in = new DataInputStream(stream);
ARecord nodeGroupRecord = recordSerDes.deserialize(in);
- String gpName = ((AString) nodeGroupRecord
- .getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX)).getStringValue();
+ String gpName =
+ ((AString) nodeGroupRecord.getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX))
+ .getStringValue();
IACursor cursor = ((AUnorderedList) nodeGroupRecord
.getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX)).getCursor();
- List<String> nodeNames = new ArrayList<String>();
+ List<String> nodeNames = new ArrayList<>();
while (cursor.next()) {
nodeNames.add(((AString) cursor.get()).getStringValue());
}
@@ -85,7 +85,8 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(NodeGroup instance)
+ throws HyracksDataException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeGroupName());
@@ -103,8 +104,8 @@
// write field 1
listBuilder.reset((AUnorderedListType) MetadataRecordTypes.NODEGROUP_RECORDTYPE
.getFieldTypes()[MetadataRecordTypes.NODEGROUP_ARECORD_NODENAMES_FIELD_INDEX]);
- this.nodeNames = instance.getNodeNames();
- for (String nodeName : this.nodeNames) {
+ List<String> nodeNames = instance.getNodeNames();
+ for (String nodeName : nodeNames) {
itemValue.reset();
aString.setValue(nodeName);
stringSerde.serialize(aString, itemValue.getDataOutput());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 7856ed8..dae11bc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -19,8 +19,6 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.IOException;
-
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -31,6 +29,7 @@
import org.apache.asterix.om.types.BuiltinType;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -46,8 +45,8 @@
private transient AMutableInt64 aInt64 = new AMutableInt64(-1);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
// @SuppressWarnings("unchecked")
// private ISerializerDeserializer<ARecord> recordSerDes =
@@ -59,7 +58,7 @@
}
@Override
- public Node getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+ public Node getMetadataEntityFromTuple(ITupleReference frameTuple) {
throw new NotImplementedException();
// TODO: Implement this.
// try {
@@ -85,7 +84,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Node instance) throws IOException, MetadataException {
+ public ITupleReference getTupleFromMetadataEntity(Node instance) throws HyracksDataException, MetadataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index 1841e5d..5bedffa 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -19,12 +19,13 @@
package org.apache.asterix.metadata.valueextractors;
-import java.io.IOException;
+import java.rmi.RemoteException;
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
@@ -38,7 +39,8 @@
}
@Override
- public T getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+ public T getValue(JobId jobId, ITupleReference tuple)
+ throws MetadataException, HyracksDataException, RemoteException {
return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
index 9f63ebf..32bed0d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/NestedDatatypeNameValueExtractor.java
@@ -27,6 +27,7 @@
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.api.IValueExtractor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.util.string.UTF8StringReader;
@@ -48,19 +49,23 @@
private final UTF8StringReader reader = new UTF8StringReader();
@Override
- public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+ public String getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
byte[] serRecord = tuple.getFieldData(2);
int recordStartOffset = tuple.getFieldStart(2);
int recordLength = tuple.getFieldLength(2);
ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
DataInput in = new DataInputStream(stream);
- String nestedType = reader.readUTF(in);
- if (nestedType.equals(datatypeName)) {
- recordStartOffset = tuple.getFieldStart(1);
- recordLength = tuple.getFieldLength(1);
- stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- in = new DataInputStream(stream);
- return reader.readUTF(in);
+ try {
+ String nestedType = reader.readUTF(in);
+ if (nestedType.equals(datatypeName)) {
+ recordStartOffset = tuple.getFieldStart(1);
+ recordLength = tuple.getFieldLength(1);
+ stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ in = new DataInputStream(stream);
+ return reader.readUTF(in);
+ }
+ } catch (IOException e) {
+ throw HyracksDataException.create(e);
}
return null;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
index 4c0f48f..fcf69d5 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/TupleCopyValueExtractor.java
@@ -19,13 +19,13 @@
package org.apache.asterix.metadata.valueextractors;
-import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.asterix.common.transactions.JobId;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.api.IValueExtractor;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
@@ -48,7 +48,7 @@
}
@Override
- public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, IOException {
+ public ITupleReference getValue(JobId jobId, ITupleReference tuple) throws MetadataException, HyracksDataException {
int numBytes = tupleWriter.bytesRequired(tuple);
tupleBytes = new byte[numBytes];
tupleWriter.writeTuple(tuple, tupleBytes, 0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
index 1f18c97..d8c67bf 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
@@ -52,7 +52,6 @@
import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -275,8 +274,8 @@
// callback here before calling nextFrame on the next operator
frameOpCallback.frameCompleted(!firstModification);
appender.write(writer, true);
- } catch (IndexException | IOException | AsterixException e) {
- throw new HyracksDataException(e);
+ } catch (Exception e) {
+ throw HyracksDataException.create(e);
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
index 54cd97d..b90fab3 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -64,17 +63,12 @@
IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
- try {
- return LSMRTreeUtils.createExternalRTree(ioManager, file, appCtx.getBufferCache(),
- appCtx.getFileMapManager(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, rtreePolicyType, appCtx.getBloomFilterFalsePositiveRate(),
- mergePolicyFactory.createMergePolicy(mergePolicyProperties,
- appCtx.getDatasetLifecycleManager()),
- opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
- ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, -1, true, isPointMBR,
- metadataPageManagerFactory);
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ return LSMRTreeUtils.createExternalRTree(ioManager, file, appCtx.getBufferCache(), appCtx.getFileMapManager(),
+ typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+ appCtx.getBloomFilterFalsePositiveRate(),
+ mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+ opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+ ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, -1, true, isPointMBR,
+ metadataPageManagerFactory);
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
index a682f5a..b7408aa 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -89,32 +88,25 @@
int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
List<IVirtualBufferCache> virtualBufferCaches =
appCtx.getDatasetLifecycleManager().getVirtualBufferCaches(datasetId(), ioDeviceNum);
- try {
- if (isPartitioned) {
- return InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, virtualBufferCaches,
- appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories,
- tokenTypeTraits, tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(),
- file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
- mergePolicyFactory.createMergePolicy(mergePolicyProperties,
- appCtx.getDatasetLifecycleManager()),
- opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
- ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
- filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
- invertedIndexFieldsForNonBulkLoadOps, true, metadataPageManagerFactory);
- } else {
- return InvertedIndexUtils.createLSMInvertedIndex(ioManager, virtualBufferCaches,
- appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories,
- tokenTypeTraits, tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(),
- file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
- mergePolicyFactory.createMergePolicy(mergePolicyProperties,
- appCtx.getDatasetLifecycleManager()),
- opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
- ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
- filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
- invertedIndexFieldsForNonBulkLoadOps, true, metadataPageManagerFactory);
- }
- } catch (IndexException e) {
- throw new HyracksDataException(e);
+ if (isPartitioned) {
+ return InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, virtualBufferCaches,
+ appCtx.getFileMapManager(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
+ tokenCmpFactories, tokenizerFactory, appCtx.getBufferCache(), file.getAbsolutePath(),
+ appCtx.getBloomFilterFalsePositiveRate(),
+ mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+ opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+ ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+ filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+ metadataPageManagerFactory);
+ } else {
+ return InvertedIndexUtils.createLSMInvertedIndex(ioManager, virtualBufferCaches, appCtx.getFileMapManager(),
+ invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+ appCtx.getBufferCache(), file.getAbsolutePath(), appCtx.getBloomFilterFalsePositiveRate(),
+ mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+ opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+ ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+ filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+ metadataPageManagerFactory);
}
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
index c3b7348..127a997 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
@@ -32,7 +32,6 @@
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
@@ -90,17 +89,12 @@
int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
List<IVirtualBufferCache> virtualBufferCaches =
appCtx.getDatasetLifecycleManager().getVirtualBufferCaches(datasetId(), ioDeviceNum);
- try {
- return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ioManager, virtualBufferCaches, file,
- appCtx.getBufferCache(), appCtx.getFileMapManager(), typeTraits,
- rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
- mergePolicyFactory.createMergePolicy(mergePolicyProperties,
- appCtx.getDatasetLifecycleManager()),
- opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
- ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
- filterCmpFactories, filterFields, true, isPointMBR, metadataPageManagerFactory);
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ioManager, virtualBufferCaches, file,
+ appCtx.getBufferCache(), appCtx.getFileMapManager(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
+ valueProviderFactories, rtreePolicyType,
+ mergePolicyFactory.createMergePolicy(mergePolicyProperties, appCtx.getDatasetLifecycleManager()),
+ opTrackerProvider.getOperationTracker(serviceCtx), appCtx.getLSMIOScheduler(),
+ ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
+ filterCmpFactories, filterFields, true, isPointMBR, metadataPageManagerFactory);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
index 3ac8c40..ae77d3a 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
@@ -78,24 +78,22 @@
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
if (inputArity == 0) {
- return createSourceInputPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+ return createSourceInputPushRuntime(ctx);
} else {
- return createOneInputOneOutputPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+ return createOneInputOneOutputPushRuntime(ctx, recordDescProvider);
}
}
- private IOperatorNodePushable createSourceInputPushRuntime(final IHyracksTaskContext ctx,
- final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ private IOperatorNodePushable createSourceInputPushRuntime(final IHyracksTaskContext ctx) {
return new AbstractUnaryOutputSourceOperatorNodePushable() {
@Override
public void initialize() throws HyracksDataException {
IFrameWriter startOfPipeline;
- RecordDescriptor pipelineOutputRecordDescriptor = outputArity > 0
- ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
-
- PipelineAssembler pa = new PipelineAssembler(pipeline, inputArity, outputArity, null,
- pipelineOutputRecordDescriptor);
+ RecordDescriptor pipelineOutputRecordDescriptor =
+ outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
+ PipelineAssembler pa =
+ new PipelineAssembler(pipeline, inputArity, outputArity, null, pipelineOutputRecordDescriptor);
startOfPipeline = pa.assemblePipeline(writer, ctx);
try {
startOfPipeline.open();
@@ -110,7 +108,7 @@
}
private IOperatorNodePushable createOneInputOneOutputPushRuntime(final IHyracksTaskContext ctx,
- final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ final IRecordDescriptorProvider recordDescProvider) {
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
private IFrameWriter startOfPipeline;
@@ -118,8 +116,8 @@
@Override
public void open() throws HyracksDataException {
if (startOfPipeline == null) {
- RecordDescriptor pipelineOutputRecordDescriptor = outputArity > 0
- ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
+ RecordDescriptor pipelineOutputRecordDescriptor =
+ outputArity > 0 ? AlgebricksMetaOperatorDescriptor.this.recordDescriptors[0] : null;
RecordDescriptor pipelineInputRecordDescriptor = recordDescProvider
.getInputRecordDescriptor(AlgebricksMetaOperatorDescriptor.this.getActivityId(), 0);
PipelineAssembler pa = new PipelineAssembler(pipeline, inputArity, outputArity,
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
index 11b8109..e321a1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/exceptions/ErrorCode.java
@@ -66,6 +66,22 @@
public static final int FILE_DOES_NOT_EXISTS = 30;
public static final int UNIDENTIFIED_IO_ERROR_DELETING_DIR = 31;
public static final int RESULT_NO_RECORD = 32;
+ public static final int DUPLICATE_KEY = 33;
+ public static final int LOAD_NON_EMPTY_INDEX = 34;
+ public static final int MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX = 35;
+ public static final int FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX = 36;
+ public static final int UPDATE_OR_DELETE_NON_EXISTENT_KEY = 37;
+ public static final int INDEX_NOT_UPDATABLE = 38;
+ public static final int OCCURRENCE_THRESHOLD_PANIC_EXCEPTION = 39;
+ public static final int UNKNOWN_INVERTED_INDEX_TYPE = 40;
+ public static final int CANNOT_PROPOSE_LINEARIZER_DIFF_DIMENSIONS = 41;
+ public static final int CANNOT_PROPOSE_LINEARIZER_FOR_TYPE = 42;
+ public static final int RECORD_IS_TOO_LARGE = 43;
+ public static final int FAILED_TO_RE_FIND_PARENT = 44;
+ public static final int FAILED_TO_FIND_TUPLE = 45;
+ public static final int UNSORTED_LOAD_INPUT = 46;
+ public static final int OPERATION_EXCEEDED_MAX_RESTARTS = 47;
+ public static final int DUPLICATE_LOAD_INPUT = 48;
// Compilation error codes.
public static final int RULECOLLECTION_NOT_INSTANCE_OF_LIST = 10000;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
index ed054cd..c3c2596 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/resources/errormsg/en.properties
@@ -51,5 +51,21 @@
30 = File %1$s doesn't exists
31 = Unidentified IO error occurred while deleting the dir %1$s
32 = No record for partition %1$s of result set %2$s
+33 = Inserting duplicate keys into the primary storage
+34 = Cannot load an index that is not empty
+35 = Modify not supported in External LSM Index
+36 = Flush not supported in External LSM Index
+37 = Index key not found
+38 = Index is not updatable
+39 = Merge Threshold is less than or equal to 0
+40 = Unknown inverted index type %1$s
+41 = Cannot propose linearizer if dimensions have different types
+42 = Cannot propose linearizer for type %1$s
+43 = Record size (%1$s) larger than maximum acceptable record size (%2$s)
+44 = Failed to re-find parent of a page in the tree
+45 = Failed to find a tuple in a page
+46 = Unsorted load input
+47 = Operation exceeded the maximum number of restarts %1$s
+48 = Loading duplicate keys into the primary storage
10000 = The given rule collection %1$s is not an instance of the List class.
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
index 6e4214f..ac69a5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
@@ -54,7 +54,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
@@ -69,8 +68,8 @@
import org.apache.hyracks.storage.common.IStorageManager;
import org.apache.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
import org.apache.hyracks.test.support.TestIndexLifecycleManagerProvider;
-import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
import org.apache.hyracks.test.support.TestStorageManager;
+import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
import org.apache.hyracks.tests.integration.AbstractIntegrationTest;
import org.junit.After;
@@ -202,7 +201,7 @@
protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory(
IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
- int[] btreeFields) throws TreeIndexException;
+ int[] btreeFields) throws HyracksDataException;
protected void createPrimaryIndex() throws Exception {
JobSpecification spec = new JobSpecification();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
index 83318ce..0296a35 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
@@ -27,6 +27,7 @@
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileSplit;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -38,7 +39,6 @@
import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -116,7 +116,7 @@
protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
- int[] btreeFields) throws TreeIndexException {
+ int[] btreeFields) throws HyracksDataException {
return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
rtreePolicyType, null, true);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index 7c2676d..092aa9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -27,6 +27,7 @@
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileSplit;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -38,7 +39,6 @@
import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
@@ -114,7 +114,7 @@
protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory,
- int[] btreeFields) throws TreeIndexException {
+ int[] btreeFields) throws HyracksDataException {
return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
rtreePolicyType, null, true);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
index 80c3628..dee8271 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomFilter.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
import org.apache.hyracks.storage.common.buffercache.IFIFOPageQueue;
@@ -34,11 +33,11 @@
public class BloomFilter {
- private final static int METADATA_PAGE_ID = 0;
- private final static int NUM_PAGES_OFFSET = 0; // 0
- private final static int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 4; // 4
- private final static int NUM_ELEMENTS_OFFSET = NUM_HASHES_USED_OFFSET + 4; // 8
- private final static int NUM_BITS_OFFSET = NUM_ELEMENTS_OFFSET + 8; // 12
+ private static final int METADATA_PAGE_ID = 0;
+ private static final int NUM_PAGES_OFFSET = 0; // 0
+ private static final int NUM_HASHES_USED_OFFSET = NUM_PAGES_OFFSET + 4; // 4
+ private static final int NUM_ELEMENTS_OFFSET = NUM_HASHES_USED_OFFSET + 4; // 8
+ private static final int NUM_BITS_OFFSET = NUM_ELEMENTS_OFFSET + 8; // 12
private final IBufferCache bufferCache;
private final IFileMapProvider fileMapProvider;
@@ -258,7 +257,7 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
if (numPages == 0) {
throw new HyracksDataException(
"Cannot add elements to this filter since it is supposed to be empty (number of elements hint passed to the filter during construction was 0).");
@@ -278,7 +277,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
allocateAndInitMetaDataPage();
queue.put(metaDataPage);
for (ICachedPage p : pages) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
index e0abb1c..2383192 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeFrame.java
@@ -23,12 +23,11 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
public interface IBTreeFrame extends ITreeIndexFrame {
- public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+ public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException;
- public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException;
+ public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException;
public void insertSorted(ITupleReference tuple) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
index 2e072ce..37e0ab8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -22,7 +22,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -33,9 +32,9 @@
public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) throws HyracksDataException;
- public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException;
+ public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException;
- public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+ public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException;
/**
* @param searchTuple
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java
deleted file mode 100644
index bb50054..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.btree.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class BTreeException extends TreeIndexException {
-
- protected static final long serialVersionUID = 1L;
-
- public BTreeException(Exception e) {
- super(e);
- }
-
- public BTreeException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
deleted file mode 100644
index 5846747..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.btree.exceptions;
-
-public class BTreeNotUpdateableException extends BTreeException {
- private static final long serialVersionUID = 1L;
-
- public BTreeNotUpdateableException(Exception e) {
- super(e);
- }
-
- public BTreeNotUpdateableException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index 157c663..1bc6db0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -24,6 +24,7 @@
import java.util.Collections;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -38,9 +39,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -116,7 +114,7 @@
try {
return compressor.compress(this, cmp);
} catch (Exception e) {
- throw new HyracksDataException(e);
+ throw HyracksDataException.create(e);
}
}
@@ -174,8 +172,7 @@
int tupleLength = tupleEndOff - tupleOff;
System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
- slotManager.setSlot(sortedTupleOffs.get(i).slotOff,
- slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
freeSpace += tupleLength;
}
@@ -211,13 +208,13 @@
}
frameTuple.resetByTupleIndex(this, tupleIndex);
- tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
- - suffixFieldStart);
+ tupleSize =
+ tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount() - suffixFieldStart);
- buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
- ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) - 1);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) + tupleSize
- + slotManager.getSlotSize());
+ buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+ buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) - 1);
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) + tupleSize + slotManager.getSlotSize());
}
@Override
@@ -245,8 +242,8 @@
int prefixSlot = buf.getInt(prefixSlotOff);
int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
- int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
- - numPrefixFields);
+ int compressedSize =
+ tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields);
if (compressedSize + slotManager.getSlotSize() <= freeContiguous) {
return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
}
@@ -269,15 +266,15 @@
}
int freeSpace = buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
- tuple.getFieldCount() - numPrefixFields, buf.array(), freeSpace);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields,
+ buf.array(), freeSpace);
- buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
- ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
- buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET, buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET)
- + bytesWritten);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten
- - slotManager.getSlotSize());
+ buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+ buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
+ buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET,
+ buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET) + bytesWritten);
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
}
@Override
@@ -380,35 +377,28 @@
}
@Override
- public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
int slot;
- try {
- slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp,
- FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS, FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
+
int tupleIndex = slotManager.decodeSecondSlotField(slot);
// Error indicator is set if there is an exact match.
if (tupleIndex == slotManager.getErrorIndicator()) {
- throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
}
return slot;
}
@Override
- public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
int slot;
- try {
- slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.INCLUSIVE,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
int tupleIndex = slotManager.decodeSecondSlotField(slot);
// Error indicator is set if there is an exact match.
if (tupleIndex == slotManager.getErrorIndicator()) {
- throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
}
return slot;
}
@@ -432,37 +422,27 @@
}
@Override
- public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException {
int slot;
- try {
- slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
int tupleIndex = slotManager.decodeSecondSlotField(slot);
// Error indicator is set if there is no exact match.
if (tupleIndex == slotManager.getErrorIndicator()) {
- throw new TreeIndexNonExistentKeyException(
- "Trying to update a tuple with a nonexistent key in leaf node.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
return slot;
}
@Override
- public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
int slot;
- try {
- slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
int tupleIndex = slotManager.decodeSecondSlotField(slot);
// Error indicator is set if there is no exact match.
if (tupleIndex == slotManager.getErrorIndicator()) {
- throw new TreeIndexNonExistentKeyException(
- "Trying to delete a tuple with a nonexistent key in leaf node.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
return slot;
}
@@ -579,8 +559,8 @@
}
}
- int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount()
- - fieldsToTruncate, buf.array(), freeSpace);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate,
+ tuple.getFieldCount() - fieldsToTruncate, buf.array(), freeSpace);
// insert slot
int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
@@ -593,18 +573,17 @@
slotManager.insertSlot(insSlot, freeSpace);
// update page metadata
- buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET, buf.getInt(
- ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
- buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET, buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET)
- + bytesWritten);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten
- - slotManager.getSlotSize());
+ buf.putInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET,
+ buf.getInt(ITreeIndexFrame.Constants.TUPLE_COUNT_OFFSET) + 1);
+ buf.putInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET,
+ buf.getInt(ITreeIndexFrame.Constants.FREE_SPACE_OFFSET) + bytesWritten);
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
}
@Override
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
- IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException {
+ IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
@@ -669,16 +648,16 @@
int bytesWritten = 0;
if (lastPrefixSlotNum != prefixSlotNum) {
bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
- int newPrefixSlot = rf.slotManager
- .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
+ int newPrefixSlot =
+ rf.slotManager.encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
right.putInt(prefixSlotOff, newPrefixSlot);
lastPrefixSlotNum = prefixSlotNum;
}
int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
- int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum
- - (prefixTupleCount - prefixesToRight), tupleOff);
+ int newTupleSlot = rf.slotManager
+ .encodeSlotFields(prefixSlotNum - (prefixTupleCount - prefixesToRight), tupleOff);
right.putInt(tupleSlotOff, newTupleSlot);
freeSpace += bytesWritten;
}
@@ -687,8 +666,8 @@
// move the modified prefix slots on the right page
int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
- int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight)
- * rf.slotManager.getSlotSize();
+ int prefixDest = rf.slotManager.getPrefixSlotEndOff()
+ + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
@@ -719,11 +698,7 @@
// insert last key
int targetTupleIndex;
// it's safe to catch this exception since it will have been caught before reaching here
- try {
- targetTupleIndex = ((IBTreeLeafFrame) targetFrame).findInsertTupleIndex(tuple);
- } catch (TreeIndexException e) {
- throw new IllegalStateException(e);
- }
+ targetTupleIndex = ((IBTreeLeafFrame) targetFrame).findInsertTupleIndex(tuple);
targetFrame.insert(tuple, targetTupleIndex);
// set split key to be highest value in left page
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index e59523c..6264086 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -33,7 +33,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -71,13 +70,9 @@
}
@Override
- public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
- try {
- return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
}
@Override
@@ -107,8 +102,8 @@
int tupleSize = bytesWritten + CHILD_PTR_SIZE;
buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
- .getSlotSize());
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
// Did we insert into the rightmost slot?
if (slotOff == slotManager.getSlotEndOff()) {
System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple) + CHILD_PTR_SIZE,
@@ -127,13 +122,9 @@
}
@Override
- public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
- try {
- return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
}
@Override
@@ -194,17 +185,15 @@
int tupleSize = bytesWritten + CHILD_PTR_SIZE;
buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
- .getSlotSize());
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + CHILD_PTR_SIZE, buf.array(),
- RIGHT_LEAF_OFFSET,
- CHILD_PTR_SIZE);
+ RIGHT_LEAF_OFFSET, CHILD_PTR_SIZE);
}
@Override
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
- IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException, TreeIndexException {
+ IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
ByteBuffer right = rightFrame.getBuffer();
int tupleCount = getTupleCount();
@@ -276,11 +265,7 @@
// Insert the saved split key.
int targetTupleIndex;
// it's safe to catch this exception since it will have been caught before reaching here
- try {
- targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame).findInsertTupleIndex(savedSplitKey.getTuple());
- } catch (TreeIndexException e) {
- throw new IllegalStateException(e);
- }
+ targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame).findInsertTupleIndex(savedSplitKey.getTuple());
targetFrame.insert(savedSplitKey.getTuple(), targetTupleIndex);
}
@@ -338,8 +323,8 @@
fsm = FindTupleMode.EXCLUSIVE;
}
// Search for a matching key.
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
+ int tupleIndex =
+ slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm, FindTupleNoExactMatchPolicy.HIGHER_KEY);
int slotOff = slotManager.getSlotOff(tupleIndex);
// Follow the rightmost (greatest) child pointer.
if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
@@ -423,8 +408,8 @@
for (int i = 0; i < tupleCount; i++) {
int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
frameTuple.resetByTupleOffset(buf.array(), tupleOff);
- int intVal = IntegerPointable.getInteger(buf.array(),
- frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ int intVal =
+ IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
ret.add(intVal);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index 6a957b5..e070b51 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -21,6 +21,7 @@
import java.nio.ByteBuffer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -29,9 +30,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -79,46 +77,34 @@
}
@Override
- public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findInsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
int tupleIndex;
- try {
- tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
// Error indicator is set if there is an exact match.
if (tupleIndex == slotManager.getErrorIndicator()) {
- throw new TreeIndexDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
}
return tupleIndex;
}
@Override
- public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findUpdateTupleIndex(ITupleReference tuple) throws HyracksDataException {
int tupleIndex;
- try {
- tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
// Error indicator is set if there is no exact match.
if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
- throw new TreeIndexNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
return tupleIndex;
}
@Override
- public int findUpsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findUpsertTupleIndex(ITupleReference tuple) throws HyracksDataException {
int tupleIndex;
- try {
- tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
// Just return the found tupleIndex. The caller will make the final
// decision whether to insert or update.
return tupleIndex;
@@ -144,17 +130,13 @@
}
@Override
- public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+ public int findDeleteTupleIndex(ITupleReference tuple) throws HyracksDataException {
int tupleIndex;
- try {
- tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
- FindTupleNoExactMatchPolicy.HIGHER_KEY);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+ FindTupleNoExactMatchPolicy.HIGHER_KEY);
// Error indicator is set if there is no exact match.
if (tupleIndex == slotManager.getErrorIndicator() || tupleIndex == slotManager.getGreatestKeyIndicator()) {
- throw new TreeIndexNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
return tupleIndex;
}
@@ -166,8 +148,8 @@
int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + bytesWritten);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager
- .getSlotSize());
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - bytesWritten - slotManager.getSlotSize());
}
@Override
@@ -207,8 +189,7 @@
@Override
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
- IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException {
+ IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
int tupleSize = getBytesRequiredToWriteTuple(tuple);
@@ -280,11 +261,7 @@
int targetTupleIndex;
// it's safe to catch this exception since it will have been caught
// before reaching here
- try {
- targetTupleIndex = targetFrame.findInsertTupleIndex(tuple);
- } catch (TreeIndexException e) {
- throw new IllegalStateException(e);
- }
+ targetTupleIndex = targetFrame.findInsertTupleIndex(tuple);
targetFrame.insert(tuple, targetTupleIndex);
// Set the split key to be highest key in the left page.
@@ -298,7 +275,7 @@
@Override
public void ensureCapacity(IBufferCache bufferCache, ITupleReference tuple,
- IExtraPageBlockHelper extraPageBlockHelper) throws HyracksDataException {
+ IExtraPageBlockHelper extraPageBlockHelper) throws HyracksDataException {
// we call ensureCapacity() for large tuples- ensure large flag is set
setLargeFlag(true);
int gapBytes = getBytesRequiredToWriteTuple(tuple) - getFreeContiguousSpace();
@@ -308,8 +285,8 @@
}
}
- private void growCapacity(IExtraPageBlockHelper extraPageBlockHelper,
- IBufferCache bufferCache, int deltaPages) throws HyracksDataException {
+ private void growCapacity(IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache, int deltaPages)
+ throws HyracksDataException {
int framePagesOld = page.getFrameSizeMultiplier();
int newMultiplier = framePagesOld + deltaPages;
@@ -325,8 +302,8 @@
System.arraycopy(buf.array(), oldSlotEnd, buf.array(), slotManager.getSlotEndOff(), oldSlotStart - oldSlotEnd);
// fixup total free space counter
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) + (bufferCache.getPageSize()
- * deltaPages));
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) + (bufferCache.getPageSize() * deltaPages));
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
index 653689a..ba84031 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTree.java
@@ -27,6 +27,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -36,8 +37,6 @@
import org.apache.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
import org.apache.hyracks.storage.am.btree.api.ITupleAcceptor;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext.PageValidationInfo;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
@@ -53,11 +52,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.impls.AbstractTreeIndex;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -75,9 +69,9 @@
public static final float DEFAULT_FILL_FACTOR = 0.7f;
- private final static long RESTART_OP = Long.MIN_VALUE;
- private final static long FULL_RESTART_OP = Long.MIN_VALUE + 1;
- private final static int MAX_RESTARTS = 10;
+ private static final long RESTART_OP = Long.MIN_VALUE;
+ private static final long FULL_RESTART_OP = Long.MIN_VALUE + 1;
+ private static final int MAX_RESTARTS = 10;
private final AtomicInteger smoCounter;
private final ReadWriteLock treeLatch;
@@ -116,7 +110,7 @@
} catch (Exception e) {
page.releaseReadLatch();
bufferCache.unpin(page);
- throw new HyracksDataException(e);
+ throw HyracksDataException.create(e);
}
}
@@ -125,8 +119,8 @@
// Stack validation protocol:
// * parent pushes the validation information onto the stack before validation
// * child pops the validation information off of the stack after validating
- BTreeAccessor accessor = (BTreeAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ BTreeAccessor accessor =
+ (BTreeAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
PageValidationInfo pvi = accessor.ctx.createPageValidationInfo(null);
accessor.ctx.validationInfos.addFirst(pvi);
if (isActive) {
@@ -187,7 +181,7 @@
}
private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, BTreeOpContext ctx)
- throws TreeIndexException, HyracksDataException {
+ throws HyracksDataException {
ctx.reset();
ctx.pred = (RangePredicate) searchPred;
ctx.cursor = cursor;
@@ -241,10 +235,10 @@
ctx.interiorFrame.setPage(originalPage);
}
- private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+ private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException {
// Make sure the root is always in the same page.
- ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
- false);
+ ICachedPage leftNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
leftNode.acquireWriteLatch();
try {
int newLeftId = freePageManager.takePage(ctx.metaFrame);
@@ -282,8 +276,7 @@
int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(ctx.splitKey.getTuple());
int tupleSize = ctx.interiorFrame.getBytesRequiredToWriteTuple(ctx.splitKey.getTuple());
if (tupleSize > maxTupleSize) {
- throw new TreeIndexException("Space required for record (" + tupleSize
- + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
}
ctx.interiorFrame.insert(ctx.splitKey.getTuple(), targetTupleIndex);
} finally {
@@ -296,72 +289,6 @@
}
}
- private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx)
- throws HyracksDataException, TreeIndexException {
- ctx.reset();
- ctx.pred.setLowKeyComparator(ctx.cmp);
- ctx.pred.setHighKeyComparator(ctx.cmp);
- ctx.pred.setLowKey(tuple, true);
- ctx.pred.setHighKey(tuple, true);
- ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
- // We use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent.
- boolean repeatOp = true;
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- ctx.smoCount = smoCounter.get();
- performOp(rootPage, null, true, ctx);
- // Do we need to restart from the (possibly new) root?
- if (!ctx.pageLsns.isEmpty()) {
- if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
- ctx.pageLsns.clear();
- continue;
- } else if (ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
- continue;
- }
-
- }
- // Split key propagated?
- if (ctx.splitKey.getBuffer() != null) {
- // Insert or update op. Create a new root.
- createNewRoot(ctx);
- }
- unsetSmPages(ctx);
- repeatOp = false;
- }
-
- if (ctx.opRestarts >= MAX_RESTARTS) {
- throw new BTreeException("Operation exceeded the maximum number of restarts");
- }
- }
-
- private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
- ctx.modificationCallback.before(tuple);
- insertUpdateOrDelete(tuple, ctx);
- }
-
- private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
- ctx.modificationCallback.before(tuple);
- insertUpdateOrDelete(tuple, ctx);
- }
-
- private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
- // This call only allows updating of non-key fields.
- // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
- // The user of the BTree is responsible for dealing with non-key updates (i.e., doing a delete + insert).
- if (fieldCount == ctx.cmp.getKeyFieldCount()) {
- throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
- }
- ctx.modificationCallback.before(tuple);
- insertUpdateOrDelete(tuple, ctx);
- }
-
- private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
- ctx.modificationCallback.before(tuple);
- insertUpdateOrDelete(tuple, ctx);
- }
-
private boolean insertLeaf(ITupleReference tuple, int targetTupleIndex, int pageId, BTreeOpContext ctx)
throws Exception {
boolean restartOp = false;
@@ -590,8 +517,7 @@
case TOO_LARGE: {
int tupleSize = ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple);
- throw new TreeIndexException("Space required for record (" + tupleSize
- + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
}
default: {
@@ -606,7 +532,7 @@
// This means that there could be underflow, even an empty page that is
// pointed to by an interior node.
if (ctx.leafFrame.getTupleCount() == 0) {
- throw new TreeIndexNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+ throw HyracksDataException.create(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY);
}
int tupleIndex = ctx.leafFrame.findDeleteTupleIndex(tuple);
ITupleReference beforeTuple = ctx.leafFrame.getMatchingKeyTuple(tuple, tupleIndex);
@@ -639,7 +565,7 @@
}
private void performOp(int pageId, ICachedPage parent, boolean parentIsReadLatched, BTreeOpContext ctx)
- throws HyracksDataException, TreeIndexException {
+ throws HyracksDataException {
ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
ctx.interiorFrame.setPage(node);
// this check performs an unprotected read in the page
@@ -701,8 +627,8 @@
case UPDATE: {
// Is there a propagated split key?
if (ctx.splitKey.getBuffer() != null) {
- ICachedPage interiorNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ ICachedPage interiorNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
interiorNode.acquireWriteLatch();
try {
// Insert or update op. Both can cause split keys to propagate upwards.
@@ -719,7 +645,7 @@
case DELETE: {
if (ctx.splitKey.getBuffer() != null) {
- throw new BTreeException(
+ throw new HyracksDataException(
"Split key was propagated during delete. Delete allows empty leaf pages.");
}
break;
@@ -802,7 +728,7 @@
ctx.pageLsns.add(FULL_RESTART_OP);
}
}
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
if (!ctx.exceptionHandled) {
if (node != null) {
if (isReadLatched) {
@@ -824,7 +750,7 @@
}
bufferCache.unpin(node);
}
- BTreeException wrappedException = new BTreeException(e);
+ HyracksDataException wrappedException = HyracksDataException.create(e);
ctx.exceptionHandled = true;
throw wrappedException;
}
@@ -832,8 +758,8 @@
private BTreeOpContext createOpContext(IIndexAccessor accessor, IModificationOperationCallback modificationCallback,
ISearchOperationCallback searchCallback) {
- return new BTreeOpContext(accessor, leafFrameFactory, interiorFrameFactory,
- freePageManager, cmpFactories, modificationCallback, searchCallback);
+ return new BTreeOpContext(accessor, leafFrameFactory, interiorFrameFactory, freePageManager, cmpFactories,
+ modificationCallback, searchCallback);
}
@SuppressWarnings("rawtypes")
@@ -923,33 +849,33 @@
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
- btree.insert(tuple, ctx);
+ insert(tuple, ctx);
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.UPDATE);
- btree.update(tuple, ctx);
+ update(tuple, ctx);
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
- btree.delete(tuple, ctx);
+ delete(tuple, ctx);
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
upsertIfConditionElseInsert(tuple, UnconditionalTupleAcceptor.INSTANCE);
}
public void upsertIfConditionElseInsert(ITupleReference tuple, ITupleAcceptor acceptor)
- throws HyracksDataException, TreeIndexException {
+ throws HyracksDataException {
ctx.setOperation(IndexOperation.UPSERT);
ctx.acceptor = acceptor;
- btree.upsert(tuple, ctx);
+ upsert(tuple, ctx);
}
@Override
@@ -959,8 +885,7 @@
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred)
- throws HyracksDataException, TreeIndexException {
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
ctx.setOperation(IndexOperation.SEARCH);
btree.search((ITreeIndexCursor) cursor, searchPred, ctx);
}
@@ -988,24 +913,84 @@
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
return new BTreeCountingSearchCursor(leafFrame, false);
}
+
+ private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+ ctx.modificationCallback.before(tuple);
+ insertUpdateOrDelete(tuple, ctx);
+ }
+
+ private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+ ctx.modificationCallback.before(tuple);
+ insertUpdateOrDelete(tuple, ctx);
+ }
+
+ private void update(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+ // This call only allows updating of non-key fields.
+ // Updating a tuple's key necessitates deleting the old entry, and inserting the new entry.
+ // The user of the BTree is responsible for dealing with non-key updates (i.e., doing a delete + insert).
+ if (fieldCount == ctx.cmp.getKeyFieldCount()) {
+                throw HyracksDataException.create(ErrorCode.INDEX_NOT_UPDATABLE);
+ }
+ ctx.modificationCallback.before(tuple);
+ insertUpdateOrDelete(tuple, ctx);
+ }
+
+ private void delete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+ ctx.modificationCallback.before(tuple);
+ insertUpdateOrDelete(tuple, ctx);
+ }
+
+ private void insertUpdateOrDelete(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException {
+ ctx.reset();
+ ctx.pred.setLowKeyComparator(ctx.cmp);
+ ctx.pred.setHighKeyComparator(ctx.cmp);
+ ctx.pred.setLowKey(tuple, true);
+ ctx.pred.setHighKey(tuple, true);
+ ctx.splitKey.reset();
+ ctx.splitKey.getTuple().setFieldCount(ctx.cmp.getKeyFieldCount());
+ // We use this loop to deal with possibly multiple operation restarts
+ // due to ongoing structure modifications during the descent.
+ boolean repeatOp = true;
+ while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+ ctx.smoCount = smoCounter.get();
+ performOp(rootPage, null, true, ctx);
+ // Do we need to restart from the (possibly new) root?
+ if (!ctx.pageLsns.isEmpty()) {
+ if (ctx.pageLsns.getLast() == FULL_RESTART_OP) {
+ ctx.pageLsns.clear();
+ continue;
+ } else if (ctx.pageLsns.getLast() == RESTART_OP) {
+ ctx.pageLsns.removeLast(); // pop the restart op indicator
+ continue;
+ }
+
+ }
+ // Split key propagated?
+ if (ctx.splitKey.getBuffer() != null) {
+ // Insert or update op. Create a new root.
+ createNewRoot(ctx);
+ }
+ unsetSmPages(ctx);
+ repeatOp = false;
+ }
+
+ if (ctx.opRestarts >= MAX_RESTARTS) {
+ throw HyracksDataException.create(ErrorCode.OPERATION_EXCEEDED_MAX_RESTARTS, MAX_RESTARTS);
+ }
+ }
}
@Override
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new BTreeBulkLoader(fillFactor, verifyInput);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new BTreeBulkLoader(fillFactor, verifyInput);
}
public class BTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
protected final ISplitKey splitKey;
protected final boolean verifyInput;
- public BTreeBulkLoader(float fillFactor, boolean verifyInput)
- throws TreeIndexException, HyracksDataException {
+ public BTreeBulkLoader(float fillFactor, boolean verifyInput) throws HyracksDataException {
super(fillFactor);
this.verifyInput = verifyInput;
splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
@@ -1013,7 +998,7 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
int tupleSize = Math.max(leafFrame.getBytesRequiredToWriteTuple(tuple),
interiorFrame.getBytesRequiredToWriteTuple(tuple));
@@ -1062,8 +1047,8 @@
final long dpid = BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId);
// calculate required number of pages.
int headerSize = Math.max(leafFrame.getPageHeaderSize(), interiorFrame.getPageHeaderSize());
- final int multiplier = (int) Math
- .ceil((double) tupleSize / (bufferCache.getPageSize() - headerSize));
+ final int multiplier =
+ (int) Math.ceil((double) tupleSize / (bufferCache.getPageSize() - headerSize));
if (multiplier > 1) {
leafFrontier.page = bufferCache.confiscateLargePage(dpid, multiplier,
freePageManager.takeBlock(metaFrame, multiplier - 1));
@@ -1086,26 +1071,24 @@
}
}
((IBTreeLeafFrame) leafFrame).insertSorted(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (HyracksDataException | RuntimeException e) {
handleException();
throw e;
}
}
- protected void verifyInputTuple(ITupleReference tuple, ITupleReference prevTuple)
- throws IndexException, HyracksDataException {
+ protected void verifyInputTuple(ITupleReference tuple, ITupleReference prevTuple) throws HyracksDataException {
// New tuple should be strictly greater than last tuple.
int cmpResult = cmp.compare(tuple, prevTuple);
if (cmpResult < 0) {
- throw new UnsortedInputException("Input stream given to BTree bulk load is not sorted.");
+ throw HyracksDataException.create(ErrorCode.UNSORTED_LOAD_INPUT);
}
if (cmpResult == 0) {
- throw new TreeIndexDuplicateKeyException("Input stream given to BTree bulk load has duplicates.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_LOAD_INPUT);
}
}
- protected void propagateBulk(int level, List<ICachedPage> pagesToWrite)
- throws HyracksDataException, TreeIndexException {
+ protected void propagateBulk(int level, List<ICachedPage> pagesToWrite) throws HyracksDataException {
if (splitKey.getBuffer() == null) {
return;
}
@@ -1121,9 +1104,8 @@
int tupleBytes = tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount());
int spaceNeeded = tupleBytes + slotSize + 4;
if (tupleBytes > interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize())) {
- throw new TreeIndexException(
- "Space required for record (" + tupleBytes + ") larger than maximum acceptable size ("
- + interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize()) + ")");
+ throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleBytes,
+ interiorFrame.getMaxTupleSize(BTree.this.bufferCache.getPageSize()));
}
int spaceUsed = interiorFrame.getBuffer().capacity() - interiorFrame.getTotalFreeSpace();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
index 2297cc5..a02b492 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeCountingSearchCursor.java
@@ -78,12 +78,7 @@
public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
// in case open is called multiple times without closing
if (page != null) {
- if (exclusiveLatchNodes) {
- page.releaseWriteLatch(isPageDirty);
- } else {
- page.releaseReadLatch();
- }
- bufferCache.unpin(page);
+ releasePage();
}
page = ((BTreeCursorInitialState) initialState).getPage();
@@ -116,6 +111,15 @@
stopTupleIndex = getHighKeyIndex();
}
+ private void releasePage() throws HyracksDataException {
+ if (exclusiveLatchNodes) {
+ page.releaseWriteLatch(isPageDirty);
+ } else {
+ page.releaseReadLatch();
+ }
+ bufferCache.unpin(page);
+ }
+
private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
do {
ICachedPage nextLeaf = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeafPage), false);
@@ -205,12 +209,7 @@
@Override
public void close() throws HyracksDataException {
if (page != null) {
- if (exclusiveLatchNodes) {
- page.releaseWriteLatch(isPageDirty);
- } else {
- page.releaseReadLatch();
- }
- bufferCache.unpin(page);
+ releasePage();
}
tupleBuilder.reset();
tupleIndex = 0;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
index b4fdd9e..5452604 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -31,7 +31,6 @@
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleMode;
import org.apache.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -98,6 +97,7 @@
pred = null;
}
+ @Override
public ITupleReference getTuple() {
return frameTuple;
}
@@ -183,11 +183,7 @@
// retraverse the index looking for the reconciled key
reusablePredicate.setLowKey(reconciliationTuple, true);
- try {
- accessor.search(this, reusablePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ accessor.search(this, reusablePredicate);
if (stopTupleIndex < 0 || tupleIndex > stopTupleIndex) {
return false;
@@ -319,7 +315,7 @@
}
}
- public boolean isBloomFilterAware(){
+ public boolean isBloomFilterAware() {
return false;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
index f1a411b..5b7fa42 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java
@@ -19,6 +19,7 @@
package org.apache.hyracks.storage.am.btree.impls;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -130,4 +131,27 @@
public void setHighKeyCmp(MultiComparator highKeyCmp) {
this.highKeyCmp = highKeyCmp;
}
+
+ public boolean isPointPredicate(MultiComparator originalKeyComparator) throws HyracksDataException {
+ if (getLowKey() == null) {
+ return false;
+ }
+ if (getHighKey() == null) {
+ return false;
+ }
+ if (!isLowKeyInclusive()) {
+ return false;
+ }
+ if (!isHighKeyInclusive()) {
+ return false;
+ }
+ if (getLowKeyComparator().getKeyFieldCount() != getHighKeyComparator().getKeyFieldCount()) {
+ return false;
+ }
+ if (getLowKeyComparator().getKeyFieldCount() != originalKeyComparator.getKeyFieldCount()) {
+ return false;
+ }
+ return originalKeyComparator.compare(getLowKey(), getHighKey()) == 0;
+
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
index d713a92..75b2fdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/test/java/org/apache/hyracks/storage/am/btree/test/FramewriterTest.java
@@ -48,7 +48,6 @@
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable;
@@ -273,8 +272,8 @@
* @throws HyracksDataException
* @throws IndexException
*/
- public IFrameWriter[] createWriters() throws HyracksDataException, IndexException {
- ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<BTreeSearchOperatorNodePushable>();
+ public IFrameWriter[] createWriters() throws HyracksDataException {
+ ArrayList<BTreeSearchOperatorNodePushable> writers = new ArrayList<>();
AbstractTreeIndexOperatorDescriptor[] opDescs = mockIndexOpDesc();
IRecordDescriptorProvider[] recordDescProviders = mockRecDescProviders();
int partition = 0;
@@ -320,7 +319,7 @@
return rDesc;
}
- public ITreeIndex[] mockIndexes() throws HyracksDataException, IndexException {
+ public ITreeIndex[] mockIndexes() throws HyracksDataException {
IIndexAccessor[] indexAccessors = mockIndexAccessors();
ITreeIndex[] indexes = new ITreeIndex[indexAccessors.length * 2];
int j = 0;
@@ -336,7 +335,7 @@
return indexes;
}
- private IIndexAccessor[] mockIndexAccessors() throws HyracksDataException, IndexException {
+ private IIndexAccessor[] mockIndexAccessors() throws HyracksDataException {
IIndexCursor[] cursors = mockIndexCursors();
IIndexAccessor[] accessors = new IIndexAccessor[cursors.length * 2];
int j = 0;
@@ -367,7 +366,7 @@
return accessors;
}
- private IIndexCursor[] mockIndexCursors() throws HyracksDataException, IndexException {
+ private IIndexCursor[] mockIndexCursors() throws HyracksDataException {
ITupleReference[] tuples = mockTuples();
IIndexCursor[] cursors = new IIndexCursor[tuples.length * 2];
int j = 0;
@@ -392,7 +391,7 @@
return new ITupleReference[] { tuple };
}
- public IIndexDataflowHelper[] mockIndexHelpers() throws HyracksDataException, IndexException {
+ public IIndexDataflowHelper[] mockIndexHelpers() throws HyracksDataException {
ITreeIndex[] indexes = mockIndexes();
IIndexDataflowHelper[] indexHelpers = new IIndexDataflowHelper[indexes.length * 2];
int j = 0;
@@ -412,7 +411,7 @@
return indexHelpers;
}
- public IIndexDataflowHelperFactory[] mockIndexHelperFactories() throws HyracksDataException, IndexException {
+ public IIndexDataflowHelperFactory[] mockIndexHelperFactories() throws HyracksDataException {
IIndexDataflowHelper[] helpers = mockIndexHelpers();
IIndexDataflowHelperFactory[] indexHelperFactories = new IIndexDataflowHelperFactory[helpers.length];
for (int i = 0; i < helpers.length; i++) {
@@ -424,7 +423,7 @@
return indexHelperFactories;
}
- public AbstractTreeIndexOperatorDescriptor[] mockIndexOpDesc() throws HyracksDataException, IndexException {
+ public AbstractTreeIndexOperatorDescriptor[] mockIndexOpDesc() throws HyracksDataException {
IIndexDataflowHelperFactory[] indexDataflowHelperFactories = mockIndexHelperFactories();
ISearchOperationCallbackFactory[] searchOpCallbackFactories = mockSearchOpCallbackFactories();
AbstractTreeIndexOperatorDescriptor[] opDescs =
@@ -464,7 +463,7 @@
CountAnswer[] nextFrames = new CountAnswer[] { nextFrameNormal, nextFrameException, nextFrameError };
CountAnswer[] fails = new CountAnswer[] { failNormal, failException, failError };
CountAnswer[] closes = new CountAnswer[] { closeNormal, closeException, closeError };
- List<IFrameWriter> outputWriters = new ArrayList<IFrameWriter>();
+ List<IFrameWriter> outputWriters = new ArrayList<>();
for (CountAnswer openAnswer : opens) {
for (CountAnswer nextFrameAnswer : nextFrames) {
for (CountAnswer failAnswer : fails) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
index 884318e..a5fcc6e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndex.java
@@ -125,7 +125,7 @@
* @throws IndexException
*/
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws IndexException;
+ boolean checkIfEmptyIndex) throws HyracksDataException;
/**
* @return true if the index needs memory components
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
index 1903222..d981cd7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexAccessor.java
@@ -40,7 +40,7 @@
* If an index-specific constraint is violated, e.g., the key
* already exists.
*/
- public void insert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ public void insert(ITupleReference tuple) throws HyracksDataException;
/**
* Updates the tuple in the index matching the given tuple with the new
@@ -54,7 +54,7 @@
* @throws IndexException
* If there is no matching tuple in the index.
*/
- public void update(ITupleReference tuple) throws HyracksDataException, IndexException;
+ public void update(ITupleReference tuple) throws HyracksDataException;
/**
* Deletes the tuple in the index matching the given tuple.
@@ -66,7 +66,7 @@
* @throws IndexException
* If there is no matching tuple in the index.
*/
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException;
+ public void delete(ITupleReference tuple) throws HyracksDataException;
/**
* This operation is only supported by indexes with the notion of a unique key.
@@ -81,7 +81,7 @@
* If there is no matching tuple in the index.
*
*/
- public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ public void upsert(ITupleReference tuple) throws HyracksDataException;
/**
* Creates a cursor appropriate for passing into search().
@@ -101,5 +101,5 @@
* If the BufferCache throws while un/pinning or un/latching.
* @throws IndexException
*/
- public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException;
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
index f371522..9d3f657 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexBulkLoader.java
@@ -32,7 +32,7 @@
* @throws HyracksDataException
* If the BufferCache throws while un/pinning or un/latching.
*/
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException;
+ public void add(ITupleReference tuple) throws HyracksDataException;
/**
* Finalize the bulk loading operation in the given context.
@@ -41,7 +41,7 @@
* @throws HyracksDataException
* If the BufferCache throws while un/pinning or un/latching.
*/
- public void end() throws IndexException, HyracksDataException;
+ public void end() throws HyracksDataException;
/**
* Release all resources held by this bulkloader, with no guarantee of
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
index 17c4b98..2b5f3f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IIndexCursor.java
@@ -23,16 +23,48 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface IIndexCursor {
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
- HyracksDataException;
+ /**
+ * Opens the cursor
+ * if open succeeds, close must be called.
+ *
+ * @param initialState
+ * @param searchPred
+ * @throws HyracksDataException
+ */
+ void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException;
- public boolean hasNext() throws HyracksDataException, IndexException;
+ /**
+ * True if the cursor has a next value
+ *
+ * @return
+ * @throws HyracksDataException
+ */
+ boolean hasNext() throws HyracksDataException;
- public void next() throws HyracksDataException;
+ /**
+ * Moves the cursor to the next value
+ *
+ * @throws HyracksDataException
+ */
+ void next() throws HyracksDataException;
- public void close() throws HyracksDataException;
+ /**
+ * Closes the cursor
+ *
+ * @throws HyracksDataException
+ */
+ void close() throws HyracksDataException;
- public void reset() throws HyracksDataException, IndexException;
+ /**
+ * Reset the cursor to be reused
+ *
+     * @throws HyracksDataException
+ */
+ void reset() throws HyracksDataException;
- public ITupleReference getTuple();
+ /**
+ * @return the tuple pointed to by the cursor
+ */
+ ITupleReference getTuple();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 8e7834f..c841f84 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -89,7 +89,7 @@
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException, TreeIndexException;
+ throws HyracksDataException;
public ISlotManager getSlotManager();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
index 5a7a4a7..16c0afa 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITwoPCIndexBulkLoader.java
@@ -34,7 +34,7 @@
* @throws HyracksDataException
* If the BufferCache throws while un/pinning or un/latching.
*/
- public void delete(ITupleReference tuple) throws IndexException, HyracksDataException;
+ public void delete(ITupleReference tuple) throws HyracksDataException;
/**
* Abort the bulk modify op
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java
deleted file mode 100644
index 0c5d2cc..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IndexException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.api;
-
-public class IndexException extends Exception {
- private static final long serialVersionUID = 1L;
-
- public IndexException(Exception e) {
- super(e);
- }
-
- public IndexException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java
deleted file mode 100644
index 080752f..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/TreeIndexException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.api;
-
-public class TreeIndexException extends IndexException {
-
- private static final long serialVersionUID = 1L;
- private boolean handled = false;
-
- public TreeIndexException(Exception e) {
- super(e);
- }
-
- public TreeIndexException(String message) {
- super(message);
- }
-
- public void setHandled(boolean handled) {
- this.handled = handled;
- }
-
- public boolean getHandled() {
- return handled;
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java
deleted file mode 100644
index 52a8488..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/UnsortedInputException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.api;
-
-/**
- * Thrown when trying to bulk load an index with an unsorted input stream.
- */
-public class UnsortedInputException extends IndexException {
- private static final long serialVersionUID = 1L;
-
- public UnsortedInputException(Exception e) {
- super(e);
- }
-
- public UnsortedInputException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
index 6439279..12da35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexBulkLoadOperatorNodePushable.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
public class IndexBulkLoadOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
@@ -83,12 +82,7 @@
for (int i = 0; i < tupleCount; i++) {
tuple.reset(accessor, i);
-
- try {
- bulkLoader.add(tuple);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ bulkLoader.add(tuple);
}
FrameUtils.flushFrame(buffer, writer);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
index 4f01978..0fdbf34 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexInsertUpdateDeleteOperatorNodePushable.java
@@ -25,6 +25,7 @@
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
@@ -36,8 +37,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.ITupleFilter;
import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
@@ -79,8 +78,8 @@
try {
writer.open();
LocalResource resource = indexHelper.getResource();
- modCallback = opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(resource, ctx,
- this);
+ modCallback =
+ opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(resource, ctx, this);
indexAccessor = index.createAccessor(modCallback, NoOpOperationCallback.INSTANCE);
ITupleFilterFactory tupleFilterFactory = opDesc.getTupleFilterFactory();
if (tupleFilterFactory != null) {
@@ -110,8 +109,11 @@
case INSERT: {
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // ingnore that exception to allow inserting existing keys which becomes an NoOp
+ } catch (HyracksDataException e) {
+ // ignore that exception to allow inserting existing keys which becomes a no-op
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
break;
}
@@ -126,8 +128,11 @@
case DELETE: {
try {
indexAccessor.delete(tuple);
- } catch (TreeIndexNonExistentKeyException e) {
+ } catch (HyracksDataException e) {
// ingnore that exception to allow deletions of non-existing keys
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw e;
+ }
}
break;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java
deleted file mode 100644
index 3178740..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexDuplicateKeyException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class TreeIndexDuplicateKeyException extends TreeIndexException {
- private static final long serialVersionUID = 1L;
-
- public TreeIndexDuplicateKeyException(Exception e) {
- super(e);
- }
-
- public TreeIndexDuplicateKeyException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java
deleted file mode 100644
index bd5e5c4..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/exceptions/TreeIndexNonExistentKeyException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.common.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-
-public class TreeIndexNonExistentKeyException extends TreeIndexException {
-
- private static final long serialVersionUID = 1L;
-
- public TreeIndexNonExistentKeyException(Exception e) {
- super(e);
- }
-
- public TreeIndexNonExistentKeyException(String message) {
- super(message);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
index 0af7939..2c8429d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/AbstractTreeIndex.java
@@ -34,8 +34,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -70,10 +68,9 @@
protected int bulkloadLeafStart = 0;
- public AbstractTreeIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider,
- IPageManager freePageManager, ITreeIndexFrameFactory interiorFrameFactory,
- ITreeIndexFrameFactory leafFrameFactory, IBinaryComparatorFactory[] cmpFactories, int fieldCount,
- FileReference file) {
+ public AbstractTreeIndex(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IPageManager freePageManager,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ IBinaryComparatorFactory[] cmpFactories, int fieldCount, FileReference file) {
this.bufferCache = bufferCache;
this.fileMapProvider = fileMapProvider;
this.freePageManager = freePageManager;
@@ -272,8 +269,7 @@
protected final IFIFOPageQueue queue;
protected List<ICachedPage> pagesToWrite;
- public AbstractTreeIndexBulkLoader(float fillFactor)
- throws TreeIndexException, HyracksDataException {
+ public AbstractTreeIndexBulkLoader(float fillFactor) throws HyracksDataException {
leafFrame = leafFrameFactory.createFrame();
interiorFrame = interiorFrameFactory.createFrame();
metaFrame = freePageManager.createMetadataFrame();
@@ -281,7 +277,7 @@
queue = bufferCache.createFIFOQueue();
if (!isEmptyTree(leafFrame)) {
- throw new TreeIndexException("Cannot bulk-load a non-empty tree.");
+ throw new HyracksDataException("Cannot bulk-load a non-empty tree.");
}
this.cmp = MultiComparator.create(cmpFactories);
@@ -293,8 +289,8 @@
NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
leafFrontier.pageId = freePageManager.takePage(metaFrame);
- leafFrontier.page = bufferCache
- .confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
+ leafFrontier.page =
+ bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
interiorFrame.setPage(leafFrontier.page);
interiorFrame.initBuffer((byte) 0);
@@ -310,7 +306,7 @@
}
@Override
- public abstract void add(ITupleReference tuple) throws IndexException, HyracksDataException;
+ public abstract void add(ITupleReference tuple) throws HyracksDataException;
protected void handleException() throws HyracksDataException {
// Unlatch and unpin pages that weren't in the queue to avoid leaking memory.
@@ -356,17 +352,13 @@
ITreeIndexAccessor accessor;
public TreeIndexInsertBulkLoader() throws HyracksDataException {
- accessor = (ITreeIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ accessor =
+ (ITreeIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
}
@Override
public void add(ITupleReference tuple) throws HyracksDataException {
- try {
- accessor.insert(tuple);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ accessor.insert(tuple);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
index cc4bfb9..90faa91 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTree.java
@@ -23,6 +23,7 @@
import java.util.List;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -44,13 +45,11 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.btree.tuples.LSMBTreeRefrencingTupleWriterFactory;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -100,8 +99,8 @@
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
TreeIndexFactory<BTree> transactionBTreeFactory, int version, boolean durable) {
super(ioManager, insertLeafFrameFactory, deleteLeafFrameFactory, fileManager, diskBTreeFactory,
- bulkLoadBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider,
- fieldCount, cmpFactories, mergePolicy, opTracker, ioScheduler, ioOpCallback, false, durable);
+ bulkLoadBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider, fieldCount,
+ cmpFactories, mergePolicy, opTracker, ioScheduler, ioOpCallback, false, durable);
this.transactionComponentFactory =
new LSMBTreeDiskComponentFactory(transactionBTreeFactory, bloomFilterFactory, null);
this.secondDiskComponents = new LinkedList<>();
@@ -152,7 +151,7 @@
// The only reason to override the following method is that it uses a different context object
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ExternalBTreeOpContext ctx = (ExternalBTreeOpContext) ictx;
List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
ctx.searchInitialState.reset(pred, operationalComponents);
@@ -169,7 +168,7 @@
// in addition, determining whether or not to keep deleted tuples is different here
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ExternalBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, -1);
opCtx.setOperation(IndexOperation.MERGE);
List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -249,20 +248,12 @@
if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
//First time activation
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMBTreeDiskComponent component;
- try {
- component = createDiskComponent(componentFactory,
- lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component =
+ createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
diskComponents.add(component);
secondDiskComponents.add(component);
}
@@ -395,8 +386,7 @@
// Not supported
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-BTree");
}
@@ -409,7 +399,7 @@
// Not supported
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
}
@@ -451,23 +441,15 @@
// For initial load
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, false);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, false);
}
// For transaction bulk load <- could consolidate with the above method ->
@Override
public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
}
// The bulk loader used for both initial loading and transaction
@@ -483,24 +465,16 @@
private final ITreeIndexTupleWriterFactory frameTupleWriterFactory;
public LSMTwoPCBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
this.isTransaction = isTransaction;
// Create the appropriate target
if (isTransaction) {
- try {
- component = createTransactionTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createTransactionTarget();
} else {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createBulkLoadTarget();
}
frameTupleWriterFactory =
@@ -518,11 +492,11 @@
// It is expected that the mode was set to insert operation before
// calling add
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
bulkLoader.add(tuple);
builder.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -533,7 +507,7 @@
// This is made public in case of a failure, it is better to delete all
// created artifacts.
- public void cleanupArtifacts() throws HyracksDataException, IndexException {
+ public void cleanupArtifacts() throws HyracksDataException {
if (!cleanedUpArtifacts) {
cleanedUpArtifacts = true;
// We make sure to end the bloom filter load to release latches.
@@ -557,7 +531,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (!endedBloomFilterLoad) {
builder.end();
@@ -583,12 +557,12 @@
// It is expected that the mode was set to delete operation before
// calling delete
@Override
- public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
((LSMBTreeRefrencingTupleWriterFactory) frameTupleWriterFactory).setMode(IndexOperation.DELETE);
try {
bulkLoader.add(tuple);
builder.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -609,12 +583,12 @@
// This method is used to create a target for a bulk modify operation. This
// component must then be either committed or deleted
- private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+ private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs;
try {
componentFileRefs = fileManager.getNewTransactionFileReference();
} catch (IOException e) {
- throw new HyracksDataException("Failed to create transaction components", e);
+ throw HyracksDataException.create(e);
}
return createDiskComponent(transactionComponentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getBloomFilterFileReference(), true);
@@ -680,7 +654,7 @@
}
@Override
- public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException {
+ public void commitTransaction() throws HyracksDataException {
LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
LSMBTreeDiskComponent component = null;
if (componentFileRefrences != null) {
@@ -691,21 +665,13 @@
}
@Override
- public void abortTransaction() throws TreeIndexException {
- try {
- fileManager.deleteTransactionFiles();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void abortTransaction() throws HyracksDataException {
+ fileManager.deleteTransactionFiles();
}
@Override
- public void recoverTransaction() throws TreeIndexException {
- try {
- fileManager.recoverTransaction();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void recoverTransaction() throws HyracksDataException {
+ fileManager.recoverTransaction();
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
index dc28db4..3be596c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddy.java
@@ -25,6 +25,7 @@
import java.util.Set;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -47,14 +48,12 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -141,21 +140,13 @@
if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
//First time activation
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMBTreeWithBuddyDiskComponent component;
- try {
- component = createDiskComponent(componentFactory,
- lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getDeleteIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component =
+ createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getDeleteIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
diskComponents.add(component);
secondDiskComponents.add(component);
}
@@ -299,35 +290,25 @@
// For initial load
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
}
// For transaction bulk load <- could consolidate with the above method ->
@Override
public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex,
- true);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCBTreeWithBuddyBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
}
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-BTree");
}
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ExternalBTreeWithBuddyOpContext ctx = (ExternalBTreeWithBuddyOpContext) ictx;
List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
ctx.searchInitialState.setOperationalComponents(operationalComponents);
@@ -337,12 +318,12 @@
@Override
public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
throws HyracksDataException {
- throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
+ throw HyracksDataException.create(ErrorCode.FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
}
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
- throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-BTree");
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
+ throw HyracksDataException.create(ErrorCode.FLUSH_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
}
protected LSMComponentFileReferences getMergeTargetFileName(List<ILSMComponent> mergingDiskComponents)
@@ -359,7 +340,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ILSMIndexOperationContext bctx = createOpContext(NoOpOperationCallback.INSTANCE, 0);
bctx.setOperation(IndexOperation.MERGE);
List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -385,15 +366,14 @@
}
// This method creates the appropriate opContext for the targeted version
- public ExternalBTreeWithBuddyOpContext createOpContext(ISearchOperationCallback searchCallback,
- int targetVersion) {
+ public ExternalBTreeWithBuddyOpContext createOpContext(ISearchOperationCallback searchCallback, int targetVersion) {
return new ExternalBTreeWithBuddyOpContext(btreeCmpFactories, buddyBtreeCmpFactories, searchCallback,
targetVersion, lsmHarness, btreeInteriorFrameFactory, btreeLeafFrameFactory,
buddyBtreeLeafFrameFactory);
}
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMBTreeWithBuddyMergeOperation mergeOp = (LSMBTreeWithBuddyMergeOperation) operation;
ITreeIndexCursor cursor = mergeOp.getCursor();
ISearchPredicate btreeSearchPred = new RangePredicate(null, null, true, true, null, null);
@@ -402,8 +382,8 @@
search(opCtx, cursor, btreeSearchPred);
LSMBTreeWithBuddyDiskComponent mergedComponent =
- createDiskComponent(componentFactory, mergeOp.getBTreeMergeTarget(),
- mergeOp.getBuddyBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true);
+ createDiskComponent(componentFactory, mergeOp.getBTreeMergeTarget(), mergeOp.getBuddyBTreeMergeTarget(),
+ mergeOp.getBloomFilterMergeTarget(), true);
// In case we must keep the deleted-keys BuddyBTrees, then they must be
// merged *before* merging the b-trees so that
@@ -421,8 +401,8 @@
long numElements = 0L;
for (int i = 0; i < mergeOp.getMergingComponents().size(); ++i) {
- numElements += ((LSMBTreeWithBuddyDiskComponent) mergeOp.getMergingComponents().get(i))
- .getBloomFilter().getNumElements();
+ numElements += ((LSMBTreeWithBuddyDiskComponent) mergeOp.getMergingComponents().get(i)).getBloomFilter()
+ .getNumElements();
}
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
@@ -613,7 +593,7 @@
private LSMBTreeWithBuddyDiskComponent createDiskComponent(ILSMDiskComponentFactory factory,
FileReference insertFileRef, FileReference deleteFileRef, FileReference bloomFilterFileRef,
- boolean createComponent) throws HyracksDataException, IndexException {
+ boolean createComponent) throws HyracksDataException {
// Create new instance.
LSMBTreeWithBuddyDiskComponent component = (LSMBTreeWithBuddyDiskComponent) factory
.createComponent(new LSMComponentFileReferences(insertFileRef, deleteFileRef, bloomFilterFileRef));
@@ -666,24 +646,16 @@
private final boolean isTransaction;
public LSMTwoPCBTreeWithBuddyBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
this.isTransaction = isTransaction;
// Create the appropriate target
if (isTransaction) {
- try {
- component = createTransactionTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createTransactionTarget();
} else {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createBulkLoadTarget();
}
// Create the three loaders
@@ -699,10 +671,10 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
btreeBulkLoader.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -738,7 +710,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (!endedBloomFilterLoad) {
builder.end();
@@ -765,11 +737,11 @@
}
@Override
- public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
try {
buddyBtreeBulkLoader.add(tuple);
builder.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -788,12 +760,12 @@
// This method is used to create a target for a bulk modify operation. This
// component must then eventually be either committed or deleted
- private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+ private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs;
try {
componentFileRefs = fileManager.getNewTransactionFileReference();
} catch (IOException e) {
- throw new HyracksDataException("Failed to create transaction components", e);
+ throw HyracksDataException.create(e);
}
return createDiskComponent(bulkComponentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -801,11 +773,10 @@
}
}
- protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+ protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
return createDiskComponent(bulkComponentFactory, componentFileRefs.getInsertIndexFileReference(),
- componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
- true);
+ componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
}
@Override
@@ -842,7 +813,7 @@
}
@Override
- public void commitTransaction() throws HyracksDataException, IndexException {
+ public void commitTransaction() throws HyracksDataException {
LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
LSMBTreeWithBuddyDiskComponent component = null;
if (componentFileRefrences != null) {
@@ -854,21 +825,13 @@
}
@Override
- public void abortTransaction() throws TreeIndexException {
- try {
- fileManager.deleteTransactionFiles();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void abortTransaction() throws HyracksDataException {
+ fileManager.deleteTransactionFiles();
}
@Override
- public void recoverTransaction() throws TreeIndexException {
- try {
- fileManager.recoverTransaction();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void recoverTransaction() throws HyracksDataException {
+ fileManager.recoverTransaction();
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 3ecb5e0..04c895f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -25,6 +25,7 @@
import java.util.Set;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -48,9 +49,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -177,20 +175,11 @@
List<ILSMDiskComponent> immutableComponents = diskComponents;
immutableComponents.clear();
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMBTreeDiskComponent component;
- try {
- component =
- createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
immutableComponents.add(component);
}
isActivated = true;
@@ -204,8 +193,7 @@
if (flushOnExit) {
BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
- ILSMIndexAccessor accessor =
- createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
accessor.scheduleFlush(cb);
try {
cb.waitForIO();
@@ -326,8 +314,7 @@
}
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
ITupleReference indexTuple;
@@ -356,7 +343,7 @@
}
}
- private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException, IndexException {
+ private boolean insert(ITupleReference tuple, LSMBTreeOpContext ctx) throws HyracksDataException {
LSMBTreePointSearchCursor searchCursor = ctx.insertSearchCursor;
IIndexCursor memCursor = ctx.memCursor;
RangePredicate predicate = (RangePredicate) ctx.getSearchPredicate();
@@ -370,7 +357,7 @@
memCursor.next();
LSMBTreeTupleReference lsmbtreeTuple = (LSMBTreeTupleReference) memCursor.getTuple();
if (!lsmbtreeTuple.isAntimatter()) {
- throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
} else {
memCursor.close();
ctx.currentMutableBTreeAccessor.upsertIfConditionElseInsert(tuple,
@@ -394,7 +381,7 @@
search(ctx, searchCursor, predicate);
try {
if (searchCursor.hasNext()) {
- throw new TreeIndexDuplicateKeyException("Failed to insert key since key already exists.");
+ throw HyracksDataException.create(ErrorCode.DUPLICATE_KEY);
}
} finally {
searchCursor.close();
@@ -408,7 +395,7 @@
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMBTreeOpContext ctx = (LSMBTreeOpContext) ictx;
List<ILSMComponent> operationalComponents = ctx.getComponentHolder();
ctx.searchInitialState.reset(pred, operationalComponents);
@@ -431,7 +418,7 @@
}
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
LSMBTreeFlushOperation flushOp = (LSMBTreeFlushOperation) operation;
LSMBTreeMemoryComponent flushingComponent = (LSMBTreeMemoryComponent) flushOp.getFlushingComponent();
IIndexAccessor accessor = flushingComponent.getBTree().createAccessor(NoOpOperationCallback.INSTANCE,
@@ -506,7 +493,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMBTreeOpContext opCtx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
opCtx.setOperation(IndexOperation.MERGE);
List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -529,7 +516,7 @@
}
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMBTreeMergeOperation mergeOp = (LSMBTreeMergeOperation) operation;
ITreeIndexCursor cursor = mergeOp.getCursor();
RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
@@ -587,7 +574,7 @@
protected LSMBTreeDiskComponent createDiskComponent(LSMBTreeDiskComponentFactory factory,
FileReference btreeFileRef, FileReference bloomFilterFileRef, boolean createComponent)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
// Create new BTree instance.
LSMBTreeDiskComponent component =
factory.createComponent(new LSMComponentFileReferences(btreeFileRef, null, bloomFilterFileRef));
@@ -610,15 +597,11 @@
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMBTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
}
- protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+ protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
return createDiskComponent(bulkLoadComponentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getBloomFilterFileReference(), true);
@@ -647,15 +630,11 @@
public final MultiComparator filterCmp;
public LSMBTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createBulkLoadTarget();
bulkLoader = (BTreeBulkLoader) ((LSMBTreeDiskComponent) component).getBTree().createBulkLoader(fillFactor,
verifyInput, numElementsHint, false);
@@ -681,7 +660,7 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
ITupleReference t;
if (indexTuple != null) {
@@ -700,7 +679,7 @@
filterTuple.reset(tuple);
component.getLSMComponentFilter().update(filterTuple, filterCmp);
}
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -726,7 +705,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (hasBloomFilter && !endedBloomFilterLoad) {
builder.end();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
index 4a12b5a..194eb3a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
@@ -34,7 +34,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -82,7 +81,7 @@
};
@Override
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
List<LSMComponentFileReferences> validFiles = new ArrayList<>();
ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
index a2ed40b..c57c35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeFlushOperation.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -67,7 +66,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.flush(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
index 505a5f1..fbf6c5c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeMergeOperation.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -79,7 +78,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.merge(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
index 2fb96c3..77c3573 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreePointSearchCursor.java
@@ -32,7 +32,6 @@
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
@@ -63,7 +62,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (nextHasBeenCalled) {
return false;
} else if (foundTuple) {
@@ -125,7 +124,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
try {
if (rangeCursors != null) {
for (int i = 0; i < rangeCursors.length; ++i) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
index d6c12e2..1b8c151 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeRangeSearchCursor.java
@@ -34,7 +34,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
@@ -62,7 +61,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
super.reset();
proceed = true;
}
@@ -75,7 +74,7 @@
}
@Override
- protected void checkPriorityQueue() throws HyracksDataException, IndexException {
+ protected void checkPriorityQueue() throws HyracksDataException {
while (!outputPriorityQueue.isEmpty() || needPush == true) {
if (!outputPriorityQueue.isEmpty()) {
PriorityQueueElement checkElement = outputPriorityQueue.peek();
@@ -178,8 +177,7 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred)
- throws HyracksDataException, IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
cmp = lsmInitialState.getOriginalKeyComparator();
operationalComponents = lsmInitialState.getOperationalComponents();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
index 5d5501d..a368ee9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeSearchCursor.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -47,42 +46,16 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws IndexException,
- HyracksDataException {
-
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMBTreeCursorInitialState lsmInitialState = (LSMBTreeCursorInitialState) initialState;
-
- LSMBTreeSearchType searchType = LSMBTreeSearchType.RANGE;
RangePredicate btreePred = (RangePredicate) searchPred;
- if (btreePred.getLowKey() != null && btreePred.getHighKey() != null) {
- if (btreePred.isLowKeyInclusive() && btreePred.isHighKeyInclusive()) {
- if (btreePred.getLowKeyComparator().getKeyFieldCount() == btreePred.getHighKeyComparator()
- .getKeyFieldCount()) {
- if (btreePred.getLowKeyComparator().getKeyFieldCount() == lsmInitialState
- .getOriginalKeyComparator().getKeyFieldCount()) {
- if (lsmInitialState.getOriginalKeyComparator().compare(btreePred.getLowKey(),
- btreePred.getHighKey()) == 0) {
- searchType = LSMBTreeSearchType.POINT;
- }
- }
- }
- }
- }
- switch (searchType) {
- case POINT:
- currentCursor = pointCursor;
- break;
- case RANGE:
- currentCursor = rangeCursor;
- break;
- default:
- throw new HyracksDataException("Wrong search type");
- }
+ currentCursor =
+ btreePred.isPointPredicate(lsmInitialState.getOriginalKeyComparator()) ? pointCursor : rangeCursor;
currentCursor.open(lsmInitialState, searchPred);
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
return currentCursor.hasNext();
}
@@ -100,7 +73,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
if (currentCursor != null) {
currentCursor.reset();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
index dab48f7..0f03085 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyAbstractCursor.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -72,8 +71,7 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred)
- throws IndexException, HyracksDataException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMBTreeWithBuddyCursorInitialState lsmInitialState = (LSMBTreeWithBuddyCursorInitialState) initialState;
btreeCmp = lsmInitialState.getBTreeCmp();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
index 683c2e9..0173c06 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyFileManager.java
@@ -33,7 +33,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -74,8 +73,8 @@
String baseName = baseDir + ts + SPLIT_STRING + ts;
// Begin timestamp and end timestamp are identical since it is a flush
return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
- createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
- + SPLIT_STRING + BLOOM_FILTER_STRING));
+ createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+ createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
@@ -88,12 +87,12 @@
// Get the range of timestamps by taking the earliest and the latest
// timestamps
return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + BTREE_STRING),
- createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createMergeFile(baseName
- + SPLIT_STRING + BLOOM_FILTER_STRING));
+ createMergeFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+ createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
List<LSMComponentFileReferences> validFiles = new ArrayList<>();
ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
ArrayList<ComparableFileName> allBuddyBTreeFiles = new ArrayList<>();
@@ -178,7 +177,8 @@
invalidBloomFilterFile.delete();
} else {
// This scenario should not be possible.
- throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+ throw new HyracksDataException(
+ "Found LSM files with overlapping but not contained timestamp intervals.");
}
}
@@ -210,8 +210,8 @@
String baseName = baseDir + ts + SPLIT_STRING + ts;
return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
- createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING), createFlushFile(baseName
- + SPLIT_STRING + BLOOM_FILTER_STRING));
+ createFlushFile(baseName + SPLIT_STRING + BUDDY_BTREE_STRING),
+ createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
@@ -230,8 +230,8 @@
// get the actual transaction files
files = dir.list(transactionFilter);
if (files.length < 3) {
- throw new HyracksDataException("LSM Btree with buddy transaction has less than 3 files :"
- + files.length);
+ throw new HyracksDataException(
+ "LSM Btree with buddy transaction has less than 3 files :" + files.length);
}
try {
Files.delete(Paths.get(txnFileName));
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
index 2810b62..0806834 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddyMergeOperation.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -85,7 +84,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.merge(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
index a109f3f..768ef27 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySearchCursor.java
@@ -22,11 +22,10 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-public class LSMBTreeWithBuddySearchCursor extends LSMBTreeWithBuddyAbstractCursor{
+public class LSMBTreeWithBuddySearchCursor extends LSMBTreeWithBuddyAbstractCursor {
private int currentCursor;
private PermutingTupleReference buddyBTreeTuple;
@@ -64,17 +63,13 @@
private void searchNextCursor() throws HyracksDataException {
if (currentCursor < numberOfTrees) {
- try {
- btreeCursors[currentCursor].reset();
- btreeAccessors[currentCursor].search(btreeCursors[currentCursor], btreeRangePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeCursors[currentCursor].reset();
+ btreeAccessors[currentCursor].search(btreeCursors[currentCursor], btreeRangePredicate);
}
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (foundNext) {
return true;
}
@@ -117,7 +112,7 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
super.open(initialState, searchPred);
searchNextCursor();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
index d045059..5e356c4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeWithBuddySortedCursor.java
@@ -21,12 +21,10 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-public class LSMBTreeWithBuddySortedCursor extends
- LSMBTreeWithBuddyAbstractCursor {
+public class LSMBTreeWithBuddySortedCursor extends LSMBTreeWithBuddyAbstractCursor {
// TODO: This class can be removed and instead use a search cursor that uses
// a logic similar
// to the one in LSMRTreeWithAntiMatterTuplesSearchCursor
@@ -35,8 +33,8 @@
private int foundIn = -1;
private PermutingTupleReference buddyBtreeTuple;
- public LSMBTreeWithBuddySortedCursor(ILSMIndexOperationContext opCtx,
- int[] buddyBTreeFields) throws HyracksDataException {
+ public LSMBTreeWithBuddySortedCursor(ILSMIndexOperationContext opCtx, int[] buddyBTreeFields)
+ throws HyracksDataException {
super(opCtx);
this.buddyBtreeTuple = new PermutingTupleReference(buddyBTreeFields);
reset();
@@ -53,12 +51,7 @@
try {
for (int i = 0; i < numberOfTrees; i++) {
btreeCursors[i].reset();
- try {
- btreeAccessors[i].search(btreeCursors[i],
- btreeRangePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
if (btreeCursors[i].hasNext()) {
btreeCursors[i].next();
} else {
@@ -67,9 +60,7 @@
}
} catch (Exception e) {
e.printStackTrace();
- throw new HyracksDataException(
- "error while reseting the btrees of the lsm btree with buddy btree",
- e);
+ throw new HyracksDataException("error while resetting the btrees of the lsm btree with buddy btree", e);
} finally {
if (open) {
lsmHarness.endSearch(opCtx);
@@ -78,7 +69,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
while (!foundNext) {
frameTuple = null;
@@ -92,8 +83,9 @@
foundIn = -1;
for (int i = 0; i < numberOfTrees; i++) {
- if (depletedBtreeCursors[i])
+ if (depletedBtreeCursors[i]) {
continue;
+ }
if (frameTuple == null) {
frameTuple = btreeCursors[i].getTuple();
@@ -107,21 +99,17 @@
}
}
- if (foundIn == -1)
+ if (foundIn == -1) {
return false;
+ }
boolean killed = false;
buddyBtreeTuple.reset(frameTuple);
for (int i = 0; i < foundIn; i++) {
- try {
- buddyBtreeCursors[i].reset();
- buddyBtreeRangePredicate.setHighKey(buddyBtreeTuple, true);
- btreeRangePredicate.setLowKey(buddyBtreeTuple, true);
- btreeAccessors[i].search(btreeCursors[i],
- btreeRangePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ buddyBtreeCursors[i].reset();
+ buddyBtreeRangePredicate.setHighKey(buddyBtreeTuple, true);
+ btreeRangePredicate.setLowKey(buddyBtreeTuple, true);
+ btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
try {
if (btreeCursors[i].hasNext()) {
killed = true;
@@ -145,20 +133,14 @@
}
@Override
- public void open(ICursorInitialState initialState,
- ISearchPredicate searchPred) throws HyracksDataException,
- IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
super.open(initialState, searchPred);
depletedBtreeCursors = new boolean[numberOfTrees];
foundNext = false;
for (int i = 0; i < numberOfTrees; i++) {
btreeCursors[i].reset();
- try {
- btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
if (btreeCursors[i].hasNext()) {
btreeCursors[i].next();
} else {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
index 0496809..d60a620 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBuddyBTreeMergeCursor.java
@@ -28,7 +28,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -41,15 +40,12 @@
}
@Override
- protected boolean isDeleted(PriorityQueueElement checkElement)
- throws HyracksDataException, IndexException {
+ protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
return false;
}
@Override
- public void open(ICursorInitialState initialState,
- ISearchPredicate searchPred) throws IndexException,
- HyracksDataException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMBTreeWithBuddyCursorInitialState lsmInitialState = (LSMBTreeWithBuddyCursorInitialState) initialState;
cmp = lsmInitialState.getBuddyBTreeCmp();
operationalComponents = lsmInitialState.getOperationalComponents();
@@ -60,19 +56,15 @@
int numBTrees = operationalComponents.size();
rangeCursors = new IIndexCursor[numBTrees];
- RangePredicate btreePredicate = new RangePredicate(null, null, true,
- true, cmp, cmp);
+ RangePredicate btreePredicate = new RangePredicate(null, null, true, true, cmp, cmp);
IIndexAccessor[] btreeAccessors = new ITreeIndexAccessor[numBTrees];
for (int i = 0; i < numBTrees; i++) {
ILSMComponent component = operationalComponents.get(i);
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState
- .getBuddyBTreeLeafFrameFactory().createFrame();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getBuddyBTreeLeafFrameFactory().createFrame();
rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
- BTree buddyBtree = (BTree) ((LSMBTreeWithBuddyDiskComponent) component)
- .getBuddyBTree();
- btreeAccessors[i] = buddyBtree.createAccessor(
- NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ BTree buddyBtree = ((LSMBTreeWithBuddyDiskComponent) component).getBuddyBTree();
+ btreeAccessors[i] =
+ buddyBtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
btreeAccessors[i].search(rangeCursors[i], btreePredicate);
}
setPriorityQueueComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
index f21c8a3..298d75d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMHarness.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
public interface ILSMHarness {
@@ -39,7 +38,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException, IndexException;
+ void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException;
/**
* Modify the index if the memory component is not full, wait for a new memory component if the current one is full
@@ -55,7 +54,7 @@
* @throws IndexException
*/
boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
/**
* Search the index
@@ -70,7 +69,7 @@
* @throws IndexException
*/
void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
/**
* End the search
@@ -89,7 +88,7 @@
* @throws IndexException
*/
void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
/**
* Schedule full merge
@@ -100,7 +99,7 @@
* @throws IndexException
*/
void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
/**
* Perform a merge operation
@@ -110,7 +109,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException;
/**
* Schedule a flush
@@ -129,7 +128,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException;
/**
* Add bulk loaded component
@@ -139,7 +138,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void addBulkLoadedComponent(ILSMDiskComponent index) throws HyracksDataException, IndexException;
+ void addBulkLoadedComponent(ILSMDiskComponent index) throws HyracksDataException;
/**
* Get index operation tracker
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
index d768122..829523c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIOOperation.java
@@ -23,7 +23,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
public interface ILSMIOOperation extends Callable<Boolean> {
@@ -37,7 +36,7 @@
Set<IODeviceHandle> getWriteDevices();
@Override
- Boolean call() throws HyracksDataException, IndexException;
+ Boolean call() throws HyracksDataException;
ILSMIOOperationCallback getCallback();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
index 234006f..344674f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMHarness;
/**
@@ -62,19 +61,17 @@
boolean isPrimaryIndex();
- void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException, IndexException;
+ void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException;
- void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException;
+ void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred) throws HyracksDataException;
void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException;
- ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException;
- void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException;
+ void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback) throws HyracksDataException;
- ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException;
void addDiskComponent(ILSMDiskComponent index) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
index b0f366b..a04c54e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexAccessor.java
@@ -24,8 +24,6 @@
import org.apache.hyracks.data.std.api.IValueReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
/**
* Client handle for performing operations
@@ -55,7 +53,7 @@
* @throws IndexException
*/
void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
/**
* Schedule a full merge
@@ -65,7 +63,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException;
+ void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException;
/**
* Delete the tuple from the memory component only. Don't replace with antimatter tuple
@@ -75,7 +73,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+ void physicalDelete(ITupleReference tuple) throws HyracksDataException;
/**
* Attempts to insert the given tuple.
@@ -91,7 +89,7 @@
* If an index-specific constraint is violated, e.g., the key
* already exists.
*/
- boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ boolean tryInsert(ITupleReference tuple) throws HyracksDataException;
/**
* Attempts to delete the given tuple.
@@ -106,7 +104,7 @@
* @throws IndexException
* If there is no matching tuple in the index.
*/
- boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+ boolean tryDelete(ITupleReference tuple) throws HyracksDataException;
/**
* Attempts to update the given tuple.
@@ -122,7 +120,7 @@
* @throws IndexException
* If there is no matching tuple in the index.
*/
- boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException;
+ boolean tryUpdate(ITupleReference tuple) throws HyracksDataException;
/**
* This operation is only supported by indexes with the notion of a unique key.
@@ -139,7 +137,7 @@
* @throws IndexException
* If there is no matching tuple in the index.
*/
- boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ boolean tryUpsert(ITupleReference tuple) throws HyracksDataException;
/**
* Delete the tuple from the memory component only. Don't replace with antimatter tuple
@@ -150,7 +148,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+ void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException;
/**
* Insert a new tuple (failing if duplicate key entry is found)
@@ -160,7 +158,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ void forceInsert(ITupleReference tuple) throws HyracksDataException;
/**
* Force deleting an index entry even if the memory component is full
@@ -171,7 +169,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException;
+ void forceDelete(ITupleReference tuple) throws HyracksDataException;
/**
* Force upserting the tuple into the memory component even if it is full
@@ -180,7 +178,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException;
+ void forceUpsert(ITupleReference tuple) throws HyracksDataException;
/**
* Schedule a replication for disk components
@@ -202,7 +200,7 @@
* @throws HyracksDataException
* @throws TreeIndexException
*/
- void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ void flush(ILSMIOOperation operation) throws HyracksDataException;
/**
* Merge all on-disk components.
@@ -210,7 +208,7 @@
* @throws HyracksDataException
* @throws TreeIndexException
*/
- void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException;
+ void merge(ILSMIOOperation operation) throws HyracksDataException;
/**
* Update the metadata of the memory component, wait for the new component if the current one is UNWRITABLE
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
index ce31e2e..54d64af 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexFileManager.java
@@ -24,7 +24,6 @@
import java.util.List;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
/**
@@ -48,7 +47,7 @@
// Deletes invalid files, and returns list of valid files from baseDir.
// The returned valid files are correctly sorted (based on the recency of data).
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException;
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException;
public Comparator<String> getFileNameComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
index c2bd45d..83ba5b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMMergePolicy.java
@@ -22,10 +22,9 @@
import java.util.Map;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
public interface ILSMMergePolicy {
- void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException, IndexException;
+ void diskComponentAdded(ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException;
void configure(Map<String, String> properties);
@@ -54,5 +53,5 @@
* @throws HyracksDataException
* @throws IndexException
*/
- boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException;
+ boolean isMergeLagging(ILSMIndex index) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
index 65e91b2..5679dc9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ITwoPCIndex.java
@@ -23,8 +23,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
// An interface containing the new methods introduced for bulk transactions
public interface ITwoPCIndex {
@@ -33,28 +31,27 @@
* and the bulk loaded component is hidden from the index
*/
public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException;
+ boolean checkIfEmptyIndex) throws HyracksDataException;
/**
* This function is used to commit the previous transaction if it was resulted in creating any components
*/
- public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException;
+ public void commitTransaction() throws HyracksDataException;
/**
* This function is used to abort the last transaction
*/
- public void abortTransaction() throws TreeIndexException;
+ public void abortTransaction() throws HyracksDataException;
/**
* This function is used to recover a transaction if the system crashed after the decision to commit
*/
- public void recoverTransaction() throws TreeIndexException;
+ public void recoverTransaction() throws HyracksDataException;
/**
* This function is used to add the committed disk component to the appropriate list and reflect the changes
*/
- public void commitTransactionDiskComponent(ILSMDiskComponent newComponent)
- throws IndexException, HyracksDataException;
+ public void commitTransactionDiskComponent(ILSMDiskComponent newComponent) throws HyracksDataException;
/**
* This function is used to create a version specific accessor to search a specific version
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
index 36c08668..fca010f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
@@ -41,7 +41,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
@@ -121,7 +120,7 @@
protected void cleanupAndGetValidFilesInternal(FilenameFilter filter,
TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
String[] files = listDirFiles(baseDir, filter);
File dir = new File(baseDir);
for (String fileName : files) {
@@ -159,8 +158,7 @@
}
protected void validateFiles(HashSet<String> groundTruth, ArrayList<ComparableFileName> validFiles,
- FilenameFilter filter, TreeIndexFactory<? extends ITreeIndex> treeFactory)
- throws HyracksDataException, IndexException {
+ FilenameFilter filter, TreeIndexFactory<? extends ITreeIndex> treeFactory) throws HyracksDataException {
ArrayList<ComparableFileName> tmpAllInvListsFiles = new ArrayList<>();
cleanupAndGetValidFilesInternal(filter, treeFactory, tmpAllInvListsFiles);
for (ComparableFileName cmpFileName : tmpAllInvListsFiles) {
@@ -228,7 +226,7 @@
}
@Override
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
List<LSMComponentFileReferences> validFiles = new ArrayList<>();
ArrayList<ComparableFileName> allFiles = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
index b3e1f6f..767699d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ConstantMergePolicy.java
@@ -23,11 +23,10 @@
import java.util.Map;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -36,8 +35,7 @@
private int numComponents;
@Override
- public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
- throws HyracksDataException, IndexException {
+ public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
List<ILSMDiskComponent> immutableComponents = index.getImmutableComponents();
if (!areComponentsMergable(immutableComponents)) {
@@ -61,7 +59,7 @@
}
@Override
- public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException {
+ public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException {
// see PrefixMergePolicy.isMergeLagging() for the rationale behind this code.
/**
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index c6346cc..e6c8186 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@ -22,15 +22,15 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
@@ -114,7 +114,7 @@
}
private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMDiskComponent newComponent,
- boolean failedOperation) throws HyracksDataException, IndexException {
+ boolean failedOperation) throws HyracksDataException {
/**
* FLUSH and MERGE operations should always exit the components
* to notify waiting threads.
@@ -168,25 +168,24 @@
}
@Override
- public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
- throw new IndexException("2PC LSM Inedx doesn't support modify");
+ public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException {
+ throw HyracksDataException.create(ErrorCode.MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
}
@Override
public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
- throws HyracksDataException, IndexException {
- throw new IndexException("2PC LSM Inedx doesn't support modify");
+ throws HyracksDataException {
+ throw HyracksDataException.create(ErrorCode.MODIFY_NOT_SUPPORTED_IN_EXTERNAL_INDEX);
}
@Override
public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMOperationType opType = LSMOperationType.SEARCH;
getAndEnterComponents(ctx, opType, false);
try {
lsmIndex.search(ctx, cursor, pred);
- } catch (HyracksDataException | IndexException e) {
+ } catch (Exception e) {
exitComponents(ctx, opType, null, true);
throw e;
}
@@ -197,7 +196,7 @@
if (ctx.getOperation() == IndexOperation.SEARCH) {
try {
exitComponents(ctx, LSMOperationType.SEARCH, null, false);
- } catch (IndexException e) {
+ } catch (Exception e) {
throw new HyracksDataException(e);
}
}
@@ -205,7 +204,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
callback.afterFinalize(LSMOperationType.MERGE, null);
return;
@@ -215,7 +214,7 @@
@Override
public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
fullMergeIsRequested.set(true);
if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
// If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
@@ -228,8 +227,7 @@
}
@Override
- public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
- throws HyracksDataException, IndexException {
+ public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
}
@@ -249,7 +247,7 @@
}
@Override
- public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException, IndexException {
+ public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException {
lsmIndex.markAsValid(c);
synchronized (opTracker) {
lsmIndex.addDiskComponent(c);
@@ -268,7 +266,7 @@
// 1. this needs synchronization since others might be accessing the index (specifically merge operations that might change the lists of components)
// 2. the actions taken by the index itself are different
// 3. the component has already been marked valid by the bulk update operation
- public void addTransactionComponents(ILSMDiskComponent newComponent) throws HyracksDataException, IndexException {
+ public void addTransactionComponents(ILSMDiskComponent newComponent) throws HyracksDataException {
ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
synchronized (opTracker) {
List<ILSMDiskComponent> newerList;
@@ -305,8 +303,7 @@
}
@Override
- public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
- throws HyracksDataException, IndexException {
+ public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 01e85d7..494ba27 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -32,7 +32,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -189,7 +188,7 @@
}
private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMDiskComponent newComponent,
- boolean failedOperation) throws HyracksDataException, IndexException {
+ boolean failedOperation) throws HyracksDataException {
/**
* FLUSH and MERGE operations should always exit the components
* to notify waiting threads.
@@ -348,15 +347,14 @@
}
@Override
- public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException {
LSMOperationType opType = LSMOperationType.FORCE_MODIFICATION;
modify(ctx, false, tuple, opType);
}
@Override
public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMOperationType opType = LSMOperationType.MODIFICATION;
return modify(ctx, tryOperation, tuple, opType);
}
@@ -378,8 +376,6 @@
private void exitAndComplete(ILSMIndexOperationContext ctx, LSMOperationType op) throws HyracksDataException {
try {
exitComponents(ctx, op, null, false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
} finally {
opTracker.completeOperation(null, op, null, ctx.getModificationCallback());
}
@@ -400,7 +396,7 @@
}
private boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple,
- LSMOperationType opType) throws HyracksDataException, IndexException {
+ LSMOperationType opType) throws HyracksDataException {
if (!lsmIndex.isMemoryComponentsAllocated()) {
lsmIndex.allocateMemoryComponents();
}
@@ -424,14 +420,14 @@
@Override
public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMOperationType opType = LSMOperationType.SEARCH;
ctx.setSearchPredicate(pred);
getAndEnterComponents(ctx, opType, false);
try {
ctx.getSearchOperationCallback().before(pred.getLowKey());
lsmIndex.search(ctx, cursor, pred);
- } catch (HyracksDataException | IndexException e) {
+ } catch (Exception e) {
exitComponents(ctx, opType, null, true);
throw e;
}
@@ -442,7 +438,7 @@
if (ctx.getOperation() == IndexOperation.SEARCH) {
try {
exitComponents(ctx, LSMOperationType.SEARCH, null, false);
- } catch (IndexException e) {
+ } catch (Exception e) {
throw new HyracksDataException(e);
}
}
@@ -459,8 +455,7 @@
}
@Override
- public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
- throws HyracksDataException, IndexException {
+ public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Started a flush operation for index: " + lsmIndex + " ...");
}
@@ -484,7 +479,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
callback.afterFinalize(LSMOperationType.MERGE, null);
return;
@@ -494,7 +489,7 @@
@Override
public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
fullMergeIsRequested.set(true);
if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
// If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
@@ -507,8 +502,7 @@
}
@Override
- public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
- throws HyracksDataException, IndexException {
+ public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
}
@@ -531,7 +525,7 @@
}
@Override
- public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException, IndexException {
+ public void addBulkLoadedComponent(ILSMDiskComponent c) throws HyracksDataException {
lsmIndex.markAsValid(c);
synchronized (opTracker) {
lsmIndex.addDiskComponent(c);
@@ -570,11 +564,7 @@
@Override
public void endReplication(ILSMIndexOperationContext ctx) throws HyracksDataException {
- try {
- exitComponents(ctx, LSMOperationType.REPLICATE, null, false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ exitComponents(ctx, LSMOperationType.REPLICATE, null, false);
}
protected void validateOperationEnterComponentsState(ILSMIndexOperationContext ctx) throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
index befdd85..4c3a577 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -62,10 +61,10 @@
return opCtx;
}
- public void initPriorityQueue() throws HyracksDataException, IndexException {
+ public void initPriorityQueue() throws HyracksDataException {
int pqInitSize = (rangeCursors.length > 0) ? rangeCursors.length : 1;
if (outputPriorityQueue == null) {
- outputPriorityQueue = new PriorityQueue<PriorityQueueElement>(pqInitSize, pqCmp);
+ outputPriorityQueue = new PriorityQueue<>(pqInitSize, pqCmp);
pqes = new PriorityQueueElement[pqInitSize];
for (int i = 0; i < pqInitSize; i++) {
pqes[i] = new PriorityQueueElement(i);
@@ -98,7 +97,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
outputElement = null;
needPush = false;
@@ -121,7 +120,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
checkPriorityQueue();
return !outputPriorityQueue.isEmpty();
}
@@ -170,7 +169,7 @@
return outputElement.getTuple();
}
- protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException, IndexException {
+ protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException {
int cursorIndex = e.getCursorIndex();
if (rangeCursors[cursorIndex].hasNext()) {
rangeCursors[cursorIndex].next();
@@ -182,11 +181,11 @@
return false;
}
- protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+ protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
return ((ILSMTreeTupleReference) checkElement.getTuple()).isAntimatter();
}
- protected void checkPriorityQueue() throws HyracksDataException, IndexException {
+ protected void checkPriorityQueue() throws HyracksDataException {
while (!outputPriorityQueue.isEmpty() || (needPush == true)) {
if (!outputPriorityQueue.isEmpty()) {
PriorityQueueElement checkElement = outputPriorityQueue.peek();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
index 1f93fc5..8293f4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMTreeIndexAccessor.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -46,74 +45,74 @@
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
// Update is the same as insert.
ctx.setOperation(IndexOperation.UPDATE);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.UPSERT);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryInsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException {
// Update is the same as insert.
ctx.setOperation(IndexOperation.UPDATE);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.UPSERT);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
ctx.setOperation(IndexOperation.SEARCH);
lsmHarness.search(ctx, cursor, searchPred);
}
@Override
- public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public void flush(ILSMIOOperation operation) throws HyracksDataException {
lsmHarness.flush(ctx, operation);
}
@Override
- public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public void merge(ILSMIOOperation operation) throws HyracksDataException {
ctx.setOperation(IndexOperation.MERGE);
lsmHarness.merge(ctx, operation);
}
@Override
- public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void physicalDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.PHYSICALDELETE);
lsmHarness.modify(ctx, false, tuple);
}
@@ -126,7 +125,7 @@
@Override
public void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ctx.setOperation(IndexOperation.MERGE);
ctx.getComponentsToBeMerged().clear();
ctx.getComponentsToBeMerged().addAll(components);
@@ -143,40 +142,40 @@
}
@Override
- public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
+ public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException {
ctx.setOperation(IndexOperation.FULL_MERGE);
lsmHarness.scheduleFullMerge(ctx, callback);
}
@Override
- public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.PHYSICALDELETE);
lsmHarness.forceModify(ctx, tuple);
}
@Override
- public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceInsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
lsmHarness.forceModify(ctx, tuple);
}
@Override
- public void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceUpsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.UPSERT);
lsmHarness.forceModify(ctx, tuple);
}
@Override
- public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
lsmHarness.forceModify(ctx, tuple);
}
@Override
public void updateMeta(IValueReference key, IValueReference value) throws HyracksDataException {
- // a hack because delete only gets the memory component
+ // a hack because delete only gets the memory component
ctx.setOperation(IndexOperation.DELETE);
- lsmHarness.updateMeta(ctx,key,value);
+ lsmHarness.updateMeta(ctx, key, value);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
index 86be9c8..94587b8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/NoMergePolicy.java
@@ -21,15 +21,13 @@
import java.util.Map;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
public class NoMergePolicy implements ILSMMergePolicy {
@Override
- public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
- throws HyracksDataException, IndexException {
+ public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
// Do nothing
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
index 5f36339..23646b9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/PrefixMergePolicy.java
@@ -25,11 +25,10 @@
import java.util.Map;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
@@ -40,8 +39,7 @@
private int maxToleranceComponentCount;
@Override
- public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested)
- throws HyracksDataException, IndexException {
+ public void diskComponentAdded(final ILSMIndex index, boolean fullMergeIsRequested) throws HyracksDataException {
ArrayList<ILSMDiskComponent> immutableComponents = new ArrayList<>(index.getImmutableComponents());
@@ -66,7 +64,7 @@
}
@Override
- public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException, IndexException {
+ public boolean isMergeLagging(ILSMIndex index) throws HyracksDataException {
/**
* [for flow-control purpose]
@@ -221,7 +219,7 @@
* @throws HyracksDataException
* @throws IndexException
*/
- private boolean scheduleMerge(final ILSMIndex index) throws HyracksDataException, IndexException {
+ private boolean scheduleMerge(final ILSMIndex index) throws HyracksDataException {
// 1. Look at the candidate components for merging in oldest-first order. If one exists, identify the prefix of the sequence of
// all such components for which the sum of their sizes exceeds MaxMrgCompSz. Schedule a merge of those components into a new component.
// 2. If a merge from 1 doesn't happen, see if the set of candidate components for merging exceeds MaxTolCompCnt. If so, schedule
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
index 449c8f9..3a368d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndex.java
@@ -25,13 +25,12 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
public interface IInvertedIndex extends IIndex {
IInvertedListCursor createInvertedListCursor();
void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey, IIndexOperationContext ictx)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
ITypeTraits[] getInvListTypeTraits();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
index 0d15986..f78eda9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexAccessor.java
@@ -24,16 +24,14 @@
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
public interface IInvertedIndexAccessor extends IIndexAccessor {
public IInvertedListCursor createInvertedListCursor();
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
- throws HyracksDataException, IndexException;
+ throws HyracksDataException;
public IIndexCursor createRangeSearchCursor();
- public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
- HyracksDataException;
+ public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
index c4269272..17e8ad2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedIndexSearcher.java
@@ -26,13 +26,12 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
public interface IInvertedIndexSearcher {
- public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred, IIndexOperationContext ictx)
- throws HyracksDataException, IndexException;
+ public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
+ IIndexOperationContext ictx) throws HyracksDataException;
public IFrameTupleAccessor createResultFrameTupleAccessor();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
index 9ec22d6..6ab6933 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IInvertedListCursor.java
@@ -22,17 +22,16 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
public interface IInvertedListCursor extends Comparable<IInvertedListCursor> {
public void reset(int startPageId, int endPageId, int startOff, int numElements);
- public void pinPages() throws HyracksDataException, IndexException;
+ public void pinPages() throws HyracksDataException;
public void unpinPages() throws HyracksDataException;
- public boolean hasNext() throws HyracksDataException, IndexException;
+ public boolean hasNext() throws HyracksDataException;
public void next() throws HyracksDataException;
@@ -47,11 +46,11 @@
public int getStartOff();
- public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException, IndexException;
+ public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException;
// for debugging
@SuppressWarnings("rawtypes")
- public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException;
+ public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException;
@SuppressWarnings("rawtypes")
public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
index b7bada3..df8e6f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/api/IPartitionedInvertedIndex.java
@@ -19,17 +19,16 @@
package org.apache.hyracks.storage.am.lsm.invertedindex.api;
-import java.util.ArrayList;
+import java.util.List;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
public interface IPartitionedInvertedIndex {
public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
- ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException;
+ List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException;
public boolean isEmpty();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
index fe5e94e..95010cc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
@@ -78,25 +77,19 @@
@Override
public IIndex createIndexInstance() throws HyracksDataException {
IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
- try {
- IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
- IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
- FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc, ctx.getTaskAttemptId()
- .getTaskId().getPartition(), ctx.getIOManager());
- LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(ctx.getIOManager(),
- virtualBufferCaches,
- diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
- invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
- invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
- diskBufferCache, fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
- opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
- ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
- filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
- invertedIndexFieldsForNonBulkLoadOps, durable, (IMetadataPageManagerFactory) opDesc
- .getPageManagerFactory());
- return invIndex;
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+ FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc,
+ ctx.getTaskAttemptId().getTaskId().getPartition(), ctx.getIOManager());
+ LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(ctx.getIOManager(), virtualBufferCaches,
+ diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
+ invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
+ invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache,
+ fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
+ ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+ filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable,
+ (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
+ return invIndex;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
index 34c5810..07ca516 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
@@ -77,24 +76,19 @@
@Override
public IIndex createIndexInstance() throws HyracksDataException {
IInvertedIndexOperatorDescriptor invIndexOpDesc = (IInvertedIndexOperatorDescriptor) opDesc;
- try {
- IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
- IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
- FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc, ctx.getTaskAttemptId()
- .getTaskId().getPartition(), ctx.getIOManager());
- PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ctx
- .getIOManager(),
- virtualBufferCaches, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
- invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
- invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
- diskBufferCache, fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
- opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
- ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits,
- filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
- invertedIndexFieldsForNonBulkLoadOps, durable, opDesc.getPageManagerFactory());
- return invIndex;
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+ FileReference fileRef = IndexFileNameUtil.getIndexAbsoluteFileRef(invIndexOpDesc,
+ ctx.getTaskAttemptId().getTaskId().getPartition(), ctx.getIOManager());
+ PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ctx.getIOManager(),
+ virtualBufferCaches, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
+ invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
+ invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache,
+ fileRef.getFile().getAbsolutePath(), bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory.getOperationTracker(ctx.getJobletContext().getServiceContext()), ioScheduler,
+ ioOpCallbackFactory.createIoOpCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+ filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable,
+ opDesc.getPageManagerFactory());
+ return invIndex;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
deleted file mode 100644
index 14187f6..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/InvertedIndexException.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-import org.apache.hyracks.storage.am.common.api.IndexException;
-
-public class InvertedIndexException extends IndexException {
- private static final long serialVersionUID = 1L;
-
- public InvertedIndexException(Exception e) {
- super(e);
- }
-
- public InvertedIndexException(String msg) {
- super(msg);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
deleted file mode 100644
index 86a0287..0000000
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/exceptions/OccurrenceThresholdPanicException.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.storage.am.lsm.invertedindex.exceptions;
-
-
-
-public class OccurrenceThresholdPanicException extends InvertedIndexException {
- private static final long serialVersionUID = 1L;
-
- public OccurrenceThresholdPanicException(String msg) {
- super(msg);
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index fc0e4ec..cc2e7fb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -22,9 +22,12 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -46,8 +49,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
@@ -89,6 +90,7 @@
import org.apache.hyracks.storage.common.file.IFileMapProvider;
public class LSMInvertedIndex extends AbstractLSMIndex implements IInvertedIndex {
+ private static final Logger LOGGER = Logger.getLogger(LSMInvertedIndex.class.getName());
protected final IBinaryTokenizerFactory tokenizerFactory;
@@ -109,14 +111,13 @@
OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
BloomFilterFactory bloomFilterFactory, ILSMComponentFilterFactory filterFactory,
ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
- double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
- IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
- IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
- ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, int[] filterFields,
- int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable)
- throws IndexException, HyracksDataException {
+ double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
+ ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+ ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
+ IBinaryTokenizerFactory tokenizerFactory, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
+ int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
+ boolean durable) throws HyracksDataException {
super(ioManager, virtualBufferCaches, diskInvIndexFactory.getBufferCache(), fileManager, diskFileMapProvider,
bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback, filterFrameFactory,
filterManager, filterFields, durable);
@@ -165,27 +166,18 @@
if (isActivated) {
throw new HyracksDataException("Failed to activate the index since it is already activated.");
}
-
- try {
- List<ILSMDiskComponent> immutableComponents = diskComponents;
- immutableComponents.clear();
- List<LSMComponentFileReferences> validFileReferences = fileManager.cleanupAndGetValidFiles();
- for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
- LSMInvertedIndexDiskComponent component;
- try {
- component = createDiskInvIndexComponent(componentFactory,
- lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getDeleteIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
- immutableComponents.add(component);
- }
- isActivated = true;
- } catch (IndexException e) {
- throw new HyracksDataException(e);
+ List<ILSMDiskComponent> immutableComponents = diskComponents;
+ immutableComponents.clear();
+ List<LSMComponentFileReferences> validFileReferences = fileManager.cleanupAndGetValidFiles();
+ for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
+ LSMInvertedIndexDiskComponent component;
+ component = createDiskInvIndexComponent(componentFactory,
+ lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getDeleteIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
+ immutableComponents.add(component);
}
+ isActivated = true;
}
@Override
@@ -215,8 +207,7 @@
}
if (flushOnExit) {
BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
- ILSMIndexAccessor accessor =
- createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
accessor.scheduleFlush(cb);
try {
cb.waitForIO();
@@ -322,8 +313,7 @@
* - Insert key into deleted-keys BTree.
*/
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
LSMInvertedIndexOpContext ctx = (LSMInvertedIndexOpContext) ictx;
// TODO: This is a hack to support logging properly in ASTERIX.
// The proper undo operations are only dependent on the after image so
@@ -355,8 +345,12 @@
ctx.keysOnlyTuple.reset(indexTuple);
try {
ctx.currentDeletedKeysBTreeAccessors.insert(ctx.keysOnlyTuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // Key has already been deleted.
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ // Key has already been deleted.
+ LOGGER.log(Level.WARNING, "Failure during index delete operation", e);
+ throw e;
+ }
}
break;
}
@@ -373,7 +367,7 @@
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
int numComponents = operationalComponents.size();
boolean includeMutableComponent = false;
@@ -387,9 +381,8 @@
IIndexAccessor invIndexAccessor = ((LSMInvertedIndexMemoryComponent) component).getInvIndex()
.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
indexAccessors.add(invIndexAccessor);
- IIndexAccessor deletedKeysAccessor =
- ((LSMInvertedIndexMemoryComponent) component).getDeletedKeysBTree()
- .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ IIndexAccessor deletedKeysAccessor = ((LSMInvertedIndexMemoryComponent) component).getDeletedKeysBTree()
+ .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
deletedKeysBTreeAccessors.add(deletedKeysAccessor);
} else {
IIndexAccessor invIndexAccessor = ((LSMInvertedIndexDiskComponent) component).getInvIndex()
@@ -465,7 +458,7 @@
}
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
LSMInvertedIndexFlushOperation flushOp = (LSMInvertedIndexFlushOperation) operation;
// Create an inverted index instance to be bulk loaded.
@@ -553,7 +546,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMInvertedIndexOpContext ictx =
createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
ictx.setOperation(IndexOperation.MERGE);
@@ -569,8 +562,7 @@
OnDiskInvertedIndex lastInvIndex = (OnDiskInvertedIndex) lastComponent.getInvIndex();
String lastFileName = lastInvIndex.getBTree().getFileReference().getFile().getName();
- LSMComponentFileReferences relMergeFileRefs =
- fileManager.getRelMergeFileReference(firstFileName, lastFileName);
+ LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(firstFileName, lastFileName);
ILSMIndexAccessor accessor = new LSMInvertedIndexAccessor(lsmHarness, ctx);
ioScheduler.scheduleOperation(new LSMInvertedIndexMergeOperation(accessor, mergingComponents, cursor,
relMergeFileRefs.getInsertIndexFileReference(), relMergeFileRefs.getDeleteIndexFileReference(),
@@ -578,7 +570,7 @@
}
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMInvertedIndexMergeOperation mergeOp = (LSMInvertedIndexMergeOperation) operation;
IIndexCursor cursor = mergeOp.getCursor();
@@ -670,12 +662,8 @@
@Override
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws IndexException {
- try {
- return new LSMInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex);
- } catch (HyracksDataException e) {
- throw new IndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex);
}
public class LSMInvertedIndexBulkLoader implements IIndexBulkLoader {
@@ -689,17 +677,13 @@
public final MultiComparator filterCmp;
public LSMInvertedIndexBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws IndexException, HyracksDataException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new IndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
// Note that by using a flush target file name, we state that the
// new bulk loaded tree is "newer" than any other merged tree.
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new IndexException(e);
- }
+ component = createBulkLoadTarget();
invIndexBulkLoader = ((LSMInvertedIndexDiskComponent) component).getInvIndex().createBulkLoader(fillFactor,
verifyInput, numElementsHint, false);
@@ -719,7 +703,7 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
ITupleReference t;
if (indexTuple != null) {
@@ -736,7 +720,7 @@
component.getLSMComponentFilter().update(filterTuple, filterCmp);
}
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -758,7 +742,7 @@
}
@Override
- public void end() throws IndexException, HyracksDataException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (component.getLSMComponentFilter() != null) {
filterManager.writeFilter(component.getLSMComponentFilter(),
@@ -788,7 +772,7 @@
}
}
- private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+ private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
return createDiskInvIndexComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -797,7 +781,7 @@
}
protected InMemoryInvertedIndex createInMemoryInvertedIndex(IVirtualBufferCache virtualBufferCache,
- VirtualFreePageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
+ VirtualFreePageManager virtualFreePageManager, int id) throws HyracksDataException {
return InvertedIndexUtils.createInMemoryBTreeInvertedindex(virtualBufferCache, virtualFreePageManager,
invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
@@ -805,7 +789,7 @@
protected LSMInvertedIndexDiskComponent createDiskInvIndexComponent(ILSMDiskComponentFactory factory,
FileReference dictBTreeFileRef, FileReference btreeFileRef, FileReference bloomFilterFileRef,
- boolean create) throws HyracksDataException, IndexException {
+ boolean create) throws HyracksDataException {
LSMInvertedIndexDiskComponent component = (LSMInvertedIndexDiskComponent) factory
.createComponent(new LSMComponentFileReferences(dictBTreeFileRef, btreeFileRef, bloomFilterFileRef));
if (create) {
@@ -843,7 +827,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
- IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+ IIndexOperationContext ictx) throws HyracksDataException {
throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
index 604a57c..ec869fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexAccessor.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
@@ -48,31 +47,31 @@
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
lsmHarness.modify(ctx, false, tuple);
}
@Override
- public boolean tryInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryInsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
return lsmHarness.modify(ctx, true, tuple);
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
ctx.setOperation(IndexOperation.SEARCH);
lsmHarness.search(ctx, cursor, searchPred);
}
@@ -89,13 +88,13 @@
}
@Override
- public void flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public void flush(ILSMIOOperation operation) throws HyracksDataException {
lsmHarness.flush(ctx, operation);
}
@Override
public void scheduleMerge(ILSMIOOperationCallback callback, List<ILSMDiskComponent> components)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ctx.setOperation(IndexOperation.MERGE);
ctx.getComponentsToBeMerged().clear();
ctx.getComponentsToBeMerged().addAll(components);
@@ -112,13 +111,13 @@
}
@Override
- public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException, IndexException {
+ public void scheduleFullMerge(ILSMIOOperationCallback callback) throws HyracksDataException {
ctx.setOperation(IndexOperation.FULL_MERGE);
lsmHarness.scheduleFullMerge(ctx, callback);
}
@Override
- public void merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public void merge(ILSMIOOperation operation) throws HyracksDataException {
lsmHarness.merge(ctx, operation);
}
@@ -128,50 +127,49 @@
}
@Override
- public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred)
- throws IndexException, HyracksDataException {
+ public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
search(cursor, searchPred);
}
@Override
- public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forcePhysicalDelete(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
}
@Override
- public void forceInsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceInsert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
lsmHarness.forceModify(ctx, tuple);
}
@Override
- public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
lsmHarness.forceModify(ctx, tuple);
}
@Override
- public void physicalDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void physicalDelete(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Physical delete not supported by lsm inverted index.");
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
}
@Override
- public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryUpdate(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Update not supported by lsm inverted index.");
}
@Override
- public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryUpsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
}
@@ -182,7 +180,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
throw new UnsupportedOperationException("Cannot open inverted list cursor on lsm inverted index.");
}
@@ -201,7 +199,7 @@
}
@Override
- public void forceUpsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceUpsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Upsert not supported by lsm inverted index.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
index 4ebabdb..6a875a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexDeletedKeysBTreeMergeCursor.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMIndexSearchCursor;
@@ -39,14 +38,14 @@
}
@Override
- protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+ protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
return false;
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
- IndexException {
- LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState = (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
+ LSMInvertedIndexRangeSearchCursorInitialState lsmInitialState =
+ (LSMInvertedIndexRangeSearchCursorInitialState) initialState;
cmp = lsmInitialState.getOriginalKeyComparator();
operationalComponents = lsmInitialState.getOperationalComponents();
// We intentionally set the lsmHarness to null so that we don't call lsmHarness.endSearch() because we already do that when we merge the inverted indexes.
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
index 97255f6..aeb1e16 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.BTreeFactory;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
@@ -79,8 +78,8 @@
String baseName = baseDir + ts + SPLIT_STRING + ts;
// Begin timestamp and end timestamp are identical since it is a flush
return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
- createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createFlushFile(baseName
- + SPLIT_STRING + BLOOM_FILTER_STRING));
+ createFlushFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX),
+ createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
@@ -92,12 +91,12 @@
String baseName = baseDir + firstTimestampRange[0] + SPLIT_STRING + lastTimestampRange[1];
// Get the range of timestamps by taking the earliest and the latest timestamps
return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + DICT_BTREE_SUFFIX),
- createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX), createMergeFile(baseName
- + SPLIT_STRING + BLOOM_FILTER_STRING));
+ createMergeFile(baseName + SPLIT_STRING + DELETED_KEYS_BTREE_SUFFIX),
+ createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
List<LSMComponentFileReferences> validFiles = new ArrayList<>();
ArrayList<ComparableFileName> allDictBTreeFiles = new ArrayList<>();
ArrayList<ComparableFileName> allInvListsFiles = new ArrayList<>();
@@ -133,8 +132,8 @@
if (allDictBTreeFiles.size() == 1 && allInvListsFiles.size() == 1 && allDeletedKeysBTreeFiles.size() == 1
&& allBloomFilterFiles.size() == 1) {
- validFiles.add(new LSMComponentFileReferences(allDictBTreeFiles.get(0).fileRef, allDeletedKeysBTreeFiles
- .get(0).fileRef, allBloomFilterFiles.get(0).fileRef));
+ validFiles.add(new LSMComponentFileReferences(allDictBTreeFiles.get(0).fileRef,
+ allDeletedKeysBTreeFiles.get(0).fileRef, allBloomFilterFiles.get(0).fileRef));
return validFiles;
}
@@ -184,7 +183,8 @@
invalidBloomFilterFile.delete();
} else {
// This scenario should not be possible.
- throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+ throw new HyracksDataException(
+ "Found LSM files with overlapping but not contained timestamp intervals.");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
index 03ba304..26eed04 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFlushOperation.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -69,7 +68,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.flush(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
index c22af32..ffc1e8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexMergeOperation.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -80,7 +79,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.merge(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
index 358317c..0d37056 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexRangeSearchCursor.java
@@ -28,7 +28,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -56,9 +55,9 @@
}
@Override
- public void open(ICursorInitialState initState, ISearchPredicate searchPred) throws IndexException,
- HyracksDataException {
- LSMInvertedIndexRangeSearchCursorInitialState lsmInitState = (LSMInvertedIndexRangeSearchCursorInitialState) initState;
+ public void open(ICursorInitialState initState, ISearchPredicate searchPred) throws HyracksDataException {
+ LSMInvertedIndexRangeSearchCursorInitialState lsmInitState =
+ (LSMInvertedIndexRangeSearchCursorInitialState) initState;
cmp = lsmInitState.getOriginalKeyComparator();
int numComponents = lsmInitState.getNumComponents();
rangeCursors = new IIndexCursor[numComponents];
@@ -100,7 +99,7 @@
* Check deleted-keys BTrees whether they contain the key in the checkElement's tuple.
*/
@Override
- protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+ protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
keysOnlyTuple.reset(checkElement.getTuple());
int end = checkElement.getCursorIndex();
for (int i = 0; i < end; i++) {
@@ -110,8 +109,6 @@
if (deletedKeysBTreeCursors[i].hasNext()) {
return true;
}
- } catch (IndexException e) {
- throw new HyracksDataException(e);
} finally {
deletedKeysBTreeCursors[i].close();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
index 521d81d..607f957 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexSearchCursor.java
@@ -29,14 +29,12 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
import org.apache.hyracks.storage.am.lsm.common.impls.BloomFilterAwareBTreePointSearchCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
/**
* Searches the components one-by-one, completely consuming a cursor before moving on to the next one.
@@ -82,8 +80,8 @@
// No need for a bloom filter for the in-memory BTree.
deletedKeysBTreeCursors[i] = deletedKeysBTreeAccessors.get(i).createSearchCursor(false);
} else {
- deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor((IBTreeLeafFrame) lsmInitState
- .getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
+ deletedKeysBTreeCursors[i] = new BloomFilterAwareBTreePointSearchCursor(
+ (IBTreeLeafFrame) lsmInitState.getgetDeletedKeysBTreeLeafFrameFactory().createFrame(), false,
((LSMInvertedIndexDiskComponent) operationalComponents.get(i)).getBloomFilter());
}
}
@@ -92,7 +90,7 @@
keySearchPred = new RangePredicate(null, null, true, true, keyCmp, keyCmp);
}
- protected boolean isDeleted(ITupleReference key) throws HyracksDataException, IndexException {
+ protected boolean isDeleted(ITupleReference key) throws HyracksDataException {
keySearchPred.setLowKey(key, true);
keySearchPred.setHighKey(key, true);
for (int i = 0; i < accessorIndex; i++) {
@@ -102,8 +100,6 @@
if (deletedKeysBTreeCursors[i].hasNext()) {
return true;
}
- } catch (IndexException e) {
- throw new HyracksDataException(e);
} finally {
deletedKeysBTreeCursors[i].close();
}
@@ -112,7 +108,7 @@
}
// Move to the next tuple that has not been deleted.
- private boolean nextValidTuple() throws HyracksDataException, IndexException {
+ private boolean nextValidTuple() throws HyracksDataException {
while (currentCursor.hasNext()) {
currentCursor.next();
if (!isDeleted(currentCursor.getTuple())) {
@@ -124,7 +120,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (!tupleConsumed) {
return true;
}
@@ -139,13 +135,7 @@
// Current cursor has been exhausted, switch to next accessor/cursor.
currentAccessor = indexAccessors.get(accessorIndex);
currentCursor = currentAccessor.createSearchCursor(false);
- try {
- currentAccessor.search(currentCursor, searchPred);
- } catch (OccurrenceThresholdPanicException e) {
- throw e;
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ currentAccessor.search(currentCursor, searchPred);
if (nextValidTuple()) {
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
index 0cd19c9..8cd45dc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilterFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -50,25 +49,23 @@
OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
BloomFilterFactory bloomFilterFactory, ILSMComponentFilterFactory filterFactory,
ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
- double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
- IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
- IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
- ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields, int[] filterFields,
- int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable)
- throws IndexException, HyracksDataException {
+ double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
+ ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+ ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
+ IBinaryTokenizerFactory tokenizerFactory, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
+ int[] filterFields, int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps,
+ boolean durable) throws HyracksDataException {
super(ioManager, virtualBufferCaches, diskInvIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory,
- filterFactory,
- filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider,
- invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
- mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields,
+ filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate, fileManager,
+ diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
+ tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback, invertedIndexFields, filterFields,
filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, durable);
}
@Override
protected InMemoryInvertedIndex createInMemoryInvertedIndex(IVirtualBufferCache virtualBufferCache,
- VirtualFreePageManager virtualFreePageManager, int id) throws IndexException, HyracksDataException {
+ VirtualFreePageManager virtualFreePageManager, int id) throws HyracksDataException {
return InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(virtualBufferCache,
virtualFreePageManager, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
tokenizerFactory, ioManager.resolveAbsolutePath(fileManager.getBaseDir() + "_virtual_vocab_" + id));
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
index 583c5f4..1b33e79 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndex.java
@@ -20,6 +20,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -33,9 +34,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
@@ -112,7 +110,7 @@
}
public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
ctx.tupleIter.reset(tuple);
while (ctx.tupleIter.hasNext()) {
@@ -120,16 +118,19 @@
ITupleReference insertTuple = ctx.tupleIter.getTuple();
try {
btreeAccessor.insert(insertTuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // This exception may be caused by duplicate tokens in the same insert "document".
- // We ignore such duplicate tokens in all inverted-index implementations, hence
- // we can safely ignore this exception.
+ } catch (HyracksDataException e) {
+ // This exception may be caused by duplicate tokens in the same insert "document".
+ // We ignore such duplicate tokens in all inverted-index implementations, hence
+ // we only rethrow errors other than DUPLICATE_KEY.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
}
public void delete(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
ctx.tupleIter.reset(tuple);
while (ctx.tupleIter.hasNext()) {
@@ -137,8 +138,11 @@
ITupleReference deleteTuple = ctx.tupleIter.getTuple();
try {
btreeAccessor.delete(deleteTuple);
- } catch (TreeIndexNonExistentKeyException e) {
- // Ignore this exception, since a document may have duplicate tokens.
+ } catch (HyracksDataException e) {
+ // Ignore this exception, since a document may have duplicate tokens.
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw e;
+ }
}
}
}
@@ -156,7 +160,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
- IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+ IIndexOperationContext ictx) throws HyracksDataException {
InMemoryInvertedIndexOpContext ctx = (InMemoryInvertedIndexOpContext) ictx;
ctx.setOperation(IndexOperation.SEARCH);
InMemoryInvertedListCursor inMemListCursor = (InMemoryInvertedListCursor) listCursor;
@@ -192,7 +196,7 @@
@Override
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws IndexException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
throw new UnsupportedOperationException("Bulk load not supported by in-memory inverted index.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
index ffe5259..26fa40a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedIndexAccessor.java
@@ -28,7 +28,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
@@ -57,13 +56,13 @@
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
opCtx.setOperation(IndexOperation.INSERT);
index.insert(tuple, btreeAccessor, opCtx);
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
opCtx.setOperation(IndexOperation.DELETE);
index.delete(tuple, btreeAccessor, opCtx);
}
@@ -74,7 +73,7 @@
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
}
@@ -85,7 +84,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
index.openInvertedListCursor(listCursor, searchKey, opCtx);
}
@@ -96,8 +95,7 @@
}
@Override
- public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws IndexException,
- HyracksDataException {
+ public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
btreeAccessor.search(cursor, searchPred);
}
@@ -106,12 +104,12 @@
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Update not supported by in-memory inverted index.");
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Upsert not supported by in-memory inverted index.");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
index 9299620..60f8e21 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/InMemoryInvertedListCursor.java
@@ -28,13 +28,10 @@
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.utils.TupleUtils;
import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -68,7 +65,7 @@
}
public void prepare(BTreeAccessor btreeAccessor, RangePredicate btreePred, MultiComparator tokenFieldsCmp,
- MultiComparator btreeCmp) throws HyracksDataException, IndexException {
+ MultiComparator btreeCmp) throws HyracksDataException {
// Avoid object creation if this.btreeAccessor == btreeAccessor.
if (this.btreeAccessor != btreeAccessor) {
this.btreeAccessor = btreeAccessor;
@@ -87,7 +84,7 @@
return size() - cursor.size();
}
- public void reset(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void reset(ITupleReference tuple) throws HyracksDataException {
numElements = -1;
// Copy the tokens tuple for later use in btree probes.
TupleUtils.copyTuple(tokenTupleBuilder, tuple, tuple.getFieldCount());
@@ -104,7 +101,7 @@
}
@Override
- public void pinPages() throws HyracksDataException, IndexException {
+ public void pinPages() throws HyracksDataException {
btreePred.setLowKeyComparator(tokenFieldsCmp);
btreePred.setHighKeyComparator(tokenFieldsCmp);
btreePred.setLowKey(tokenTuple, true);
@@ -122,7 +119,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
return btreeCursor.hasNext();
}
@@ -155,8 +152,6 @@
}
} catch (HyracksDataException e) {
e.printStackTrace();
- } catch (IndexException e) {
- e.printStackTrace();
} finally {
try {
countingCursor.close();
@@ -184,8 +179,7 @@
}
@Override
- public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException,
- IndexException {
+ public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) throws HyracksDataException {
// Close cursor if necessary.
unpinPages();
btreeSearchTuple.addTuple(searchTuple);
@@ -195,9 +189,9 @@
btreePred.setHighKey(btreeSearchTuple, true);
try {
btreeAccessor.search(btreeCursor, btreePred);
- } catch (TreeIndexException e) {
+ } catch (Exception e) {
btreeSearchTuple.removeLastTuple();
- throw new HyracksDataException(e);
+ throw HyracksDataException.create(e);
}
boolean containsKey = false;
try {
@@ -212,7 +206,7 @@
@SuppressWarnings("rawtypes")
@Override
- public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException, IndexException {
+ public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException {
StringBuilder strBuilder = new StringBuilder();
try {
while (btreeCursor.hasNext()) {
@@ -228,11 +222,7 @@
btreeCursor.close();
btreeCursor.reset();
}
- try {
- btreeAccessor.search(btreeCursor, btreePred);
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeAccessor.search(btreeCursor, btreePred);
return strBuilder.toString();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
index 0bea41f..24d1cdd 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java
@@ -18,7 +18,7 @@
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.inmemory;
-import java.util.ArrayList;
+import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -32,7 +32,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -59,7 +58,7 @@
@Override
public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
super.insert(tuple, btreeAccessor, ictx);
PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
PartitionedInvertedIndexTokenizingTupleIterator tupleIter =
@@ -98,7 +97,7 @@
@Override
public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
- ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
+ List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException {
short minPartitionIndex;
short maxPartitionIndex;
partitionIndexLock.readLock().lock();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
index 70a5024..1dcd18e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndex.java
@@ -46,9 +46,6 @@
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -57,7 +54,6 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.TOccurrenceSearcher;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
@@ -258,7 +254,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey,
- IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+ IIndexOperationContext ictx) throws HyracksDataException {
OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
ctx.btreePred.setLowKeyComparator(ctx.searchCmp);
ctx.btreePred.setHighKeyComparator(ctx.searchCmp);
@@ -311,7 +307,7 @@
private IFIFOPageQueue queue;
public OnDiskInvertedIndexBulkLoader(float btreeFillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex, int startPageId) throws IndexException, HyracksDataException {
+ boolean checkIfEmptyIndex, int startPageId) throws HyracksDataException {
this.verifyInput = verifyInput;
this.tokenCmp = MultiComparator.create(btree.getComparatorFactories());
this.invListCmp = MultiComparator.create(invListCmpFactories);
@@ -338,7 +334,7 @@
currentPage = bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, currentPageId));
}
- private void createAndInsertBTreeTuple() throws IndexException, HyracksDataException {
+ private void createAndInsertBTreeTuple() throws HyracksDataException {
// Build tuple.
btreeTupleBuilder.reset();
DataOutput output = btreeTupleBuilder.getDataOutput();
@@ -375,7 +371,7 @@
* Key fields of inverted list are fixed size.
*/
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
boolean firstElement = lastTupleBuilder.getSize() == 0;
boolean startNewList = firstElement;
if (!firstElement) {
@@ -416,7 +412,7 @@
if (verifyInput && lastTupleBuilder.getSize() != 0) {
if (allCmp.compare(tuple, lastTuple) <= 0) {
- throw new UnsortedInputException(
+ throw new HyracksDataException(
"Input stream given to OnDiskInvertedIndex bulk load is not sorted.");
}
}
@@ -430,7 +426,7 @@
}
@Override
- public void end() throws IndexException, HyracksDataException {
+ public void end() throws HyracksDataException {
// The last tuple builder is empty if add() was never called.
if (lastTupleBuilder.getSize() != 0) {
createAndInsertBTreeTuple();
@@ -505,10 +501,8 @@
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred)
- throws HyracksDataException, IndexException {
- searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred,
- opCtx);
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
+ searcher.search((OnDiskInvertedIndexSearchCursor) cursor, (InvertedIndexSearchPredicate) searchPred, opCtx);
}
@Override
@@ -518,7 +512,7 @@
@Override
public void openInvertedListCursor(IInvertedListCursor listCursor, ITupleReference searchKey)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
index.openInvertedListCursor(listCursor, searchKey, opCtx);
}
@@ -528,29 +522,28 @@
}
@Override
- public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred)
- throws HyracksDataException, IndexException {
+ public void rangeSearch(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
OnDiskInvertedIndexRangeSearchCursor rangeSearchCursor = (OnDiskInvertedIndexRangeSearchCursor) cursor;
rangeSearchCursor.open(null, searchPred);
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Insert not supported by inverted index.");
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Update not supported by inverted index.");
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Delete not supported by inverted index.");
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("Upsert not supported by inverted index.");
}
}
@@ -603,13 +596,9 @@
@Override
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws IndexException {
- try {
- return new OnDiskInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
- rootPageId);
- } catch (HyracksDataException e) {
- throw new InvertedIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new OnDiskInvertedIndexBulkLoader(fillFactor, verifyInput, numElementsHint, checkIfEmptyIndex,
+ rootPageId);
}
@Override
@@ -628,8 +617,7 @@
PermutingTupleReference tokenTuple = new PermutingTupleReference(fieldPermutation);
IInvertedIndexAccessor invIndexAccessor =
- (IInvertedIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ (IInvertedIndexAccessor) createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
IInvertedListCursor invListCursor = invIndexAccessor.createInvertedListCursor();
MultiComparator invListCmp = MultiComparator.create(invListCmpFactories);
@@ -667,8 +655,6 @@
invListCursor.unpinPages();
}
}
- } catch (IndexException e) {
- throw new HyracksDataException(e);
} finally {
btreeCursor.close();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
index bd6ce9b..4a74833 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/OnDiskInvertedIndexRangeSearchCursor.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
@@ -70,19 +69,15 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException, IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
this.btreePred = (RangePredicate) searchPred;
- try {
- btreeAccessor.search(btreeCursor, btreePred);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeAccessor.search(btreeCursor, btreePred);
invListCursor.pinPages();
unpinNeeded = true;
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (invListCursor.hasNext()) {
return true;
}
@@ -95,11 +90,7 @@
}
btreeCursor.next();
tokenTuple.reset(btreeCursor.getTuple());
- try {
- invIndex.openInvertedListCursor(invListCursor, tokenTuple, opCtx);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ invIndex.openInvertedListCursor(invListCursor, tokenTuple, opCtx);
invListCursor.pinPages();
invListCursor.hasNext();
unpinNeeded = true;
@@ -127,7 +118,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
if (unpinNeeded) {
invListCursor.unpinPages();
unpinNeeded = false;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
index a3a4de4..697d217 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/PartitionedOnDiskInvertedIndex.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.storage.am.lsm.invertedindex.ondisk;
-import java.util.ArrayList;
+import java.util.List;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -32,7 +32,6 @@
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListBuilder;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -70,7 +69,7 @@
@Override
public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
- ArrayList<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException, IndexException {
+ List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException {
PartitionedTOccurrenceSearcher partSearcher = (PartitionedTOccurrenceSearcher) searcher;
OnDiskInvertedIndexOpContext ctx = (OnDiskInvertedIndexOpContext) ictx;
ITupleReference lowSearchKey = null;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
index e4b220b..764d9a5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/AbstractTOccurrenceSearcher.java
@@ -42,7 +42,6 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeFrameTupleAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.FixedSizeTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
@@ -52,8 +51,8 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.util.ObjectCache;
public abstract class AbstractTOccurrenceSearcher implements IInvertedIndexSearcher {
- protected static final RecordDescriptor QUERY_TOKEN_REC_DESC = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ protected static final RecordDescriptor QUERY_TOKEN_REC_DESC =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
protected final int OBJECT_CACHE_INIT_SIZE = 10;
protected final int OBJECT_CACHE_EXPAND_SIZE = 10;
@@ -82,20 +81,20 @@
this.invIndex = invIndex;
this.invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
this.invListCursorFactory = new InvertedListCursorFactory(invIndex);
- this.invListCursorCache = new ObjectCache<IInvertedListCursor>(invListCursorFactory, OBJECT_CACHE_INIT_SIZE,
+ this.invListCursorCache = new ObjectCache<>(invListCursorFactory, OBJECT_CACHE_INIT_SIZE,
OBJECT_CACHE_EXPAND_SIZE);
- this.queryTokenFrame = new VSizeFrame(ctx);
+ this.queryTokenFrame = new VSizeFrame(ctx);
this.queryTokenAppender = new FrameTupleAppenderAccessor(QUERY_TOKEN_REC_DESC);
this.queryTokenAppender.reset(queryTokenFrame, true);
}
+ @Override
public void reset() {
searchResult.clear();
invListMerger.reset();
}
- protected void tokenizeQuery(InvertedIndexSearchPredicate searchPred) throws HyracksDataException,
- OccurrenceThresholdPanicException {
+ protected void tokenizeQuery(InvertedIndexSearchPredicate searchPred) throws HyracksDataException {
ITupleReference queryTuple = searchPred.getQueryTuple();
int queryFieldIndex = searchPred.getQueryFieldIndex();
IBinaryTokenizer queryTokenizer = searchPred.getQueryTokenizer();
@@ -144,10 +143,12 @@
}
}
+ @Override
public IFrameTupleAccessor createResultFrameTupleAccessor() {
return new FixedSizeFrameTupleAccessor(ctx.getInitialFrameSize(), searchResult.getTypeTraits());
}
+ @Override
public ITupleReference createResultFrameTupleReference() {
return new FixedSizeTupleReference(searchResult.getTypeTraits());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
index 5c916f2..55aa159 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/InvertedListMerger.java
@@ -27,8 +27,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
@@ -50,7 +48,7 @@
}
public void merge(ArrayList<IInvertedListCursor> invListCursors, int occurrenceThreshold, int numPrefixLists,
- SearchResult searchResult) throws HyracksDataException, IndexException {
+ SearchResult searchResult) throws HyracksDataException {
Collections.sort(invListCursors);
int numInvLists = invListCursors.size();
SearchResult result = null;
@@ -88,7 +86,7 @@
protected void mergeSuffixListProbe(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
int prevBufIdx = 0;
int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -104,7 +102,8 @@
while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
if (invListCursor.containsKey(resultTuple, invListCmp)) {
count++;
@@ -129,7 +128,7 @@
protected void mergeSuffixListScan(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
SearchResult newSearchResult, int invListIx, int numInvLists, int occurrenceThreshold)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
int prevBufIdx = 0;
int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -147,8 +146,9 @@
int invListTidx = 0;
int invListNumTuples = invListCursor.size();
- if (invListCursor.hasNext())
+ if (invListCursor.hasNext()) {
invListCursor.next();
+ }
while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
@@ -158,7 +158,8 @@
int cmp = invListCmp.compare(invListTuple, resultTuple);
if (cmp == 0) {
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
newSearchResult.append(resultTuple, count);
advanceCursor = true;
advancePrevResult = true;
@@ -167,7 +168,8 @@
advanceCursor = true;
advancePrevResult = false;
} else {
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
if (count + numInvLists - invListIx > occurrenceThreshold) {
newSearchResult.append(resultTuple, count);
}
@@ -201,7 +203,8 @@
resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
if (count + numInvLists - invListIx > occurrenceThreshold) {
newSearchResult.append(resultTuple, count);
}
@@ -219,7 +222,7 @@
}
protected void mergePrefixList(IInvertedListCursor invListCursor, SearchResult prevSearchResult,
- SearchResult newSearchResult) throws HyracksDataException, IndexException {
+ SearchResult newSearchResult) throws HyracksDataException {
int prevBufIdx = 0;
int maxPrevBufIdx = prevSearchResult.getCurrentBufferIndex();
@@ -237,8 +240,9 @@
int invListTidx = 0;
int invListNumTuples = invListCursor.size();
- if (invListCursor.hasNext())
+ if (invListCursor.hasNext()) {
invListCursor.next();
+ }
while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
@@ -247,7 +251,8 @@
int cmp = invListCmp.compare(invListTuple, resultTuple);
if (cmp == 0) {
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
newSearchResult.append(resultTuple, count);
advanceCursor = true;
advancePrevResult = true;
@@ -258,7 +263,8 @@
advanceCursor = true;
advancePrevResult = false;
} else {
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
newSearchResult.append(resultTuple, count);
advanceCursor = false;
advancePrevResult = true;
@@ -300,7 +306,8 @@
resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
- int count = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ int count = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
newSearchResult.append(resultTuple, count);
resultTidx++;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
index 7c7e781..9221e1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/PartitionedTOccurrenceSearcher.java
@@ -23,19 +23,18 @@
import java.util.ArrayList;
import org.apache.hyracks.api.context.IHyracksCommonContext;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.primitive.ShortPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.ConcatenatingTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
public class PartitionedTOccurrenceSearcher extends AbstractTOccurrenceSearcher {
@@ -50,7 +49,7 @@
// Inverted list cursors ordered by token. Used to read relevant inverted-list partitions of one token one after
// the other for better I/O performance (because the partitions of one inverted list are stored contiguously in a file).
// The above implies that we currently require holding all inverted list for a query in memory.
- protected final ArrayList<IInvertedListCursor> cursorsOrderedByTokens = new ArrayList<IInvertedListCursor>();
+ protected final ArrayList<IInvertedListCursor> cursorsOrderedByTokens = new ArrayList<>();
protected final InvertedListPartitions partitions = new InvertedListPartitions();
public PartitionedTOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex)
@@ -87,8 +86,9 @@
}
}
+ @Override
public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
- IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+ IIndexOperationContext ictx) throws HyracksDataException {
IPartitionedInvertedIndex partInvIndex = (IPartitionedInvertedIndex) invIndex;
searchResult.reset();
if (partInvIndex.isEmpty()) {
@@ -104,7 +104,7 @@
occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
if (occurrenceThreshold <= 0) {
- throw new OccurrenceThresholdPanicException("Merge Threshold is <= 0. Failing Search.");
+ throw HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION);
}
short maxCountPossible = numQueryTokens;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
index 39418f3..508a51d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
@@ -22,25 +22,25 @@
import java.util.ArrayList;
import org.apache.hyracks.api.context.IHyracksCommonContext;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
import org.apache.hyracks.storage.am.lsm.invertedindex.ondisk.OnDiskInvertedIndexSearchCursor;
public class TOccurrenceSearcher extends AbstractTOccurrenceSearcher {
- protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<IInvertedListCursor>();
+ protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<>();
public TOccurrenceSearcher(IHyracksCommonContext ctx, IInvertedIndex invIndex) throws HyracksDataException {
super(ctx, invIndex);
}
+ @Override
public void search(OnDiskInvertedIndexSearchCursor resultCursor, InvertedIndexSearchPredicate searchPred,
- IIndexOperationContext ictx) throws HyracksDataException, IndexException {
+ IIndexOperationContext ictx) throws HyracksDataException {
tokenizeQuery(searchPred);
int numQueryTokens = queryTokenAppender.getTupleCount();
@@ -56,7 +56,7 @@
IInvertedIndexSearchModifier searchModifier = searchPred.getSearchModifier();
occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
if (occurrenceThreshold <= 0) {
- throw new OccurrenceThresholdPanicException("Merge threshold is <= 0. Failing Search.");
+ throw HyracksDataException.create(ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION);
}
int numPrefixLists = searchModifier.getNumPrefixLists(occurrenceThreshold, invListCursors.size());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
index c6a552c..075cded 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
@@ -34,7 +34,6 @@
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -69,8 +68,8 @@
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
FileReference btreeFileRef) throws HyracksDataException {
- return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits,
- invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
+ return new InMemoryInvertedIndex(memBufferCache, virtualFreePageManager, invListTypeTraits, invListCmpFactories,
+ tokenTypeTraits, tokenCmpFactories, tokenizerFactory, btreeFileRef);
}
public static InMemoryInvertedIndex createPartitionedInMemoryBTreeInvertedindex(IBufferCache memBufferCache,
@@ -86,7 +85,7 @@
IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
- IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+ IPageManagerFactory pageManagerFactory) throws HyracksDataException {
IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
return new OnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits, invListCmpFactories,
@@ -97,7 +96,7 @@
IBufferCache bufferCache, IFileMapProvider fileMapProvider, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, FileReference invListsFile,
- IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+ IPageManagerFactory pageManagerFactory) throws HyracksDataException {
IInvertedListBuilder builder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
FileReference btreeFile = getBTreeFile(ioManager, invListsFile);
return new PartitionedOnDiskInvertedIndex(bufferCache, fileMapProvider, builder, invListTypeTraits,
@@ -109,10 +108,9 @@
return ioManager.resolveAbsolutePath(invListsFile.getFile().getPath() + "_btree");
}
- public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
- IBinaryComparatorFactory[] invListCmpFactories, IBufferCache diskBufferCache,
- IPageManagerFactory freePageManagerFactory) throws HyracksDataException {
+ public static BTreeFactory createDeletedKeysBTreeFactory(IIOManager ioManager, IFileMapProvider diskFileMapProvider,
+ ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
+ IBufferCache diskBufferCache, IPageManagerFactory freePageManagerFactory) throws HyracksDataException {
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(invListTypeTraits);
ITreeIndexFrameFactory leafFrameFactory =
BTreeUtils.getLeafFrameFactory(tupleWriterFactory, BTreeLeafFrameType.REGULAR_NSM);
@@ -132,7 +130,7 @@
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable,
- IMetadataPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+ IMetadataPageManagerFactory pageManagerFactory) throws HyracksDataException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, diskFileMapProvider,
invListTypeTraits, invListCmpFactories, diskBufferCache, pageManagerFactory);
@@ -166,9 +164,9 @@
LSMInvertedIndex invIndex = new LSMInvertedIndex(ioManager, virtualBufferCaches, invIndexFactory,
deletedKeysBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory, filterManager,
bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories,
- tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler,
- ioOpCallback, invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps,
- invertedIndexFieldsForNonBulkLoadOps, durable);
+ tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTracker, ioScheduler, ioOpCallback,
+ invertedIndexFields, filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps,
+ durable);
return invIndex;
}
@@ -181,7 +179,7 @@
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback, int[] invertedIndexFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
int[] filterFieldsForNonBulkLoadOps, int[] invertedIndexFieldsForNonBulkLoadOps, boolean durable,
- IPageManagerFactory pageManagerFactory) throws IndexException, HyracksDataException {
+ IPageManagerFactory pageManagerFactory) throws HyracksDataException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(ioManager, diskFileMapProvider,
invListTypeTraits, invListCmpFactories, diskBufferCache, pageManagerFactory);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
index 1b2ff6f..ba61d6c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/ExternalRTreeDataflowHelper.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -95,16 +94,11 @@
RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
throws HyracksDataException {
- try {
- return LSMRTreeUtils.createExternalRTree(ctx.getIOManager(), file, diskBufferCache, diskFileMapProvider,
- typeTraits,
- rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
- bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
- ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, version, durable,
- isPointMBR, (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ return LSMRTreeUtils.createExternalRTree(ctx.getIOManager(), file, diskBufferCache, diskFileMapProvider,
+ typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+ bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
+ ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, btreeFields, version, durable,
+ isPointMBR, (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
}
public int getTargetVersion() {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
index 63633aa..d580756 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -84,16 +83,11 @@
RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
throws HyracksDataException {
- try {
- return LSMRTreeUtils.createLSMTree(ctx.getIOManager(), virtualBufferCaches, file, diskBufferCache,
- diskFileMapProvider,
- typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
- bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
- ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, btreeFields,
- filterTypeTraits, filterCmpFactories, filterFields, durable, isPointMBR,
- (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ return LSMRTreeUtils.createLSMTree(ctx.getIOManager(), virtualBufferCaches, file, diskBufferCache,
+ diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ rtreePolicyType, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler,
+ ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, btreeFields,
+ filterTypeTraits, filterCmpFactories, filterFields, durable, isPointMBR,
+ (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
index 6e0ffaf..81fdbb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -66,15 +65,11 @@
RTreePolicyType rtreePolicyType, ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields)
throws HyracksDataException {
- try {
- return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ctx.getIOManager(), virtualBufferCaches, file,
- diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, rtreePolicyType, mergePolicy, opTracker, ioScheduler, ioOpCallbackFactory
- .createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
- filterCmpFactories, filterFields, durable, isPointMBR, (IMetadataPageManagerFactory) opDesc
- .getPageManagerFactory());
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
- }
+ return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(ctx.getIOManager(), virtualBufferCaches, file,
+ diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
+ valueProviderFactories, rtreePolicyType, mergePolicy, opTracker, ioScheduler,
+ ioOpCallbackFactory.createIoOpCallback(), linearizeCmpFactory, rtreeFields, filterTypeTraits,
+ filterCmpFactories, filterFields, durable, isPointMBR,
+ (IMetadataPageManagerFactory) opDesc.getPageManagerFactory());
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
index 480e1e2..646dbef 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
@@ -23,6 +23,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -36,16 +37,14 @@
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
@@ -184,8 +183,7 @@
if (flushOnExit) {
BlockingIOOperationCallbackWrapper cb = new BlockingIOOperationCallbackWrapper(ioOpCallback);
- ILSMIndexAccessor accessor =
- createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ ILSMIndexAccessor accessor = createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
accessor.scheduleFlush(cb);
try {
cb.waitForIO();
@@ -258,7 +256,7 @@
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
cursor.open(ctx.searchInitialState, pred);
}
@@ -277,7 +275,7 @@
protected LSMRTreeDiskComponent createDiskComponent(ILSMDiskComponentFactory factory, FileReference insertFileRef,
FileReference deleteFileRef, FileReference bloomFilterFileRef, boolean createComponent)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
// Create new tree instance.
LSMRTreeDiskComponent component = (LSMRTreeDiskComponent) factory
.createComponent(new LSMComponentFileReferences(insertFileRef, deleteFileRef, bloomFilterFileRef));
@@ -343,8 +341,7 @@
}
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
@@ -368,9 +365,12 @@
// Insert key into the deleted-keys BTree.
try {
ctx.currentMutableBTreeAccessor.insert(indexTuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // Do nothing, because one delete tuple is enough to indicate
- // that all the corresponding insert tuples are deleted
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
+ // Otherwise swallow the duplicate-key error: one delete tuple is enough
+ // to indicate that all the corresponding insert tuples are deleted
}
}
if (ctx.filterTuple != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
index f846c6c..dce7102 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTree.java
@@ -24,6 +24,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -42,12 +43,10 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITwoPCIndexBulkLoader;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -198,21 +197,13 @@
if (diskComponents.size() == 0 && secondDiskComponents.size() == 0) {
//First time activation
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMRTreeDiskComponent component;
- try {
- component = createDiskComponent(componentFactory,
- lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getDeleteIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component =
+ createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getDeleteIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
diskComponents.add(component);
secondDiskComponents.add(component);
}
@@ -254,7 +245,7 @@
// we override this method because this index uses a different opcontext
@Override
public void search(ILSMIndexOperationContext ictx, IIndexCursor cursor, ISearchPredicate pred)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ExternalRTreeOpContext ctx = (ExternalRTreeOpContext) ictx;
List<ILSMComponent> operationalComponents = ictx.getComponentHolder();
ctx.initialState.setOperationalComponents(operationalComponents);
@@ -267,7 +258,7 @@
// This can be done in a better way by creating a method boolean
// keepDeletedTuples(mergedComponents);
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
ITreeIndexCursor cursor = mergeOp.getCursor();
ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -430,8 +421,7 @@
// Not supported
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException("tuple modify not supported in LSM-Disk-Only-RTree");
}
@@ -444,7 +434,7 @@
// Not supported
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
throw new UnsupportedOperationException("flush not supported in LSM-Disk-Only-RTree");
}
@@ -492,23 +482,15 @@
// For initial load
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, 0, checkIfEmptyIndex, false);
}
// For transaction bulk load <- could consolidate with the above method ->
@Override
public IIndexBulkLoader createTransactionBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMTwoPCRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex, true);
}
// The bulk loader used for both initial loading and transaction
@@ -524,24 +506,16 @@
private final boolean isTransaction;
public LSMTwoPCRTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex, boolean isTransaction) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex, boolean isTransaction) throws HyracksDataException {
this.isTransaction = isTransaction;
// Create the appropriate target
if (isTransaction) {
- try {
- component = createTransactionTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createTransactionTarget();
} else {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createBulkLoadTarget();
}
// Create the three loaders
@@ -557,10 +531,10 @@
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
rtreeBulkLoader.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -596,7 +570,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (!endedBloomFilterLoad) {
builder.end();
@@ -623,11 +597,11 @@
}
@Override
- public void delete(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
try {
btreeBulkLoader.add(tuple);
builder.add(tuple);
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -647,12 +621,12 @@
// This method is used to create a target for a bulk modify operation. This
// component must then eventually be either committed or deleted
- private ILSMDiskComponent createTransactionTarget() throws HyracksDataException, IndexException {
+ private ILSMDiskComponent createTransactionTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs;
try {
componentFileRefs = fileManager.getNewTransactionFileReference();
} catch (IOException e) {
- throw new HyracksDataException("Failed to create transaction components", e);
+ throw HyracksDataException.create(e);
}
return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
@@ -669,7 +643,7 @@
// opCtx. first line <- in schedule merge, we->
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ILSMIndexOperationContext rctx = createOpContext(NoOpOperationCallback.INSTANCE, -1);
rctx.setOperation(IndexOperation.MERGE);
List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -732,7 +706,7 @@
}
@Override
- public void commitTransaction() throws TreeIndexException, HyracksDataException, IndexException {
+ public void commitTransaction() throws HyracksDataException {
LSMComponentFileReferences componentFileRefrences = fileManager.getTransactionFileReferenceForCommit();
LSMRTreeDiskComponent component = null;
if (componentFileRefrences != null) {
@@ -744,21 +718,13 @@
}
@Override
- public void abortTransaction() throws TreeIndexException {
- try {
- fileManager.deleteTransactionFiles();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void abortTransaction() throws HyracksDataException {
+ fileManager.deleteTransactionFiles();
}
@Override
- public void recoverTransaction() throws TreeIndexException {
- try {
- fileManager.recoverTransaction();
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ public void recoverTransaction() throws HyracksDataException {
+ fileManager.recoverTransaction();
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
index e62ccdc..497a887 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
@@ -26,6 +26,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -46,18 +47,15 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.DualTupleReference;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -96,8 +94,7 @@
throws HyracksDataException {
super(ioManager, virtualBufferCaches, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
- new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory,
- filterFactory),
+ new LSMRTreeDiskComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, filterFactory),
diskFileMapProvider, fieldCount, rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields,
linearizerArray, bloomFilterFalsePositiveRate, mergePolicy, opTracker, ioScheduler, ioOpCallback,
filterFactory, filterFrameFactory, filterManager, rtreeFields, filterFields, durable, isPointMBR,
@@ -138,22 +135,13 @@
super.activate();
List<ILSMDiskComponent> immutableComponents = diskComponents;
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
immutableComponents.clear();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMRTreeDiskComponent component;
- try {
- component =
- createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
- lsmComonentFileReference.getDeleteIndexFileReference(),
- lsmComonentFileReference.getBloomFilterFileReference(), false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ lsmComonentFileReference.getDeleteIndexFileReference(),
+ lsmComonentFileReference.getBloomFilterFileReference(), false);
immutableComponents.add(component);
}
isActivated = true;
@@ -225,7 +213,7 @@
}
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
LSMRTreeMemoryComponent flushingComponent = (LSMRTreeMemoryComponent) flushOp.getFlushingComponent();
// Renaming order is critical because we use assume ordering when we
@@ -337,7 +325,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ILSMIndexOperationContext rctx =
createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
rctx.setOperation(IndexOperation.MERGE);
@@ -351,7 +339,7 @@
}
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
ITreeIndexCursor cursor = mergeOp.getCursor();
ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -444,21 +432,21 @@
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
dualTuple.reset(tuple);
lsmHarness.modify(ctx, false, dualTuple);
}
@Override
- public boolean tryDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public boolean tryDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
dualTuple.reset(tuple);
return lsmHarness.modify(ctx, true, dualTuple);
}
@Override
- public void forceDelete(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void forceDelete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
dualTuple.reset(tuple);
lsmHarness.forceModify(ctx, dualTuple);
@@ -470,27 +458,21 @@
}
}
- protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+ protected ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences componentFileRefs = fileManager.getRelFlushFileReference();
return createDiskComponent(componentFactory, componentFileRefs.getInsertIndexFileReference(),
- componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(),
- true);
+ componentFileRefs.getDeleteIndexFileReference(), componentFileRefs.getBloomFilterFileReference(), true);
}
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMRTreeBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
}
// This function is modified for R-Trees without antimatter tuples to allow buddy B-Tree to have only primary keys
@Override
- public void modify(IIndexOperationContext ictx, ITupleReference tuple)
- throws HyracksDataException, IndexException {
+ public void modify(IIndexOperationContext ictx, ITupleReference tuple) throws HyracksDataException {
LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
if (ctx.getOperation() == IndexOperation.PHYSICALDELETE) {
throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
@@ -513,9 +495,12 @@
ctx.currentMutableRTreeAccessor.delete(indexTuple);
try {
ctx.currentMutableBTreeAccessor.insert(((DualTupleReference) tuple).getPermutingTuple());
- } catch (TreeIndexDuplicateKeyException e) {
+ } catch (HyracksDataException e) {
// Do nothing, because one delete tuple is enough to indicate
// that all the corresponding insert tuples are deleted
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
if (ctx.filterTuple != null) {
@@ -536,17 +521,13 @@
public final MultiComparator filterCmp;
public LSMRTreeBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
// Note that by using a flush target file name, we state that the
// new bulk loaded tree is "newer" than any other merged tree.
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+ component = createBulkLoadTarget();
bulkLoader = ((LSMRTreeDiskComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
numElementsHint, false);
buddyBTreeBulkloader = ((LSMRTreeDiskComponent) component).getBTree().createBulkLoader(fillFactor,
@@ -563,7 +544,7 @@
}
@Override
- public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
ITupleReference t;
if (indexTuple != null) {
@@ -579,7 +560,7 @@
filterTuple.reset(tuple);
component.getLSMComponentFilter().update(filterTuple, filterCmp);
}
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -589,7 +570,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (component.getLSMComponentFilter() != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
index 22a1054..409649b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeDeletedKeysBTreeMergeCursor.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -42,13 +41,12 @@
}
@Override
- protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
+ protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException {
return false;
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
- IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
cmp = lsmInitialState.getBTreeCmp();
operationalComponents = lsmInitialState.getOperationalComponents();
@@ -63,7 +61,7 @@
ILSMComponent component = operationalComponents.get(i);
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame();
rangeCursors[i] = new BTreeRangeSearchCursor(leafFrame, false);
- BTree btree = (BTree) ((LSMRTreeDiskComponent) component).getBTree();
+ BTree btree = ((LSMRTreeDiskComponent) component).getBTree();
btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
btreeAccessors[i].search(rangeCursors[i], btreePredicate);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
index cb6c065..f9ee5c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
@@ -34,7 +34,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
@@ -74,8 +73,8 @@
String baseName = baseDir + ts + SPLIT_STRING + ts;
// Begin timestamp and end timestamp are identical since it is a flush
return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
- createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
- + BLOOM_FILTER_STRING));
+ createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+ createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
@@ -88,12 +87,12 @@
// Get the range of timestamps by taking the earliest and the latest
// timestamps
return new LSMComponentFileReferences(createMergeFile(baseName + SPLIT_STRING + RTREE_STRING),
- createMergeFile(baseName + SPLIT_STRING + BTREE_STRING), createMergeFile(baseName + SPLIT_STRING
- + BLOOM_FILTER_STRING));
+ createMergeFile(baseName + SPLIT_STRING + BTREE_STRING),
+ createMergeFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
- public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException, IndexException {
+ public List<LSMComponentFileReferences> cleanupAndGetValidFiles() throws HyracksDataException {
List<LSMComponentFileReferences> validFiles = new ArrayList<>();
ArrayList<ComparableFileName> allRTreeFiles = new ArrayList<>();
ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<>();
@@ -110,7 +109,8 @@
btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
}
validateFiles(btreeFilesSet, allRTreeFiles, getCompoundFilter(transactionFilter, rtreeFilter), rtreeFactory);
- validateFiles(btreeFilesSet, allBloomFilterFiles, getCompoundFilter(transactionFilter, bloomFilterFilter), null);
+ validateFiles(btreeFilesSet, allBloomFilterFiles, getCompoundFilter(transactionFilter, bloomFilterFilter),
+ null);
// Sanity check.
if (allRTreeFiles.size() != allBTreeFiles.size() || allBTreeFiles.size() != allBloomFilterFiles.size()) {
@@ -175,7 +175,8 @@
invalidBloomFilterFile.delete();
} else {
// This scenario should not be possible.
- throw new HyracksDataException("Found LSM files with overlapping but not contained timetamp intervals.");
+ throw new HyracksDataException(
+                        "Found LSM files with overlapping but not contained timestamp intervals.");
}
}
@@ -206,8 +207,8 @@
String baseName = baseDir + ts + SPLIT_STRING + ts;
return new LSMComponentFileReferences(createFlushFile(baseName + SPLIT_STRING + RTREE_STRING),
- createFlushFile(baseName + SPLIT_STRING + BTREE_STRING), createFlushFile(baseName + SPLIT_STRING
- + BLOOM_FILTER_STRING));
+ createFlushFile(baseName + SPLIT_STRING + BTREE_STRING),
+ createFlushFile(baseName + SPLIT_STRING + BLOOM_FILTER_STRING));
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
index 013821d..618b3a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFlushOperation.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -70,7 +69,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.flush(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
index 6a12ab2..847533f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeMergeOperation.java
@@ -26,7 +26,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -82,7 +81,7 @@
}
@Override
- public Boolean call() throws HyracksDataException, IndexException {
+ public Boolean call() throws HyracksDataException {
accessor.merge(this);
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
index a11f742..08458d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
@@ -23,7 +23,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -67,16 +66,12 @@
private void searchNextCursor() throws HyracksDataException {
if (currentCursor < numberOfTrees) {
rtreeCursors[currentCursor].reset();
- try {
- rtreeAccessors[currentCursor].search(rtreeCursors[currentCursor], rtreeSearchPredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ rtreeAccessors[currentCursor].search(rtreeCursors[currentCursor], rtreeSearchPredicate);
}
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (foundNext) {
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
index d9da016..c536712 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSortedCursor.java
@@ -24,7 +24,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.ICursorInitialState;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
@@ -37,8 +36,8 @@
private int foundIn = -1;
private PermutingTupleReference btreeTuple;
- public LSMRTreeSortedCursor(ILSMIndexOperationContext opCtx, ILinearizeComparatorFactory linearizer, int[] buddyBTreeFields)
- throws HyracksDataException {
+ public LSMRTreeSortedCursor(ILSMIndexOperationContext opCtx, ILinearizeComparatorFactory linearizer,
+ int[] buddyBTreeFields) throws HyracksDataException {
super(opCtx);
this.linearizeCmp = linearizer.createBinaryComparator();
this.btreeTuple = new PermutingTupleReference(buddyBTreeFields);
@@ -56,11 +55,7 @@
try {
for (int i = 0; i < numberOfTrees; i++) {
rtreeCursors[i].reset();
- try {
- rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
if (rtreeCursors[i].hasNext()) {
rtreeCursors[i].next();
} else {
@@ -75,7 +70,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
while (!foundNext) {
frameTuple = null;
@@ -89,8 +84,9 @@
foundIn = -1;
for (int i = 0; i < numberOfTrees; i++) {
- if (depletedRtreeCursors[i])
+ if (depletedRtreeCursors[i]) {
continue;
+ }
if (frameTuple == null) {
frameTuple = rtreeCursors[i].getTuple();
@@ -99,28 +95,25 @@
}
if (linearizeCmp.compare(frameTuple.getFieldData(0), frameTuple.getFieldStart(0),
- frameTuple.getFieldLength(0) * linearizeCmp.getDimensions(), rtreeCursors[i].getTuple()
- .getFieldData(0), rtreeCursors[i].getTuple().getFieldStart(0), rtreeCursors[i]
- .getTuple().getFieldLength(0) * linearizeCmp.getDimensions()) > 0) {
+ frameTuple.getFieldLength(0) * linearizeCmp.getDimensions(),
+ rtreeCursors[i].getTuple().getFieldData(0), rtreeCursors[i].getTuple().getFieldStart(0),
+ rtreeCursors[i].getTuple().getFieldLength(0) * linearizeCmp.getDimensions()) > 0) {
frameTuple = rtreeCursors[i].getTuple();
foundIn = i;
}
}
- if (foundIn == -1)
+ if (foundIn == -1) {
return false;
+ }
boolean killed = false;
btreeTuple.reset(frameTuple);
for (int i = 0; i < foundIn; i++) {
- try {
- btreeCursors[i].reset();
- btreeRangePredicate.setHighKey(btreeTuple, true);
- btreeRangePredicate.setLowKey(btreeTuple, true);
- btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ btreeCursors[i].reset();
+ btreeRangePredicate.setHighKey(btreeTuple, true);
+ btreeRangePredicate.setLowKey(btreeTuple, true);
+ btreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
try {
if (btreeCursors[i].hasNext()) {
killed = true;
@@ -151,11 +144,7 @@
foundNext = false;
for (int i = 0; i < numberOfTrees; i++) {
rtreeCursors[i].reset();
- try {
- rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ rtreeAccessors[i].search(rtreeCursors[i], rtreeSearchPredicate);
if (rtreeCursors[i].hasNext()) {
rtreeCursors[i].next();
} else {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
index 179865a..b246e8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
@@ -26,6 +26,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -38,17 +39,15 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponent;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMDiskComponentFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
@@ -104,19 +103,11 @@
List<ILSMDiskComponent> immutableComponents = diskComponents;
immutableComponents.clear();
List<LSMComponentFileReferences> validFileReferences;
- try {
- validFileReferences = fileManager.cleanupAndGetValidFiles();
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ validFileReferences = fileManager.cleanupAndGetValidFiles();
for (LSMComponentFileReferences lsmComonentFileReference : validFileReferences) {
LSMRTreeDiskComponent component;
- try {
- component = createDiskComponent(componentFactory,
- lsmComonentFileReference.getInsertIndexFileReference(), null, null, false);
- } catch (IndexException e) {
- throw new HyracksDataException(e);
- }
+ component = createDiskComponent(componentFactory, lsmComonentFileReference.getInsertIndexFileReference(),
+ null, null, false);
immutableComponents.add(component);
}
isActivated = true;
@@ -175,7 +166,7 @@
}
@Override
- public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent flush(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeFlushOperation flushOp = (LSMRTreeFlushOperation) operation;
// Renaming order is critical because we use assume ordering when we
// read the file names when we open the tree.
@@ -268,7 +259,7 @@
@Override
public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
LSMRTreeOpContext rctx = createOpContext(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
rctx.setOperation(IndexOperation.MERGE);
List<ILSMComponent> mergingComponents = ctx.getComponentHolder();
@@ -285,7 +276,7 @@
}
@Override
- public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException, IndexException {
+ public ILSMDiskComponent merge(ILSMIOOperation operation) throws HyracksDataException {
LSMRTreeMergeOperation mergeOp = (LSMRTreeMergeOperation) operation;
ITreeIndexCursor cursor = mergeOp.getCursor();
ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -346,13 +337,8 @@
@Override
public IIndexBulkLoader createBulkLoader(float fillLevel, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
- try {
- return new LSMRTreeWithAntiMatterTuplesBulkLoader(fillLevel, verifyInput, numElementsHint,
- checkIfEmptyIndex);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ boolean checkIfEmptyIndex) throws HyracksDataException {
+ return new LSMRTreeWithAntiMatterTuplesBulkLoader(fillLevel, verifyInput, numElementsHint, checkIfEmptyIndex);
}
public class LSMRTreeWithAntiMatterTuplesBulkLoader implements IIndexBulkLoader {
@@ -365,17 +351,14 @@
public final MultiComparator filterCmp;
public LSMRTreeWithAntiMatterTuplesBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException, HyracksDataException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
if (checkIfEmptyIndex && !isEmptyIndex()) {
- throw new TreeIndexException("Cannot load an index that is not empty");
+ throw HyracksDataException.create(ErrorCode.LOAD_NON_EMPTY_INDEX);
}
// Note that by using a flush target file name, we state that the
// new bulk loaded tree is "newer" than any other merged tree.
- try {
- component = createBulkLoadTarget();
- } catch (HyracksDataException | IndexException e) {
- throw new TreeIndexException(e);
- }
+
+ component = createBulkLoadTarget();
bulkLoader = ((LSMRTreeDiskComponent) component).getRTree().createBulkLoader(fillFactor, verifyInput,
numElementsHint, false);
@@ -391,7 +374,7 @@
}
@Override
- public void add(ITupleReference tuple) throws HyracksDataException, IndexException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
ITupleReference t;
if (indexTuple != null) {
@@ -408,7 +391,7 @@
component.getLSMComponentFilter().update(filterTuple, filterCmp);
}
- } catch (IndexException | HyracksDataException | RuntimeException e) {
+ } catch (Exception e) {
cleanupArtifacts();
throw e;
}
@@ -418,7 +401,7 @@
}
@Override
- public void end() throws HyracksDataException, IndexException {
+ public void end() throws HyracksDataException {
if (!cleanedUpArtifacts) {
if (component.getLSMComponentFilter() != null) {
@@ -451,7 +434,7 @@
}
}
- private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException, IndexException {
+ private ILSMDiskComponent createBulkLoadTarget() throws HyracksDataException {
LSMComponentFileReferences relFlushFileRefs = fileManager.getRelFlushFileReference();
return createDiskComponent(bulkLoaComponentFactory, relFlushFileRefs.getInsertIndexFileReference(), null,
null, true);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
index a913b81..98ab803 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesSearchCursor.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
@@ -68,8 +67,7 @@
}
@Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException,
- IndexException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
LSMRTreeCursorInitialState lsmInitialState = (LSMRTreeCursorInitialState) initialState;
cmp = lsmInitialState.getHilbertCmp();
btreeCmp = lsmInitialState.getBTreeCmp();
@@ -99,16 +97,16 @@
btreeAccessors = new ITreeIndexAccessor[numMutableComponents];
for (int i = 0; i < numMutableComponents; i++) {
ILSMComponent component = operationalComponents.get(i);
- RTree rtree = (RTree) ((LSMRTreeMemoryComponent) component).getRTree();
- BTree btree = (BTree) ((LSMRTreeMemoryComponent) component).getBTree();
- mutableRTreeCursors[i] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState
- .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState
- .getRTreeLeafFrameFactory().createFrame());
- btreeCursors[i] = new BTreeRangeSearchCursor((IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory()
- .createFrame(), false);
+ RTree rtree = ((LSMRTreeMemoryComponent) component).getRTree();
+ BTree btree = ((LSMRTreeMemoryComponent) component).getBTree();
+ mutableRTreeCursors[i] = new RTreeSearchCursor(
+ (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
+ (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
+ btreeCursors[i] = new BTreeRangeSearchCursor(
+ (IBTreeLeafFrame) lsmInitialState.getBTreeLeafFrameFactory().createFrame(), false);
btreeAccessors[i] = btree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
- mutableRTreeAccessors[i] = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ mutableRTreeAccessors[i] =
+ rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
}
rangeCursors = new RTreeSearchCursor[numImmutableComponents];
@@ -116,12 +114,12 @@
int j = 0;
for (int i = numMutableComponents; i < operationalComponents.size(); i++) {
ILSMComponent component = operationalComponents.get(i);
- rangeCursors[j] = new RTreeSearchCursor((IRTreeInteriorFrame) lsmInitialState
- .getRTreeInteriorFrameFactory().createFrame(), (IRTreeLeafFrame) lsmInitialState
- .getRTreeLeafFrameFactory().createFrame());
- RTree rtree = (RTree) ((LSMRTreeDiskComponent) component).getRTree();
- immutableRTreeAccessors[j] = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ rangeCursors[j] = new RTreeSearchCursor(
+ (IRTreeInteriorFrame) lsmInitialState.getRTreeInteriorFrameFactory().createFrame(),
+ (IRTreeLeafFrame) lsmInitialState.getRTreeLeafFrameFactory().createFrame());
+ RTree rtree = ((LSMRTreeDiskComponent) component).getRTree();
+ immutableRTreeAccessors[j] =
+ rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
immutableRTreeAccessors[j].search(rangeCursors[j], searchPred);
j++;
}
@@ -131,7 +129,7 @@
open = true;
}
- private void searchNextCursor() throws HyracksDataException, IndexException {
+ private void searchNextCursor() throws HyracksDataException {
if (currentCursor < numMutableComponents) {
mutableRTreeCursors[currentCursor].reset();
mutableRTreeAccessors[currentCursor].search(mutableRTreeCursors[currentCursor], rtreeSearchPredicate);
@@ -139,7 +137,7 @@
}
@Override
- public boolean hasNext() throws HyracksDataException, IndexException {
+ public boolean hasNext() throws HyracksDataException {
if (includeMutableComponent) {
if (foundNext) {
return true;
@@ -196,7 +194,7 @@
}
@Override
- public void reset() throws HyracksDataException, IndexException {
+ public void reset() throws HyracksDataException {
if (!open) {
return;
}
@@ -233,8 +231,7 @@
return cmp.selectiveFieldCompare(tupleA, tupleB, comparatorFields);
}
- private boolean searchMemBTrees(ITupleReference tuple, int lastBTreeToSearch) throws HyracksDataException,
- IndexException {
+ private boolean searchMemBTrees(ITupleReference tuple, int lastBTreeToSearch) throws HyracksDataException {
for (int i = 0; i < lastBTreeToSearch; i++) {
btreeCursors[i].reset();
btreeRangePredicate.setHighKey(tuple, true);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
index 624c7de..12226cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
@@ -24,6 +24,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
@@ -37,7 +38,6 @@
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -72,33 +72,32 @@
public class LSMRTreeUtils {
public static LSMRTree createLSMTree(IIOManager ioManager, List<IVirtualBufferCache> virtualBufferCaches,
- FileReference file,
- IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
- IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
- double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
+ FileReference file, IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider,
+ ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+ IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
+ RTreePolicyType rtreePolicyType, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
ILinearizeComparatorFactory linearizeCmpFactory, int[] rtreeFields, int[] buddyBTreeFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
- throws TreeIndexException, HyracksDataException {
+ throws HyracksDataException {
int valueFieldCount = buddyBTreeFields.length;
int keyFieldCount = typeTraits.length - valueFieldCount;
ITypeTraits[] btreeTypeTraits = new ITypeTraits[valueFieldCount];
for (int i = 0; i < valueFieldCount; i++) {
btreeTypeTraits[i] = typeTraits[buddyBTreeFields[i]];
}
- ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
- typeTraits, false);
+ ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+ new LSMTypeAwareTupleWriterFactory(typeTraits, false);
ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory = null;
if (isPointMBR) {
- rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
- valueFieldCount, false);
+ rtreeLeafFrameTupleWriterFactory =
+ new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, false);
} else {
rtreeLeafFrameTupleWriterFactory = rtreeInteriorFrameTupleWriterFactory;
}
- ITreeIndexTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(btreeTypeTraits,
- true);
+ ITreeIndexTupleWriterFactory btreeTupleWriterFactory =
+ new LSMTypeAwareTupleWriterFactory(btreeTypeTraits, true);
ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
rtreeInteriorFrameTupleWriterFactory, valueProviderFactories, rtreePolicyType, isPointMBR);
ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeLeafFrameTupleWriterFactory,
@@ -108,9 +107,9 @@
TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
typeTraits.length, isPointMBR);
- TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
- freePageManagerFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories,
- btreeTypeTraits.length);
+ TreeIndexFactory<BTree> diskBTreeFactory =
+ new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider, freePageManagerFactory,
+ btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories, btreeTypeTraits.length);
int[] comparatorFields = { 0 };
IBinaryComparatorFactory[] linearizerArray = { linearizeCmpFactory };
@@ -119,8 +118,8 @@
for (int i = 0; i < btreeCmpFactories.length; i++) {
bloomFilterKeyFields[i] = i;
}
- BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
- bloomFilterKeyFields);
+ BloomFilterFactory bloomFilterFactory =
+ new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields);
LSMComponentFilterFactory filterFactory = null;
LSMComponentFilterFrameFactory filterFrameFactory = null;
@@ -131,16 +130,14 @@
filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
filterManager = new LSMComponentFilterManager(filterFrameFactory);
}
- ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
- diskRTreeFactory,
- diskBTreeFactory);
+ ILSMIndexFileManager fileNameManager =
+ new LSMRTreeFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory, diskBTreeFactory);
LSMRTree lsmTree = new LSMRTree(ioManager, virtualBufferCaches, rtreeInteriorFrameFactory,
- rtreeLeafFrameFactory,
- btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory, diskBTreeFactory,
- bloomFilterFactory, filterFactory, filterFrameFactory, filterManager, bloomFilterFalsePositiveRate,
- diskFileMapProvider, typeTraits.length, rtreeCmpFactories, btreeCmpFactories, linearizeCmpFactory,
- comparatorFields, linearizerArray, mergePolicy, opTracker, ioScheduler, ioOpCallback, rtreeFields,
- buddyBTreeFields, filterFields, durable, isPointMBR);
+ rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+ diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, filterFactory, filterFrameFactory,
+ filterManager, bloomFilterFalsePositiveRate, diskFileMapProvider, typeTraits.length, rtreeCmpFactories,
+ btreeCmpFactories, linearizeCmpFactory, comparatorFields, linearizerArray, mergePolicy, opTracker,
+ ioScheduler, ioOpCallback, rtreeFields, buddyBTreeFields, filterFields, durable, isPointMBR);
return lsmTree;
}
@@ -153,18 +150,18 @@
ILSMIOOperationCallback ioOpCallback, ILinearizeComparatorFactory linearizerCmpFactory, int[] rtreeFields,
ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields,
boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
- throws TreeIndexException, HyracksDataException {
- ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits,
- false);
+ throws HyracksDataException {
+ ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+ new LSMRTreeTupleWriterFactory(typeTraits, false);
ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory;
ITreeIndexTupleWriterFactory rtreeLeafFrameCopyTupleWriterFactory;
if (isPointMBR) {
int keyFieldCount = rtreeCmpFactories.length;
int valueFieldCount = btreeCmpFactories.length - keyFieldCount;
- rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
- valueFieldCount, true);
- rtreeLeafFrameCopyTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
- valueFieldCount, true);
+ rtreeLeafFrameTupleWriterFactory =
+ new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, true);
+ rtreeLeafFrameCopyTupleWriterFactory =
+ new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, true);
} else {
rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, false);
@@ -193,8 +190,8 @@
// The first field is for the sorted curve (e.g. Hilbert curve), and the
// second field is for the primary key.
int[] comparatorFields = new int[btreeCmpFactories.length - rtreeCmpFactories.length + 1];
- IBinaryComparatorFactory[] linearizerArray = new IBinaryComparatorFactory[btreeCmpFactories.length
- - rtreeCmpFactories.length + 1];
+ IBinaryComparatorFactory[] linearizerArray =
+ new IBinaryComparatorFactory[btreeCmpFactories.length - rtreeCmpFactories.length + 1];
comparatorFields[0] = 0;
for (int i = 1; i < comparatorFields.length; i++) {
@@ -216,9 +213,8 @@
filterFrameFactory = new LSMComponentFilterFrameFactory(filterTupleWriterFactory);
filterManager = new LSMComponentFilterManager(filterFrameFactory);
}
- ILSMIndexFileManager fileNameManager = new LSMRTreeWithAntiMatterTuplesFileManager(ioManager,
- diskFileMapProvider, file,
- diskRTreeFactory);
+ ILSMIndexFileManager fileNameManager =
+ new LSMRTreeWithAntiMatterTuplesFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory);
LSMRTreeWithAntiMatterTuples lsmTree = new LSMRTreeWithAntiMatterTuples(ioManager, virtualBufferCaches,
rtreeInteriorFrameFactory, rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory,
fileNameManager, diskRTreeFactory, bulkLoadRTreeFactory, filterFactory, filterFrameFactory,
@@ -229,15 +225,14 @@
}
public static ExternalRTree createExternalRTree(IIOManager ioManager, FileReference file,
- IBufferCache diskBufferCache,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
+ IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
ILinearizeComparatorFactory linearizeCmpFactory, int[] buddyBTreeFields, int startWithVersion,
boolean durable, boolean isPointMBR, IMetadataPageManagerFactory freePageManagerFactory)
- throws TreeIndexException {
+ throws HyracksDataException {
int keyFieldCount = rtreeCmpFactories.length;
int valueFieldCount = typeTraits.length - keyFieldCount;
@@ -245,17 +240,17 @@
for (int i = 0; i < buddyBTreeFields.length; i++) {
btreeTypeTraits[i] = typeTraits[buddyBTreeFields[i]];
}
- ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
- typeTraits, false);
+ ITreeIndexTupleWriterFactory rtreeInteriorFrameTupleWriterFactory =
+ new LSMTypeAwareTupleWriterFactory(typeTraits, false);
ITreeIndexTupleWriterFactory rtreeLeafFrameTupleWriterFactory = null;
if (isPointMBR) {
- rtreeLeafFrameTupleWriterFactory = new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount,
- valueFieldCount, false);
+ rtreeLeafFrameTupleWriterFactory =
+ new LSMRTreeTupleWriterFactoryForPointMBR(typeTraits, keyFieldCount, valueFieldCount, false);
} else {
rtreeLeafFrameTupleWriterFactory = rtreeInteriorFrameTupleWriterFactory;
}
- ITreeIndexTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(btreeTypeTraits,
- true);
+ ITreeIndexTupleWriterFactory btreeTupleWriterFactory =
+ new LSMTypeAwareTupleWriterFactory(btreeTypeTraits, true);
ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
rtreeInteriorFrameTupleWriterFactory, valueProviderFactories, rtreePolicyType, isPointMBR);
ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeLeafFrameTupleWriterFactory,
@@ -265,9 +260,9 @@
TreeIndexFactory<RTree> diskRTreeFactory = new RTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
freePageManagerFactory, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories,
typeTraits.length, isPointMBR);
- TreeIndexFactory<BTree> diskBTreeFactory = new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider,
- freePageManagerFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories,
- btreeTypeTraits.length);
+ TreeIndexFactory<BTree> diskBTreeFactory =
+ new BTreeFactory(ioManager, diskBufferCache, diskFileMapProvider, freePageManagerFactory,
+ btreeInteriorFrameFactory, btreeLeafFrameFactory, btreeCmpFactories, btreeTypeTraits.length);
int[] comparatorFields = { 0 };
IBinaryComparatorFactory[] linearizerArray = { linearizeCmpFactory };
@@ -275,12 +270,11 @@
for (int i = 0; i < btreeCmpFactories.length; i++) {
bloomFilterKeyFields[i] = i;
}
- BloomFilterFactory bloomFilterFactory = new BloomFilterFactory(diskBufferCache, diskFileMapProvider,
- bloomFilterKeyFields);
+ BloomFilterFactory bloomFilterFactory =
+ new BloomFilterFactory(diskBufferCache, diskFileMapProvider, bloomFilterKeyFields);
- ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
- diskRTreeFactory,
- diskBTreeFactory);
+ ILSMIndexFileManager fileNameManager =
+ new LSMRTreeFileManager(ioManager, diskFileMapProvider, file, diskRTreeFactory, diskBTreeFactory);
ExternalRTree lsmTree = new ExternalRTree(ioManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory, diskBTreeFactory,
bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider, typeTraits.length,
@@ -291,10 +285,10 @@
}
public static ILinearizeComparatorFactory proposeBestLinearizer(ITypeTraits[] typeTraits, int numKeyFields)
- throws TreeIndexException {
+ throws HyracksDataException {
for (int i = 0; i < numKeyFields; i++) {
if (!(typeTraits[i].getClass().equals(typeTraits[0].getClass()))) {
- throw new TreeIndexException("Cannot propose linearizer if dimensions have different types");
+ throw HyracksDataException.create(ErrorCode.CANNOT_PROPOSE_LINEARIZER_DIFF_DIMENSIONS);
}
}
@@ -306,6 +300,7 @@
return new ZCurveIntComparatorFactory(numKeyFields / 2);
}
- throw new TreeIndexException("Cannot propose linearizer");
+ throw HyracksDataException.create(ErrorCode.CANNOT_PROPOSE_LINEARIZER_FOR_TYPE,
+ typeTraits[0].getClass().getSimpleName());
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index 892b715..b941989 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -21,7 +21,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.rtree.impls.PathList;
@@ -41,7 +40,7 @@
public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp)
throws HyracksDataException;
- public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException;
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws HyracksDataException;
public void enlarge(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index efdb67f..e24ab6b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -23,6 +23,7 @@
import java.util.Collections;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -30,7 +31,6 @@
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProvider;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.frames.AbstractSlotManager;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -41,12 +41,12 @@
public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
public static final int childPtrSize = 4;
- private IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
- .createBinaryComparator();
+ private IBinaryComparator childPtrCmp =
+ PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
private final int keyFieldCount;
public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders,
- RTreePolicyType rtreePolicyType, boolean isPointMBR) {
+ RTreePolicyType rtreePolicyType, boolean isPointMBR) {
super(tupleWriter, keyValueProviders, rtreePolicyType, isPointMBR);
keyFieldCount = keyValueProviders.length;
frameTuple.setFieldCount(keyFieldCount);
@@ -130,7 +130,8 @@
if (c == 0) {
return i;
} else {
- int pageId = IntegerPointable.getInteger(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(frameTuple));
+ int pageId = IntegerPointable.getInteger(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(frameTuple));
traverseList.add(pageId, -1, parentIndex);
}
}
@@ -197,19 +198,15 @@
}
@Override
- public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException {
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws HyracksDataException {
frameTuple.setFieldCount(cmp.getKeyFieldCount());
if (tupleIndex == -1) {
- try {
- tupleIndex = findTupleByPointer(tuple, cmp);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ tupleIndex = findTupleByPointer(tuple, cmp);
}
if (tupleIndex != -1) {
tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
} else {
- throw new TreeIndexException("Error: Faild to find a tuple in a page");
+ throw HyracksDataException.create(ErrorCode.FAILED_TO_FIND_TUPLE);
}
@@ -217,9 +214,9 @@
protected int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp)
throws HyracksDataException {
- return childPtrCmp
- .compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleA), childPtrSize,
- tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB), childPtrSize);
+ return childPtrCmp.compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleA),
+ childPtrSize, tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB),
+ childPtrSize);
}
@Override
@@ -243,8 +240,8 @@
buf.putInt(Constants.TUPLE_COUNT_OFFSET, buf.getInt(Constants.TUPLE_COUNT_OFFSET) + 1);
buf.putInt(Constants.FREE_SPACE_OFFSET, buf.getInt(Constants.FREE_SPACE_OFFSET) + tupleSize);
- buf.putInt(TOTAL_FREE_SPACE_OFFSET, buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager
- .getSlotSize());
+ buf.putInt(TOTAL_FREE_SPACE_OFFSET,
+ buf.getInt(TOTAL_FREE_SPACE_OFFSET) - tupleSize - slotManager.getSlotSize());
}
@@ -298,8 +295,9 @@
for (int i = 0; i < tupleCount; i++) {
int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
frameTuple.resetByTupleOffset(buf.array(), tupleOff);
- int intVal = IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
- + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+ int intVal =
+ IntegerPointable.getInteger(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
ret.add(intVal);
}
return ret;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
index 9cd16c4..7b2ed48 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTree.java
@@ -26,6 +26,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -41,8 +42,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrame;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.frames.AbstractSlotManager;
import org.apache.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import org.apache.hyracks.storage.am.common.impls.AbstractTreeIndex;
@@ -159,63 +158,6 @@
modificationCallback);
}
- private void insert(ITupleReference tuple, IIndexOperationContext ictx)
- throws HyracksDataException, TreeIndexException {
- RTreeOpContext ctx = (RTreeOpContext) ictx;
- int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
- ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
- if (tupleSize > maxTupleSize) {
- throw new TreeIndexException("Record size (" + tupleSize + ") larger than maximum acceptable record size ("
- + maxTupleSize + ")");
- }
- ctx.reset();
- ctx.setTuple(tuple);
- ctx.splitKey.reset();
- ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
- ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
- ctx.modificationCallback.before(tuple);
-
- int maxFieldPos = cmpFactories.length / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
- if (c > 0) {
- throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
- }
- }
-
- try {
- ICachedPage leafNode = findLeaf(ctx);
-
- int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.moveLast();
- insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
-
- while (true) {
- if (ctx.splitKey.getLeftPageBuffer() != null) {
- updateParentForInsert(ctx);
- } else {
- break;
- }
- }
- } finally {
- for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
- ICachedPage node = ctx.NSNUpdates.get(i);
- ctx.interiorFrame.setPage(node);
- ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
- }
-
- for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
- ICachedPage node = ctx.LSNUpdates.get(i);
- ctx.interiorFrame.setPage(node);
- ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
- node.releaseWriteLatch(true);
- bufferCache.unpin(node);
- }
- }
- }
-
private ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
int pageId = rootPage;
boolean writeLatched = false;
@@ -342,7 +284,7 @@
}
private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
- throws HyracksDataException, TreeIndexException {
+ throws HyracksDataException {
boolean succeeded = false;
FrameOpSpaceStatus spaceStatus;
if (!isLeaf) {
@@ -412,8 +354,7 @@
rightFrame.setPage(rightNode);
rightFrame.initBuffer(ctx.interiorFrame.getLevel());
rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
- ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey, ctx,
- bufferCache);
+ ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey, ctx, bufferCache);
ctx.interiorFrame.setRightPage(rightPageId);
} else {
rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
@@ -421,8 +362,7 @@
rightFrame.initBuffer((byte) 0);
rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
ctx.modificationCallback.found(null, tuple);
- ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey, ctx,
- bufferCache);
+ ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey, ctx, bufferCache);
ctx.leafFrame.setRightPage(rightPageId);
}
succeeded = true;
@@ -448,8 +388,8 @@
ctx.splitKey.setPages(pageId, rightPageId);
if (pageId == rootPage) {
int newLeftId = freePageManager.takePage(ctx.metaFrame);
- ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
- true);
+ ICachedPage newLeftNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
newLeftNode.acquireWriteLatch();
succeeded = false;
try {
@@ -503,7 +443,7 @@
}
}
- private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+ private void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException {
boolean succeeded = false;
boolean writeLatched = false;
int parentId = ctx.pathList.getLastPageId();
@@ -542,7 +482,7 @@
if (foundParent) {
try {
ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
- } catch (TreeIndexException e) {
+ } catch (Exception e) {
if (writeLatched) {
parentNode.releaseWriteLatch(true);
writeLatched = false;
@@ -571,7 +511,7 @@
updateParentForInsert(ctx);
}
- private void findPath(RTreeOpContext ctx) throws TreeIndexException, HyracksDataException {
+ private void findPath(RTreeOpContext ctx) throws HyracksDataException {
boolean readLatched = false;
int pageId = rootPage;
int parentIndex = -1;
@@ -596,7 +536,7 @@
ctx.traverseList.moveFirst();
if (ctx.interiorFrame.isLeaf()) {
- throw new TreeIndexException("Error: Failed to re-find parent of a page in the tree.");
+ throw HyracksDataException.create(ErrorCode.FAILED_TO_RE_FIND_PARENT);
}
if (pageId != rootPage) {
@@ -635,7 +575,7 @@
}
}
- private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+ private void delete(ITupleReference tuple, RTreeOpContext ctx) throws HyracksDataException {
ctx.reset();
ctx.setTuple(tuple);
ctx.splitKey.reset();
@@ -764,7 +704,7 @@
}
private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, RTreeOpContext ctx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
ctx.reset();
ctx.cursor = cursor;
@@ -828,19 +768,19 @@
}
@Override
- public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void insert(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.INSERT);
- rtree.insert(tuple, ctx);
+ insert(tuple, ctx);
}
@Override
- public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void update(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.UPDATE);
rtree.update(tuple, ctx);
}
@Override
- public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void delete(ITupleReference tuple) throws HyracksDataException {
ctx.setOperation(IndexOperation.DELETE);
rtree.delete(tuple, ctx);
}
@@ -852,8 +792,7 @@
}
@Override
- public void search(IIndexCursor cursor, ISearchPredicate searchPred)
- throws HyracksDataException, IndexException {
+ public void search(IIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException {
ctx.setOperation(IndexOperation.SEARCH);
rtree.search((ITreeIndexCursor) cursor, searchPred, ctx);
}
@@ -874,45 +813,97 @@
}
@Override
- public void upsert(ITupleReference tuple) throws HyracksDataException, TreeIndexException {
+ public void upsert(ITupleReference tuple) throws HyracksDataException {
throw new UnsupportedOperationException(
"The RTree does not support the notion of keys, therefore upsert does not make sense.");
}
+
+ private void insert(ITupleReference tuple, IIndexOperationContext ictx) throws HyracksDataException {
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
+ ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
+ }
+ ctx.reset();
+ ctx.setTuple(tuple);
+ ctx.splitKey.reset();
+ ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
+ ctx.splitKey.getRightTuple().setFieldCount(cmpFactories.length);
+ ctx.modificationCallback.before(tuple);
+
+ int maxFieldPos = cmpFactories.length / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = ctx.cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j),
+ tuple.getFieldLength(j));
+ if (c > 0) {
+ throw new IllegalArgumentException(
+ "The low key point has larger coordinates than the high key point.");
+ }
+ }
+
+ try {
+ ICachedPage leafNode = findLeaf(ctx);
+
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForInsert(ctx);
+ } else {
+ break;
+ }
+ }
+ } finally {
+ for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
+ ICachedPage node = ctx.NSNUpdates.get(i);
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
+ }
+
+ for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
+ ICachedPage node = ctx.LSNUpdates.get(i);
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
+ node.releaseWriteLatch(true);
+ bufferCache.unpin(node);
+ }
+ }
+ }
}
@Override
public IIndexBulkLoader createBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint,
- boolean checkIfEmptyIndex) throws TreeIndexException {
+ boolean checkIfEmptyIndex) throws HyracksDataException {
// TODO: verifyInput currently does nothing.
- try {
- return new RTreeBulkLoader(fillFactor);
- } catch (HyracksDataException e) {
- throw new TreeIndexException(e);
- }
+ return new RTreeBulkLoader(fillFactor);
}
public class RTreeBulkLoader extends AbstractTreeIndex.AbstractTreeIndexBulkLoader {
ITreeIndexFrame lowerFrame, prevInteriorFrame;
- RTreeTypeAwareTupleWriter interiorFrameTupleWriter = ((RTreeTypeAwareTupleWriter) interiorFrame
- .getTupleWriter());
+ RTreeTypeAwareTupleWriter interiorFrameTupleWriter =
+ ((RTreeTypeAwareTupleWriter) interiorFrame.getTupleWriter());
ITreeIndexTupleReference mbrTuple = interiorFrame.createTupleReference();
ByteBuffer mbr;
List<Integer> prevNodeFrontierPages = new ArrayList<>();
- public RTreeBulkLoader(float fillFactor) throws TreeIndexException, HyracksDataException {
+ public RTreeBulkLoader(float fillFactor) throws HyracksDataException {
super(fillFactor);
prevInteriorFrame = interiorFrameFactory.createFrame();
}
@Override
- public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
+ public void add(ITupleReference tuple) throws HyracksDataException {
try {
int leafFrameTupleSize = leafFrame.getBytesRequiredToWriteTuple(tuple);
int interiorFrameTupleSize = interiorFrame.getBytesRequiredToWriteTuple(tuple);
int tupleSize = Math.max(leafFrameTupleSize, interiorFrameTupleSize);
if (tupleSize > maxTupleSize) {
- throw new TreeIndexException("Space required for record (" + tupleSize
- + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ throw HyracksDataException.create(ErrorCode.RECORD_IS_TOO_LARGE, tupleSize, maxTupleSize);
}
NodeFrontier leafFrontier = nodeFrontiers.get(0);
@@ -942,8 +933,8 @@
}
pagesToWrite.clear();
- leafFrontier.page = bufferCache
- .confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
+ leafFrontier.page =
+ bufferCache.confiscatePage(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId));
leafFrame.setPage(leafFrontier.page);
leafFrame.initBuffer((byte) 0);
@@ -1035,9 +1026,9 @@
((RTreeNSMFrame) lowerFrame).adjustMBR();
if (mbr == null) {
- int bytesRequired = interiorFrameTupleWriter
- .bytesRequired(((RTreeNSMFrame) lowerFrame).getMBRTuples()[0], 0, cmp.getKeyFieldCount())
- + ((RTreeNSMInteriorFrame) interiorFrame).getChildPointerSize();
+ int bytesRequired =
+ interiorFrameTupleWriter.bytesRequired(((RTreeNSMFrame) lowerFrame).getMBRTuples()[0], 0,
+ cmp.getKeyFieldCount()) + ((RTreeNSMInteriorFrame) interiorFrame).getChildPointerSize();
mbr = ByteBuffer.allocate(bytesRequired);
}
interiorFrameTupleWriter.writeTupleFields(((RTreeNSMFrame) lowerFrame).getMBRTuples(), 0, mbr, 0);
@@ -1049,8 +1040,8 @@
// load where finalization can possibly lead to a split
//TODO: accomplish this without wasting 1 tuple
int sizeOfTwoTuples = 2 * (mbrTuple.getTupleSize() + RTreeNSMInteriorFrame.childPtrSize);
- FrameOpSpaceStatus spaceForTwoTuples = (((RTreeNSMInteriorFrame) interiorFrame)
- .hasSpaceInsert(sizeOfTwoTuples));
+ FrameOpSpaceStatus spaceForTwoTuples =
+ (((RTreeNSMInteriorFrame) interiorFrame).hasSpaceInsert(sizeOfTwoTuples));
if (spaceForTwoTuples != FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE && !toRoot) {
int finalPageId = freePageManager.takePage(metaFrame);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
index 8988605..38486cd 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -28,6 +28,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -47,9 +48,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
-import org.apache.hyracks.storage.am.common.api.UnsortedInputException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.junit.Test;
@@ -61,7 +59,7 @@
protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
int[] bloomFilterKeyFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
- int[] btreeFields, int[] filterFields) throws TreeIndexException, HyracksDataException;
+ int[] btreeFields, int[] filterFields) throws HyracksDataException;
/**
* Fixed-Length Key,Value Example. Create a tree index with one fixed-length
@@ -80,8 +78,8 @@
typeTraits[0] = IntegerPointable.TYPE_TRAITS;
typeTraits[1] = IntegerPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Declare keys.
int keyFieldCount = 1;
@@ -102,8 +100,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
int f0 = rnd.nextInt() % numInserts;
@@ -116,7 +114,10 @@
}
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
@@ -164,8 +165,8 @@
typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Declare keys.
int keyFieldCount = 1;
@@ -182,8 +183,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
String key = "111";
String data = "XXX";
@@ -263,8 +264,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < 10000; i++) {
int f0 = rnd.nextInt() % 2000;
@@ -278,7 +279,10 @@
}
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
@@ -324,8 +328,8 @@
typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Declare keys.
int keyFieldCount = 1;
@@ -346,8 +350,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// Max string length to be generated.
int maxLength = 10;
int numInserts = 10000;
@@ -362,7 +366,10 @@
}
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
@@ -408,8 +415,8 @@
typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Declare keys.
int keyFieldCount = 1;
@@ -426,8 +433,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// Max string length to be generated.
int runs = 3;
for (int run = 0; run < runs; run++) {
@@ -455,7 +462,10 @@
try {
indexAccessor.insert(tuple);
insDone++;
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
insDoneCmp[i] = insDone;
}
@@ -474,7 +484,10 @@
try {
indexAccessor.delete(tuple);
delDone++;
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw e;
+ }
}
if (insDoneCmp[i] != delDone) {
if (LOGGER.isLoggable(Level.INFO)) {
@@ -514,8 +527,8 @@
typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Declare keys.
int keyFieldCount = 1;
@@ -533,8 +546,8 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Inserting into tree...");
}
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
int maxLength = 10;
@@ -552,7 +565,10 @@
}
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
// Print before doing any updates.
@@ -573,11 +589,7 @@
LOGGER.info("Updating " + i);
}
}
- try {
- indexAccessor.update(tuple);
- } catch (TreeIndexException e) {
- } catch (UnsupportedOperationException e) {
- }
+ indexAccessor.update(tuple);
}
// Do another scan after a round of updates.
orderedScan(indexAccessor, fieldSerdes);
@@ -640,8 +652,8 @@
LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
}
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// Build low key.
ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
@@ -693,8 +705,8 @@
int ins = 1000;
for (int i = 1; i < ins; i++) {
- ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null,
- null);
+ ITreeIndex treeIndex =
+ createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, null, null, null, null);
treeIndex.create();
treeIndex.activate();
@@ -718,19 +730,17 @@
TupleUtils.createIntegerTuple(tb, tuple, key, 5);
try {
bulkLoader.add(tuple);
- } catch (UnsortedInputException e) {
- if (j != i) {
- fail("Unexpected exception: " + e.getMessage());
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() == ErrorCode.UNSORTED_LOAD_INPUT || e.getErrorCode() == ErrorCode.DUPLICATE_KEY
+ || e.getErrorCode() == ErrorCode.DUPLICATE_LOAD_INPUT) {
+ if (j != i) {
+ fail("Unexpected exception: " + e.getMessage());
+ }
+ // Success.
+ break;
+ } else {
+ throw e;
}
- // Success.
-
- break;
- } catch (TreeIndexDuplicateKeyException e2) {
- if (j != i) {
- fail("Unexpected exception: " + e2.getMessage());
- }
- // Success.
- break;
}
}
treeIndex.deactivate();
@@ -765,8 +775,8 @@
LOGGER.info("Disk-Order Scan:");
}
ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
- TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
- .createDiskOrderScanCursor();
+ TreeIndexDiskOrderScanCursor diskOrderCursor =
+ (TreeIndexDiskOrderScanCursor) treeIndexAccessor.createDiskOrderScanCursor();
treeIndexAccessor.diskOrderScan(diskOrderCursor);
try {
while (diskOrderCursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
index b9c0286..ec56549 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -34,7 +34,6 @@
import org.apache.hyracks.storage.am.common.IndexMultiThreadTestDriver;
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.junit.Test;
@@ -56,7 +55,7 @@
protected abstract void tearDown() throws HyracksDataException;
protected abstract IIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
- int[] bloomFilterKeyFields) throws TreeIndexException, HyracksDataException;
+ int[] bloomFilterKeyFields) throws HyracksDataException;
protected abstract IIndexTestWorkerFactory getWorkerFactory();
@@ -65,7 +64,7 @@
protected abstract String getIndexTypeName();
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf,
- String dataMsg) throws InterruptedException, TreeIndexException, HyracksDataException {
+ String dataMsg) throws InterruptedException, HyracksDataException {
setUp();
if (LOGGER.isLoggable(Level.INFO)) {
@@ -88,8 +87,8 @@
// 4 batches per thread.
int batchSize = (NUM_OPERATIONS / numThreads) / 4;
- IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops,
- conf.opProbs);
+ IndexMultiThreadTestDriver driver =
+ new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
driver.init();
long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
index.validate();
@@ -103,7 +102,7 @@
}
@Test
- public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
+ public void oneIntKeyAndValue() throws InterruptedException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE };
int numKeys = 1;
@@ -116,9 +115,9 @@
}
@Test
- public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksDataException {
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
+ public void oneStringKeyAndValue() throws InterruptedException, HyracksDataException {
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer() };
int numKeys = 1;
String dataMsg = "One String Key And Value";
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 2a16ebe36..970f49d 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -32,6 +32,7 @@
import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -47,8 +48,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@SuppressWarnings("rawtypes")
@@ -58,13 +57,13 @@
private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
for (int i = 0; i < fieldSerdes.length; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
- actual.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i), actual.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object actualObj = fieldSerdes[i].deserialize(dataIn);
if (!actualObj.equals(expected.getField(i))) {
- fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.getField(i)
- + "\nActual : " + actualObj);
+ fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.getField(i)
+ + "\nActual : " + actualObj);
}
}
}
@@ -81,11 +80,11 @@
CheckTuple high = checkTuples.floor(highKey);
if (low == null || high == null) {
// Must be empty.
- return new TreeSet<CheckTuple>();
+ return new TreeSet<>();
}
if (high.compareTo(low) < 0) {
// Must be empty.
- return new TreeSet<CheckTuple>();
+ return new TreeSet<>();
}
return checkTuples.subSet(low, true, high, true);
}
@@ -99,20 +98,20 @@
MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor(false);
- RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
- highKeyCmp);
+ RangePredicate rangePred =
+ new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
ctx.getIndexAccessor().search(searchCursor, rangePred);
// Get the subset of elements from the expected set within given key
// range.
CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
- CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
- highKeyCmp.getKeyFieldCount());
+ CheckTuple highKeyCheck =
+ createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(), highKeyCmp.getKeyFieldCount());
SortedSet<CheckTuple> expectedSubset = null;
if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
|| highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
// Searching on a key prefix (low key or high key or both).
- expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
- highKeyCheck);
+ expectedSubset =
+ getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck, highKeyCheck);
} else {
// Searching on all key fields.
expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
@@ -189,7 +188,7 @@
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
int[] fieldValues = new int[ctx.getFieldCount()];
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
for (int i = 0; i < numTuples; i++) {
// Set keys.
@@ -214,7 +213,7 @@
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
String[] fieldValues = new String[fieldCount];
- TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+ TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<>();
for (int i = 0; i < numTuples; i++) {
// Set keys.
for (int j = 0; j < numKeyFields; j++) {
@@ -238,7 +237,7 @@
}
public static void insertCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
@@ -283,8 +282,11 @@
// Set expected values. Do this only after insertion succeeds
// because we ignore duplicate keys.
ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
- } catch (TreeIndexDuplicateKeyException e) {
+ } catch (HyracksDataException e) {
// Ignore duplicate key insertions.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
}
@@ -320,7 +322,7 @@
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
String[] fieldValues = new String[fieldCount];
- TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+ TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<>();
for (int i = 0; i < numTuples; i++) {
// Set keys.
for (int j = 0; j < numKeyFields; j++) {
@@ -343,6 +345,7 @@
}
}
+ @Override
public void upsertIntTuples(IIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
int fieldCount = ctx.getFieldCount();
@@ -351,7 +354,7 @@
// Scale range of values according to number of keys.
// For example, for 2 keys we want the square root of numTuples, for 3
// keys the cube root of numTuples, etc.
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
for (int i = 0; i < numTuples; i++) {
// Set keys.
setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -413,9 +416,9 @@
}
public CheckTuple createStringCheckTuple(String[] fieldValues, int numKeyFields) {
- CheckTuple<String> checkTuple = new CheckTuple<String>(fieldValues.length, numKeyFields);
+ CheckTuple<String> checkTuple = new CheckTuple<>(fieldValues.length, numKeyFields);
for (String s : fieldValues) {
- checkTuple.appendField((String) s);
+ checkTuple.appendField(s);
}
return checkTuple;
}
@@ -475,7 +478,7 @@
@Override
protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
- CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldValues.length, numKeyFields);
+ CheckTuple<Integer> checkTuple = new CheckTuple<>(fieldValues.length, numKeyFields);
for (int v : fieldValues) {
checkTuple.appendField(v);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
index c1538f2..b6b19cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/AbstractIndexTestWorker.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
@@ -39,8 +38,8 @@
protected final IIndexAccessor indexAccessor;
- public AbstractIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index, int numBatches)
- throws HyracksDataException {
+ public AbstractIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
+ int numBatches) throws HyracksDataException {
this.dataGen = dataGen;
this.opSelector = opSelector;
this.numBatches = numBatches;
@@ -65,7 +64,7 @@
}
}
- protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException, IndexException {
+ protected void consumeCursorTuples(IIndexCursor cursor) throws HyracksDataException {
try {
while (cursor.hasNext()) {
cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
index 144e9ea..ddbe73a 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/ITreeIndexTestWorker.java
@@ -22,8 +22,8 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
-import org.apache.hyracks.storage.am.common.api.IndexException;
+@FunctionalInterface
public interface ITreeIndexTestWorker {
- void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException;
+ void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
index 32dd15d..a765637 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexMultiThreadTestDriver.java
@@ -23,7 +23,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
@SuppressWarnings("rawtypes")
@@ -49,8 +48,8 @@
index.activate();
}
- public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException,
- TreeIndexException, HyracksDataException {
+ public long[] run(int numThreads, int numRepeats, int numOps, int batchSize)
+ throws InterruptedException, HyracksDataException {
int numBatches = (batchSize < 1 ? numOps : numOps / batchSize);
if (numBatches < numThreads) {
numThreads = numBatches;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 94c0ab4..b801715 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -32,6 +32,7 @@
import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
@@ -41,8 +42,6 @@
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
@SuppressWarnings("rawtypes")
public abstract class TreeIndexTestUtils {
@@ -88,8 +87,8 @@
CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
for (int i = 0; i < fieldCount; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
checkTuple.appendField(fieldObj);
@@ -122,8 +121,8 @@
while (diskOrderCursor.hasNext()) {
diskOrderCursor.next();
ITupleReference tuple = diskOrderCursor.getTuple();
- CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
- ctx.getKeyFieldCount());
+ CheckTuple checkTuple =
+ createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(), ctx.getKeyFieldCount());
if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
}
@@ -140,9 +139,8 @@
} finally {
try {
diskOrderCursor.close();
- }
- catch(Exception ex){
- LOGGER.log(Level.WARNING,"Error during scan cursor close",ex);
+ } catch (Exception ex) {
+ LOGGER.log(Level.WARNING, "Error during scan cursor close", ex);
}
}
} catch (UnsupportedOperationException e) {
@@ -168,7 +166,7 @@
// Scale range of values according to number of keys.
// For example, for 2 keys we want the square root of numTuples, for 3
// keys the cube root of numTuples, etc.
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
for (int i = 0; i < numTuples; i++) {
// Set keys.
setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -183,9 +181,12 @@
try {
ctx.getIndexAccessor().insert(ctx.getTuple());
ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
// We set expected values only after insertion succeeds because
// we ignore duplicate keys.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
}
@@ -198,7 +199,7 @@
// Scale range of values according to number of keys.
// For example, for 2 keys we want the square root of numTuples, for 3
// keys the cube root of numTuples, etc.
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
for (int i = 0; i < numTuples; i++) {
// Set keys.
setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -213,9 +214,12 @@
try {
ctx.getIndexAccessor().upsert(ctx.getTuple());
ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
// We set expected values only after insertion succeeds because
// we ignore duplicate keys.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
}
@@ -225,7 +229,7 @@
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
int[] fieldValues = new int[ctx.getFieldCount()];
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
for (int i = 0; i < numTuples; i++) {
// Set keys.
@@ -246,7 +250,7 @@
}
public static void bulkLoadCheckTuples(IIndexTestContext ctx, Collection<CheckTuple> checkTuples)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
@@ -257,7 +261,7 @@
for (CheckTuple checkTuple : checkTuples) {
if (LOGGER.isLoggable(Level.INFO)) {
//if (c % (numTuples / 10) == 0) {
- LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+ LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
//}
}
createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes());
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
index ba55a1d..cc8445a 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -26,6 +26,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.DoublePointable;
@@ -45,7 +46,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.impls.TreeIndexDiskOrderScanCursor;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -69,10 +69,9 @@
protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
- IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
- int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
- IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
- HyracksDataException;
+ IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType, int[] rtreeFields,
+ int[] btreeFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
+ int[] filterFields) throws HyracksDataException;
/**
* Two Dimensions Example. Create an RTree index of two dimensions, where
@@ -96,10 +95,10 @@
typeTraits[4] = IntegerPointable.TYPE_TRAITS;
typeTraits[5] = IntegerPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Declare RTree keys.
int rtreeKeyFieldCount = 4;
@@ -136,11 +135,11 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
@@ -150,8 +149,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
int p1x = rnd.nextInt();
@@ -166,7 +165,10 @@
Math.max(p1y, p2y), pk1, pk2);
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
@@ -246,20 +248,20 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
//2
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int p1x = rnd.nextInt();
int p1y = rnd.nextInt();
@@ -386,19 +388,19 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RSTARTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RSTARTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int p1x = rnd.nextInt();
int p1y = rnd.nextInt();
@@ -535,12 +537,12 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, DoublePointable.FACTORY);
//4
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
@@ -550,8 +552,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
double p1x = rnd.nextDouble();
@@ -567,7 +569,10 @@
Math.max(p1x, p2x), Math.max(p1y, p2y), Math.max(p1z, p2z), pk);
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
@@ -643,18 +648,18 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
int runs = 3;
for (int run = 0; run < runs; run++) {
@@ -689,7 +694,10 @@
Math.max(p1y, p2y), pk);
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
insDoneCmp[i] = insDone;
}
@@ -703,7 +711,10 @@
try {
indexAccessor.delete(tuple);
delDone++;
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw e;
+ }
}
if (insDoneCmp[i] != delDone) {
if (LOGGER.isLoggable(Level.INFO)) {
@@ -779,12 +790,12 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
//6
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, null, btreeFields, null, null, null);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, null, btreeFields, null, null, null);
treeIndex.create();
treeIndex.activate();
@@ -817,8 +828,8 @@
LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
}
- IIndexAccessor indexAccessor = treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
// Build key.
ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
@@ -858,8 +869,8 @@
LOGGER.info("Disk-Order Scan:");
}
ITreeIndexAccessor treeIndexAccessor = (ITreeIndexAccessor) indexAccessor;
- TreeIndexDiskOrderScanCursor diskOrderCursor = (TreeIndexDiskOrderScanCursor) treeIndexAccessor
- .createDiskOrderScanCursor();
+ TreeIndexDiskOrderScanCursor diskOrderCursor =
+ (TreeIndexDiskOrderScanCursor) treeIndexAccessor.createDiskOrderScanCursor();
treeIndexAccessor.diskOrderScan(diskOrderCursor);
try {
while (diskOrderCursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
index d6f07d9..53245ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -37,7 +37,6 @@
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -72,7 +71,7 @@
protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType, int[] btreeFields)
- throws TreeIndexException, HyracksDataException;
+ throws HyracksDataException;
protected abstract IIndexTestWorkerFactory getWorkerFactory();
@@ -82,8 +81,7 @@
protected void runTest(ISerializerDeserializer[] fieldSerdes,
IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, RTreePolicyType rtreePolicyType,
- int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException,
- TreeIndexException {
+ int numThreads, TestWorkloadConf conf, String dataMsg) throws HyracksDataException, InterruptedException {
setUp();
if (LOGGER.isLoggable(Level.INFO)) {
@@ -116,8 +114,8 @@
// 4 batches per thread.
int batchSize = (NUM_OPERATIONS / numThreads) / 4;
- IndexMultiThreadTestDriver driver = new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops,
- conf.opProbs);
+ IndexMultiThreadTestDriver driver =
+ new IndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
driver.init();
long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
driver.deinit();
@@ -130,14 +128,14 @@
}
@Test
- public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
+ public void rtreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
String dataMsg = "Two Dimensions Of Integer Values";
@@ -156,8 +154,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
String dataMsg = "Two Dimensions Of Double Values";
@@ -171,7 +169,7 @@
}
@Test
- public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
+ public void rtreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
@@ -179,8 +177,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
String dataMsg = "Four Dimensions Of Double Values";
@@ -193,7 +191,7 @@
}
@Test
- public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException, TreeIndexException {
+ public void rstartreeTwoDimensionsInt() throws InterruptedException, HyracksDataException {
if (!testRstarPolicy) {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Ignoring RTree Multithread Test With Two Dimensions With Integer Keys.");
@@ -206,8 +204,8 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
String dataMsg = "Two Dimensions Of Integer Values";
@@ -233,8 +231,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
String dataMsg = "Two Dimensions Of Double Values";
@@ -248,7 +246,7 @@
}
@Test
- public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException, TreeIndexException {
+ public void rstartreeFourDimensionsDouble() throws InterruptedException, HyracksDataException {
if (!testRstarPolicy) {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Ignoring RTree Multithread Test With Four Dimensions With Double Keys.");
@@ -263,8 +261,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
String dataMsg = "Four Dimensions Of Double Values";
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 13a4fcb..301b448 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -28,6 +28,7 @@
import java.util.logging.Logger;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -37,7 +38,6 @@
import org.apache.hyracks.storage.am.common.TreeIndexTestUtils;
import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.util.HashMultiSet;
import org.apache.hyracks.storage.am.rtree.impls.SearchPredicate;
@@ -53,7 +53,7 @@
// Create a new ArrayList containing the elements satisfying the search key
public HashMultiSet<RTreeCheckTuple> getRangeSearchExpectedResults(Collection<RTreeCheckTuple> checkTuples,
RTreeCheckTuple key) {
- HashMultiSet<RTreeCheckTuple> expectedResult = new HashMultiSet<RTreeCheckTuple>();
+ HashMultiSet<RTreeCheckTuple> expectedResult = new HashMultiSet<>();
Iterator<RTreeCheckTuple> iter = checkTuples.iterator();
while (iter.hasNext()) {
RTreeCheckTuple t = iter.next();
@@ -77,8 +77,8 @@
// Get the subset of elements from the expected set within given key
// range.
- RTreeCheckTuple keyCheck = (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(),
- cmp.getKeyFieldCount());
+ RTreeCheckTuple keyCheck =
+ (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(), cmp.getKeyFieldCount());
HashMultiSet<RTreeCheckTuple> expectedResult = null;
@@ -94,7 +94,7 @@
// Scale range of values according to number of keys.
// For example, for 2 keys we want the square root of numTuples, for 3
// keys the cube root of numTuples, etc.
- double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
for (int i = 0; i < numTuples; i++) {
// Set keys.
setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
@@ -109,10 +109,12 @@
try {
ctx.getIndexAccessor().insert(ctx.getTuple());
ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
// We set expected values only after insertion succeeds because
- // we
- // ignore duplicate keys.
+ // we ignore duplicate keys.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
}
@@ -139,7 +141,7 @@
@SuppressWarnings("unchecked")
protected CheckTuple createDoubleCheckTuple(double[] fieldValues, int numKeyFields) {
- RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<Double>(fieldValues.length, numKeyFields);
+ RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<>(fieldValues.length, numKeyFields);
for (double v : fieldValues) {
checkTuple.appendField(v);
}
@@ -151,7 +153,7 @@
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
double[] fieldValues = new double[ctx.getFieldCount()];
- double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / numKeyFields));
Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
for (int i = 0; i < numTuples; i++) {
// Set keys.
@@ -178,8 +180,8 @@
while (cursor.hasNext()) {
cursor.next();
ITupleReference tuple = cursor.getTuple();
- RTreeCheckTuple checkTuple = (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes,
- keyFieldCount);
+ RTreeCheckTuple checkTuple =
+ (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes, keyFieldCount);
if (!checkTuples.contains(checkTuple)) {
fail("Scan or range search returned unexpected answer: " + checkTuple.toString());
}
@@ -211,7 +213,7 @@
@SuppressWarnings("unchecked")
@Override
protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
- RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<Integer>(fieldValues.length, numKeyFields);
+ RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<>(fieldValues.length, numKeyFields);
for (int v : fieldValues) {
checkTuple.appendField(v);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
index 3f71dd9..f44dee2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -28,12 +28,6 @@
import java.util.TreeSet;
import java.util.logging.Level;
-import org.apache.hyracks.storage.am.common.api.*;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -47,7 +41,6 @@
import org.apache.hyracks.dataflow.common.utils.TupleUtils;
import org.apache.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
import org.apache.hyracks.storage.am.btree.impls.BTree;
@@ -56,10 +49,18 @@
import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
import org.apache.hyracks.storage.am.common.TestOperationCallback;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
public class BTreeSearchCursorTest extends AbstractBTreeTest {
private final int fieldCount = 2;
@@ -68,6 +69,7 @@
private final ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
private final Random rnd = new Random(50);
+ @Override
@Before
public void setUp() throws HyracksDataException {
super.setUp();
@@ -104,14 +106,14 @@
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// generate keys
int numKeys = 50;
int maxKey = 1000;
- TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
- ArrayList<Integer> keys = new ArrayList<Integer>();
+ TreeSet<Integer> uniqueKeys = new TreeSet<>();
+ ArrayList<Integer> keys = new ArrayList<>();
while (uniqueKeys.size() < numKeys) {
int key = rnd.nextInt() % maxKey;
uniqueKeys.add(key);
@@ -128,7 +130,6 @@
try {
indexAccessor.insert(tuple);
- } catch (BTreeException e) {
} catch (Exception e) {
e.printStackTrace();
}
@@ -138,14 +139,14 @@
int maxSearchKey = 100;
// forward searches
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- false, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
btree.deactivate();
btree.destroy();
@@ -181,13 +182,13 @@
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// generate keys
int numKeys = 50;
int maxKey = 10;
- ArrayList<Integer> keys = new ArrayList<Integer>();
+ ArrayList<Integer> keys = new ArrayList<>();
for (int i = 0; i < numKeys; i++) {
int k = rnd.nextInt() % maxKey;
keys.add(k);
@@ -202,7 +203,6 @@
try {
indexAccessor.insert(tuple);
- } catch (BTreeException e) {
} catch (Exception e) {
e.printStackTrace();
}
@@ -212,14 +212,14 @@
int maxSearchKey = 100;
// forward searches
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- false, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
btree.deactivate();
btree.destroy();
@@ -255,13 +255,13 @@
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// generate keys
int numKeys = 50;
int maxKey = 10;
- ArrayList<Integer> keys = new ArrayList<Integer>();
+ ArrayList<Integer> keys = new ArrayList<>();
for (int i = 0; i < numKeys; i++) {
int k = rnd.nextInt() % maxKey;
keys.add(k);
@@ -276,7 +276,6 @@
try {
indexAccessor.insert(tuple);
- } catch (BTreeException e) {
} catch (Exception e) {
e.printStackTrace();
}
@@ -286,14 +285,14 @@
int maxSearchKey = 100;
// forward searches
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false,
- true, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- false, false));
- Assert.assertTrue(performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true,
- true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false));
+ Assert.assertTrue(
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false));
btree.deactivate();
btree.destroy();
@@ -310,8 +309,8 @@
searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
MultiComparator searchCmp = new MultiComparator(searchCmps);
- RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp,
- searchCmp);
+ RangePredicate rangePred =
+ new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp, searchCmp);
return rangePred;
}
@@ -319,10 +318,12 @@
boolean lowKeyInclusive, boolean highKeyInclusive) {
// special cases
- if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
+ if (lk == hk && (!lowKeyInclusive || !highKeyInclusive)) {
return;
- if (lk > hk)
+ }
+ if (lk > hk) {
return;
+ }
for (int i = 0; i < keys.size(); i++) {
if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
@@ -341,8 +342,8 @@
IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
- ArrayList<Integer> results = new ArrayList<Integer>();
- ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+ ArrayList<Integer> results = new ArrayList<>();
+ ArrayList<Integer> expectedResults = new ArrayList<>();
for (int i = minKey; i < maxKey; i++) {
for (int j = minKey; j < maxKey; j++) {
@@ -355,8 +356,8 @@
ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive);
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
indexAccessor.search(rangeCursor, rangePred);
try {
@@ -381,15 +382,17 @@
if (expectedResults.size() > 0) {
char l, u;
- if (lowKeyInclusive)
+ if (lowKeyInclusive) {
l = '[';
- else
+ } else {
l = '(';
+ }
- if (highKeyInclusive)
+ if (highKeyInclusive) {
u = ']';
- else
+ } else {
u = ')';
+ }
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
index e4c1449..ee7e968 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -22,9 +22,6 @@
import java.util.Random;
import java.util.logging.Level;
-import org.apache.hyracks.storage.am.common.api.*;
-import org.junit.Test;
-
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.comm.VSizeFrame;
@@ -33,6 +30,8 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -48,6 +47,10 @@
import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
import org.apache.hyracks.storage.am.common.TestOperationCallback;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrame;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
@@ -58,6 +61,7 @@
import org.apache.hyracks.storage.common.file.IFileMapProvider;
import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
import org.apache.hyracks.test.support.TestUtils;
+import org.junit.Test;
@SuppressWarnings("rawtypes")
public class BTreeStatsTest extends AbstractBTreeTest {
@@ -97,8 +101,8 @@
IMetadataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory,
- cmpFactories, fieldCount, harness.getFileReference());
+ BTree btree = new BTree(bufferCache, fmp, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories,
+ fieldCount, harness.getFileReference());
btree.create();
btree.activate();
@@ -116,15 +120,15 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
DataOutput dos = tb.getDataOutput();
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] recDescSers =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
IFrameTupleAccessor accessor = new FrameTupleAccessor(recDesc);
accessor.reset(frame.getBuffer());
FrameTupleReference tuple = new FrameTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
// 10000
for (int i = 0; i < 100000; i++) {
@@ -151,22 +155,24 @@
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ e.printStackTrace();
+ throw e;
+ }
}
}
int fileId = fmp.lookupFileId(harness.getFileReference());
- TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
- btree.getRootPageId());
+ TreeIndexStatsGatherer statsGatherer =
+ new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("\n" + stats.toString());
}
- TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
- fileId);
+ TreeIndexBufferCacheWarmup bufferCacheWarmup =
+ new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });
btree.deactivate();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
index 8fc5a09..78023a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -21,13 +21,11 @@
import java.util.Random;
import java.util.logging.Level;
-import org.apache.hyracks.storage.am.common.api.*;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
-import org.junit.Test;
-
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -44,9 +42,15 @@
import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
import org.apache.hyracks.storage.am.common.TestOperationCallback;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManager;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
import org.apache.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
+import org.junit.Test;
public class BTreeUpdateSearchTest extends AbstractBTreeTest {
@@ -67,8 +71,8 @@
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
@SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] recDescSers =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -94,8 +98,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference insertTuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ btree.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
@@ -111,9 +115,11 @@
try {
indexAccessor.insert(insertTuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
+ } catch (HyracksDataException hde) {
+ if (hde.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ hde.printStackTrace();
+ throw hde;
+ }
}
}
long end = System.currentTimeMillis();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index 4420fb0..d284193 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -23,9 +23,6 @@
import java.util.Random;
import java.util.logging.Level;
-import org.junit.Assert;
-import org.junit.Test;
-
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.comm.VSizeFrame;
@@ -43,7 +40,6 @@
import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
import org.apache.hyracks.storage.am.btree.util.AbstractBTreeTest;
import org.apache.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
@@ -54,6 +50,8 @@
import org.apache.hyracks.storage.common.buffercache.ICachedPage;
import org.apache.hyracks.storage.common.file.BufferedFileHandle;
import org.apache.hyracks.storage.common.file.IFileMapProvider;
+import org.junit.Assert;
+import org.junit.Test;
public class FieldPrefixNSMTest extends AbstractBTreeTest {
@@ -172,8 +170,6 @@
try {
int targetTupleIndex = frame.findInsertTupleIndex(tuple);
frame.insert(tuple, targetTupleIndex);
- } catch (BTreeException e) {
- e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
@@ -206,8 +202,8 @@
}
}
- ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2],
- false);
+ ITupleReference tuple =
+ createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
try {
int tupleIndex = frame.findDeleteTupleIndex(tuple);
frame.delete(tuple, tupleIndex);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
index 77ad13c..3734f08 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/org/apache/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
@@ -19,11 +19,11 @@
package org.apache.hyracks.storage.am.btree.multithread;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
import org.apache.hyracks.storage.am.btree.impls.BTree;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.AbstractIndexTestWorker;
@@ -31,10 +31,7 @@
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
public class BTreeTestWorker extends AbstractIndexTestWorker {
@@ -53,7 +50,7 @@
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
@@ -64,8 +61,11 @@
case INSERT:
try {
accessor.insert(tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // Ignore duplicate keys, since we get random tuples.
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ // Ignore duplicate keys, since we get random tuples.
+ throw e;
+ }
}
break;
@@ -78,18 +78,25 @@
deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
try {
accessor.delete(deleteTuple);
- } catch (TreeIndexNonExistentKeyException e) {
- // Ignore non-existant keys, since we get random tuples.
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ // Ignore non-existant keys, since we get random tuples.
+ throw e;
+ }
}
break;
case UPDATE:
try {
accessor.update(tuple);
- } catch (TreeIndexNonExistentKeyException e) {
+ } catch (HyracksDataException e) {
// Ignore non-existant keys, since we get random tuples.
- } catch (BTreeNotUpdateableException e) {
- // Ignore not updateable exception due to numKeys == numFields.
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
+ && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
+ // Ignore non-existant keys, since we get random tuples.
+ // Ignore not updateable exception due to numKeys == numFields.
+ throw e;
+ }
}
break;
@@ -126,7 +133,7 @@
}
}
- private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+ private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
try {
while (cursor.hasNext()) {
cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index 0cdcf48..4b28e25 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -35,7 +35,6 @@
import org.apache.hyracks.storage.am.common.TestOperationCallback;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.btree.utils.LSMBTreeUtil;
import org.junit.After;
@@ -48,14 +47,13 @@
@Override
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
int[] bloomFilterKeyFields, ITypeTraits[] filterTypeTraits, IBinaryComparatorFactory[] filterCmpFactories,
- int[] btreeFields, int[] filterFields) throws TreeIndexException, HyracksDataException {
- return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
- .getFileReference(),
- harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, cmpFactories,
- bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ int[] btreeFields, int[] filterFields) throws HyracksDataException {
+ return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+ harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+ cmpFactories, bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(), true,
- filterTypeTraits, filterCmpFactories, btreeFields, filterFields, true, harness
- .getMetadataPageManagerFactory());
+ filterTypeTraits, filterCmpFactories, btreeFields, filterFields, true,
+ harness.getMetadataPageManagerFactory());
}
@Before
@@ -83,8 +81,8 @@
typeTraits[0] = IntegerPointable.TYPE_TRAITS;
typeTraits[1] = IntegerPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Declare keys.
int keyFieldCount = 1;
@@ -96,8 +94,8 @@
bloomFilterKeyFields[0] = 0;
ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
- IBinaryComparatorFactory[] filterCmpFactories = { PointableBinaryComparatorFactory.of(
- IntegerPointable.FACTORY) };
+ IBinaryComparatorFactory[] filterCmpFactories =
+ { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
int[] filterFields = { 1 };
int[] btreeFields = { 1 };
ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories, bloomFilterKeyFields, filterTypeTraits,
@@ -111,8 +109,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = treeIndex.createAccessor(TestOperationCallback.INSTANCE,
- TestOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
int f0 = rnd.nextInt() % numInserts;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
index cfd2196..9abc321 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
import org.apache.hyracks.storage.am.lsm.btree.util.LSMBTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.btree.utils.LSMBTreeUtil;
@@ -52,9 +51,9 @@
@Override
protected ITreeIndex createIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories,
- int[] bloomFilterKeyFields) throws TreeIndexException, HyracksDataException {
- return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
- .getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+ int[] bloomFilterKeyFields) throws HyracksDataException {
+ return LSMBTreeUtil.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+ harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
cmpFactories, bloomFilterKeyFields, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(), true, null,
null, null, null, true, harness.getMetadataPageManagerFactory());
@@ -71,41 +70,41 @@
// Insert only workload.
TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
- workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
- .getUniformProbDist(insertOnlyOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
// Insert and merge workload.
TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
- workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
- .getUniformProbDist(insertMergeOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper.getUniformProbDist(insertMergeOps.length)));
// Inserts mixed with point searches and scans.
- TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
- TestOperation.SCAN };
- workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
- .getUniformProbDist(insertSearchOnlyOps.length)));
+ TestOperation[] insertSearchOnlyOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
+ ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
// Inserts, updates, and deletes.
- TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.UPDATE };
- workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
- .getUniformProbDist(insertDeleteUpdateOps.length)));
+ TestOperation[] insertDeleteUpdateOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps,
+ ProbabilityHelper.getUniformProbDist(insertDeleteUpdateOps.length)));
// Inserts, updates, deletes and merges.
TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
TestOperation.UPDATE, TestOperation.MERGE };
- workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
- .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps,
+ ProbabilityHelper.getUniformProbDist(insertDeleteUpdateMergeOps.length)));
// All operations except merge.
TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN };
- workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
- .getUniformProbDist(allNoMergeOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
// All operations.
- TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE,
+ TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
workloadConfs.add(new TestWorkloadConf(allOps, ProbabilityHelper.getUniformProbDist(allOps.length)));
return workloadConfs;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
index 56f40b0..e849828 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
@@ -19,21 +19,18 @@
package org.apache.hyracks.storage.am.lsm.btree.multithread;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.AbstractIndexTestWorker;
import org.apache.hyracks.storage.am.common.TestOperationSelector;
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException;
-import org.apache.hyracks.storage.am.common.exceptions.TreeIndexNonExistentKeyException;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree;
import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree.LSMBTreeAccessor;
@@ -54,7 +51,7 @@
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
LSMBTreeAccessor accessor = (LSMBTreeAccessor) indexAccessor;
IIndexCursor searchCursor = accessor.createSearchCursor(false);
MultiComparator cmp = accessor.getMultiComparator();
@@ -64,8 +61,11 @@
case INSERT:
try {
accessor.insert(tuple);
- } catch (TreeIndexDuplicateKeyException e) {
- // Ignore duplicate keys, since we get random tuples.
+ } catch (HyracksDataException e) {
+ // Ignore duplicate keys, since we get random tuples.
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
break;
@@ -78,18 +78,24 @@
deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
try {
accessor.delete(deleteTuple);
- } catch (TreeIndexNonExistentKeyException e) {
+ } catch (HyracksDataException e) {
// Ignore non-existant keys, since we get random tuples.
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY) {
+ throw e;
+ }
}
break;
case UPDATE:
try {
accessor.update(tuple);
- } catch (TreeIndexNonExistentKeyException e) {
- // Ignore non-existant keys, since we get random tuples.
- } catch (BTreeNotUpdateableException e) {
- // Ignore not updateable exception due to numKeys == numFields.
+ } catch (HyracksDataException e) {
+ // Ignore non-existent keys, since we get random tuples.
+ // Ignore not updateable exception due to numKeys == numFields.
+ if (e.getErrorCode() != ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY
+ && e.getErrorCode() != ErrorCode.INDEX_NOT_UPDATABLE) {
+ throw e;
+ }
}
break;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
index 62c0c43..f3e8ad8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeBulkLoadRunner.java
@@ -22,7 +22,6 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
@@ -32,7 +31,7 @@
protected final float fillFactor;
public BTreeBulkLoadRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws BTreeException, HyracksDataException {
+ IBinaryComparatorFactory[] cmpFactories, float fillFactor) throws HyracksDataException {
super(numBatches, pageSize, numPages, typeTraits, cmpFactories);
this.fillFactor = fillFactor;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
index 5b84652..f6809b2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreeRunner.java
@@ -23,7 +23,6 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
@@ -38,20 +37,20 @@
protected static final int HYRACKS_FRAME_SIZE = 128;
public BTreeRunner(int numTuples, int pageSize, int numPages, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
+ IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException {
super(numTuples, pageSize, numPages, typeTraits, cmpFactories);
}
@Override
protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
- throws HyracksDataException, BTreeException {
+ throws HyracksDataException {
IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
TestStorageManagerComponentHolder.init(pageSize, numPages, MAX_OPEN_FILES);
bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
LinkedMetaDataPageManager freePageManager = new LinkedMetaDataPageManager(bufferCache, metaFrameFactory);
- btree = BTreeUtils.createBTree(bufferCache, fmp, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM,
- file, freePageManager);
+ btree = BTreeUtils.createBTree(bufferCache, fmp, typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM, file,
+ freePageManager);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
index b4c9418..9ae5aae 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/InMemoryBTreeRunner.java
@@ -27,14 +27,12 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import org.apache.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
import org.apache.hyracks.storage.am.btree.impls.BTree;
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndexAccessor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -59,7 +57,7 @@
protected BTree btree;
public InMemoryBTreeRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] cmpFactories) throws BTreeException, HyracksDataException {
+ IBinaryComparatorFactory[] cmpFactories) throws HyracksDataException {
this.numBatches = numBatches;
TestStorageManagerComponentHolder.init(pageSize, numPages, numPages);
IIOManager ioManager = TestStorageManagerComponentHolder.getIOManager();
@@ -69,7 +67,7 @@
}
protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
- throws HyracksDataException, BTreeException {
+ throws HyracksDataException {
bufferCache = new VirtualBufferCache(new HeapBufferAllocator(), pageSize, numPages);
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -136,7 +134,9 @@
for (int j = 0; j < batch.size(); j++) {
try {
indexAccessor.insert(batch.get(j));
- } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
index d4e7886b..50592a1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
@@ -31,9 +31,7 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.control.nc.io.IOManager;
-import org.apache.hyracks.storage.am.btree.exceptions.BTreeException;
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.datagen.TupleBatch;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -84,7 +82,7 @@
public LSMTreeRunner(int numBatches, int inMemPageSize, int inMemNumPages, int onDiskPageSize, int onDiskNumPages,
ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields,
- double bloomFilterFalsePositiveRate) throws BTreeException, HyracksDataException {
+ double bloomFilterFalsePositiveRate) throws HyracksDataException {
this.numBatches = numBatches;
this.onDiskPageSize = onDiskPageSize;
@@ -101,8 +99,8 @@
List<IVirtualBufferCache> virtualBufferCaches = new ArrayList<>();
for (int i = 0; i < 2; i++) {
- IVirtualBufferCache virtualBufferCache = new VirtualBufferCache(new HeapBufferAllocator(), inMemPageSize,
- inMemNumPages / 2);
+ IVirtualBufferCache virtualBufferCache =
+ new VirtualBufferCache(new HeapBufferAllocator(), inMemPageSize, inMemNumPages / 2);
virtualBufferCaches.add(virtualBufferCache);
}
@@ -110,10 +108,9 @@
AsynchronousScheduler.INSTANCE.init(threadFactory);
lsmtree = LSMBTreeUtil.createLSMTree(ioManager, virtualBufferCaches, file, bufferCache, fmp, typeTraits,
- cmpFactories,
- bloomFilterKeyFields, bloomFilterFalsePositiveRate, new NoMergePolicy(), new ThreadCountingTracker(),
- ioScheduler, NoOpIOOperationCallback.INSTANCE, true, null, null, null, null, true,
- TestStorageManagerComponentHolder.getMetadataPageManagerFactory());
+ cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate, new NoMergePolicy(),
+ new ThreadCountingTracker(), ioScheduler, NoOpIOOperationCallback.INSTANCE, true, null, null, null,
+ null, true, TestStorageManagerComponentHolder.getMetadataPageManagerFactory());
}
@Override
@@ -184,7 +181,8 @@
for (int j = 0; j < batch.size(); j++) {
try {
lsmTreeAccessor.insert(batch.get(j));
- } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ throw e;
}
}
dataGen.releaseBatch(batch);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
index c16577b..e03f765 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
@@ -27,7 +27,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.TreeIndexFactory;
import org.apache.hyracks.storage.common.file.IFileMapProvider;
@@ -42,7 +41,7 @@
@Override
protected void cleanupAndGetValidFilesInternal(FilenameFilter filter,
TreeIndexFactory<? extends ITreeIndex> treeFactory, ArrayList<ComparableFileName> allFiles)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
File dir = new File(baseDir);
String[] files = dir.list(filter);
for (String fileName : files) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
index 71b2c64..9b1ea52 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
@@ -37,7 +37,6 @@
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.control.nc.io.IOManager;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
import org.apache.hyracks.storage.am.lsm.common.impls.LSMComponentFileReferences;
import org.apache.hyracks.storage.common.file.IFileMapProvider;
@@ -77,22 +76,22 @@
}
public void sortOrderTest(boolean testFlushFileName) throws InterruptedException, HyracksDataException {
- ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
- new DummyTreeFactory());
+ ILSMIndexFileManager fileManager =
+ new DummyLSMIndexFileManager(ioManager, fileMapProvider, file, new DummyTreeFactory());
LinkedList<String> fileNames = new LinkedList<>();
int numFileNames = 100;
long sleepTime = 5;
for (int i = 0; i < numFileNames; i++) {
- String flushFileName = fileManager.getRelFlushFileReference().getInsertIndexFileReference()
- .getFile().getName();
+ String flushFileName =
+ fileManager.getRelFlushFileReference().getInsertIndexFileReference().getFile().getName();
if (testFlushFileName) {
fileNames.addFirst(flushFileName);
}
Thread.sleep(sleepTime);
if (!testFlushFileName) {
- String secondFlushFileName = fileManager.getRelFlushFileReference()
- .getInsertIndexFileReference().getFile().getName();
+ String secondFlushFileName =
+ fileManager.getRelFlushFileReference().getInsertIndexFileReference().getFile().getName();
String mergeFileName = getMergeFileName(fileManager, flushFileName, secondFlushFileName);
fileNames.addFirst(mergeFileName);
Thread.sleep(sleepTime);
@@ -117,14 +116,14 @@
sortOrderTest(false);
}
- public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException, IndexException {
+ public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException {
String dirPath = ioManager.getIODevices().get(DEFAULT_IO_DEVICE_ID).getMount() + sep + "lsm_tree"
+ simpleDateFormat.format(new Date()) + sep;
File f = new File(dirPath);
f.mkdirs();
FileReference file = ioManager.resolveAbsolutePath(f.getAbsolutePath());
- ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
- new DummyTreeFactory());
+ ILSMIndexFileManager fileManager =
+ new DummyLSMIndexFileManager(ioManager, fileMapProvider, file, new DummyTreeFactory());
fileManager.createDirs();
List<FileReference> flushFiles = new ArrayList<>();
@@ -192,8 +191,8 @@
// Check actual files against expected files.
assertEquals(expectedValidFiles.size(), lsmComonentFileReference.size());
for (int i = 0; i < expectedValidFiles.size(); i++) {
- assertEquals(expectedValidFiles.get(i), lsmComonentFileReference.get(i).getInsertIndexFileReference()
- .getFile().getName());
+ assertEquals(expectedValidFiles.get(i),
+ lsmComonentFileReference.get(i).getInsertIndexFileReference().getFile().getName());
}
// Make sure invalid files were removed from the IODevices.
@@ -220,7 +219,7 @@
}
@Test
- public void singleIODeviceTest() throws InterruptedException, IOException, IndexException {
+ public void singleIODeviceTest() throws InterruptedException, IOException {
IOManager singleDeviceIOManager = createIOManager(1);
cleanInvalidFilesTest(singleDeviceIOManager);
cleanDirs(singleDeviceIOManager);
@@ -253,8 +252,8 @@
private FileReference simulateMerge(ILSMIndexFileManager fileManager, FileReference a, FileReference b)
throws HyracksDataException {
- LSMComponentFileReferences relMergeFileRefs = fileManager.getRelMergeFileReference(a.getFile().getName(), b
- .getFile().getName());
+ LSMComponentFileReferences relMergeFileRefs =
+ fileManager.getRelMergeFileReference(a.getFile().getName(), b.getFile().getName());
return relMergeFileRefs.getInsertIndexFileReference();
}
@@ -262,7 +261,7 @@
throws HyracksDataException {
File f1 = new File(firstFile);
File f2 = new File(lastFile);
- return fileNameManager.getRelMergeFileReference(f1.getName(), f2.getName())
- .getInsertIndexFileReference().getFile().getName();
+ return fileNameManager.getRelMergeFileReference(f1.getName(), f2.getName()).getInsertIndexFileReference()
+ .getFile().getName();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
index f336d4a..8b419b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexMergeTest.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -43,13 +42,12 @@
}
@Override
- protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
- IndexException {
+ protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
IIndex invIndex = testCtx.getIndex();
invIndex.create();
invIndex.activate();
- ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
- NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(NoOpOperationCallback.INSTANCE,
+ NoOpOperationCallback.INSTANCE);
for (int i = 0; i < maxTreesToMerge; i++) {
for (int j = 0; j < i; j++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
index 8aed83a..550d312 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/PartitionedLSMInvertedIndexMergeTest.java
@@ -21,8 +21,8 @@
import java.io.IOException;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -43,13 +43,13 @@
}
@Override
- protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
- IndexException {
+ protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
+ throws IOException, HyracksDataException {
IIndex invIndex = testCtx.getIndex();
invIndex.create();
invIndex.activate();
- ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(
- NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ ILSMIndexAccessor invIndexAccessor = (ILSMIndexAccessor) invIndex.createAccessor(NoOpOperationCallback.INSTANCE,
+ NoOpOperationCallback.INSTANCE);
for (int i = 0; i < maxTreesToMerge; i++) {
for (int j = 0; j < i; j++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
index e59f85c..f4290bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexDeleteTest.java
@@ -21,15 +21,13 @@
import java.io.IOException;
-import org.junit.Test;
-
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestUtils;
+import org.junit.Test;
public abstract class AbstractInvertedIndexDeleteTest extends AbstractInvertedIndexTest {
@@ -42,11 +40,11 @@
this.bulkLoad = bulkLoad;
}
- protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
- IndexException {
+ protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
IIndex invIndex = testCtx.getIndex();
- if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM) || !bulkLoad) {
+ if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)
+ || !bulkLoad) {
invIndex.create();
invIndex.activate();
}
@@ -66,8 +64,8 @@
}
// Delete all documents in a couple of rounds.
- int numTuplesPerDeleteRound = (int) Math.ceil((float) testCtx.getDocumentCorpus().size()
- / (float) numDeleteRounds);
+ int numTuplesPerDeleteRound =
+ (int) Math.ceil((float) testCtx.getDocumentCorpus().size() / (float) numDeleteRounds);
for (int j = 0; j < numDeleteRounds; j++) {
LSMInvertedIndexTestUtils.deleteFromInvIndex(testCtx, harness.getRandom(), numTuplesPerDeleteRound);
validateAndCheckIndex(testCtx);
@@ -80,33 +78,33 @@
}
@Test
- public void wordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
- invIndexType);
+ public void wordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
- invIndexType);
+ public void hashedWordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void ngramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
- invIndexType);
+ public void ngramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
- invIndexType);
+ public void hashedNGramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
index 048f3e4..6f006f8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexLoadTest.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
@@ -38,13 +37,12 @@
this.bulkLoad = bulkLoad;
}
- protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
- IndexException {
+ protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException {
IIndex invIndex = testCtx.getIndex();
invIndex.create();
invIndex.activate();
if (bulkLoad) {
- LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, true);
+ LSMInvertedIndexTestUtils.bulkLoadInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT, true);
} else {
LSMInvertedIndexTestUtils.insertIntoInvIndex(testCtx, tupleGen, NUM_DOCS_TO_INSERT);
}
@@ -56,33 +54,33 @@
}
@Test
- public void wordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
- invIndexType);
+ public void wordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
- invIndexType);
+ public void hashedWordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void ngramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
- invIndexType);
+ public void ngramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
@Test
- public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
- invIndexType);
+ public void hashedNGramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
runTest(testCtx, tupleGen);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
index 3a188fe..3a63e55 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexSearchTest.java
@@ -26,7 +26,6 @@
import java.util.logging.Logger;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
@@ -52,9 +51,10 @@
}
protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen,
- List<IInvertedIndexSearchModifier> searchModifiers) throws IOException, IndexException {
+ List<IInvertedIndexSearchModifier> searchModifiers) throws IOException {
IIndex invIndex = testCtx.getIndex();
- if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM) || !bulkLoad) {
+ if ((invIndexType != InvertedIndexType.LSM) && (invIndexType != InvertedIndexType.PARTITIONED_LSM)
+ || !bulkLoad) {
invIndex.create();
invIndex.activate();
}
@@ -81,9 +81,9 @@
invIndex.destroy();
}
- private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
+ private void testWordInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
- List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
+ List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
searchModifiers.add(new ConjunctiveSearchModifier());
searchModifiers.add(new JaccardSearchModifier(1.0f));
searchModifiers.add(new JaccardSearchModifier(0.8f));
@@ -91,9 +91,9 @@
runTest(testCtx, tupleGen, searchModifiers);
}
- private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException, IndexException {
+ private void testNGramInvIndexIndex(LSMInvertedIndexTestContext testCtx) throws IOException {
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
- List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<IInvertedIndexSearchModifier>();
+ List<IInvertedIndexSearchModifier> searchModifiers = new ArrayList<>();
searchModifiers.add(new ConjunctiveSearchModifier());
searchModifiers.add(new JaccardSearchModifier(1.0f));
searchModifiers.add(new JaccardSearchModifier(0.8f));
@@ -106,30 +106,30 @@
}
@Test
- public void wordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
- invIndexType);
+ public void wordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, invIndexType);
testWordInvIndexIndex(testCtx);
}
@Test
- public void hashedWordTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness,
- invIndexType);
+ public void hashedWordTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedWordInvIndexTestContext(harness, invIndexType);
testWordInvIndexIndex(testCtx);
}
@Test
- public void ngramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness,
- invIndexType);
+ public void ngramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createNGramInvIndexTestContext(harness, invIndexType);
testNGramInvIndexIndex(testCtx);
}
@Test
- public void hashedNGramTokensInvIndexTest() throws IOException, IndexException {
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness,
- invIndexType);
+ public void hashedNGramTokensInvIndexTest() throws IOException {
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, invIndexType);
testNGramInvIndexIndex(testCtx);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
index df4062f..2115eac 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/common/AbstractInvertedIndexTest.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
@@ -73,7 +72,7 @@
* This test is only for verifying the integrity and correctness of the index,
* it does not ensure the correctness of index searches.
*/
- protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException, IndexException {
+ protected void validateAndCheckIndex(LSMInvertedIndexTestContext testCtx) throws HyracksDataException {
IIndex invIndex = testCtx.getIndex();
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Validating index: " + invIndex);
@@ -90,8 +89,8 @@
/**
* Runs a workload of queries using different search modifiers, and verifies the correctness of the results.
*/
- protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen) throws IOException,
- IndexException {
+ protected void runTinySearchWorkload(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen)
+ throws IOException {
for (IInvertedIndexSearchModifier searchModifier : TEST_SEARCH_MODIFIERS) {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Running test workload with: " + searchModifier.toString());
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
index 88a8abd..e0427e8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTest.java
@@ -27,8 +27,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
-import org.apache.hyracks.storage.am.common.api.IndexException;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.config.AccessMethodTestsConfig;
@@ -61,8 +59,7 @@
}
protected void runTest(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numThreads,
- TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException,
- HyracksDataException {
+ TestWorkloadConf conf, String dataMsg) throws InterruptedException, HyracksDataException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("LSMInvertedIndex MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ "; Workload: " + conf.toString() + ".");
@@ -88,36 +85,36 @@
// Insert only workload.
TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
- workloadConfs.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper
- .getUniformProbDist(insertOnlyOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
// Insert and merge workload.
TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
- workloadConfs.add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper
- .getUniformProbDist(insertMergeOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(insertMergeOps, ProbabilityHelper.getUniformProbDist(insertMergeOps.length)));
// Inserts mixed with point searches and scans.
- TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
- TestOperation.SCAN };
- workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, ProbabilityHelper
- .getUniformProbDist(insertSearchOnlyOps.length)));
+ TestOperation[] insertSearchOnlyOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
+ ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
// Inserts, and deletes.
TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
- workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, ProbabilityHelper
- .getUniformProbDist(insertDeleteUpdateOps.length)));
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps,
+ ProbabilityHelper.getUniformProbDist(insertDeleteUpdateOps.length)));
// Inserts, deletes and merges.
- TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.MERGE };
- workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, ProbabilityHelper
- .getUniformProbDist(insertDeleteUpdateMergeOps.length)));
+ TestOperation[] insertDeleteUpdateMergeOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps,
+ ProbabilityHelper.getUniformProbDist(insertDeleteUpdateMergeOps.length)));
// All operations except merge.
TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
TestOperation.POINT_SEARCH, TestOperation.SCAN };
- workloadConfs.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper
- .getUniformProbDist(allNoMergeOps.length)));
+ workloadConfs
+ .add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
// All operations.
TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
@@ -128,14 +125,14 @@
}
@Test
- public void wordTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
+ public void wordTokensInvIndexTest() throws IOException, InterruptedException {
String dataMsg = "Documents";
int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
for (int i = 0; i < numThreads.length; i++) {
for (TestWorkloadConf conf : workloadConfs) {
setUp();
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness,
- getIndexType());
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createWordInvIndexTestContext(harness, getIndexType());
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createStringDocumentTupleGen(harness.getRandom());
runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
tearDown();
@@ -144,14 +141,14 @@
}
@Test
- public void hashedNGramTokensInvIndexTest() throws IOException, IndexException, InterruptedException {
+ public void hashedNGramTokensInvIndexTest() throws IOException, InterruptedException {
String dataMsg = "Person Names";
int[] numThreads = new int[] { REGULAR_NUM_THREADS, EXCESSIVE_NUM_THREADS };
for (int i = 0; i < numThreads.length; i++) {
for (TestWorkloadConf conf : workloadConfs) {
setUp();
- LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(
- harness, getIndexType());
+ LSMInvertedIndexTestContext testCtx =
+ LSMInvertedIndexTestUtils.createHashedNGramInvIndexTestContext(harness, getIndexType());
TupleGenerator tupleGen = LSMInvertedIndexTestUtils.createPersonNamesTupleGen(harness.getRandom());
runTest(testCtx, tupleGen, numThreads[i], conf, dataMsg);
tearDown();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
index 9e24600..d0a90b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexTestWorker.java
@@ -23,6 +23,7 @@
import java.util.List;
import java.util.Random;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.utils.TupleUtils;
@@ -32,11 +33,9 @@
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.impls.LSMInvertedIndexAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifier;
@@ -47,7 +46,7 @@
public class LSMInvertedIndexTestWorker extends AbstractIndexTestWorker {
protected final LSMInvertedIndex invIndex;
- protected final List<ITupleReference> documentCorpus = new ArrayList<ITupleReference>();
+ protected final List<ITupleReference> documentCorpus = new ArrayList<>();
protected final Random rnd = new Random(50);
protected final IInvertedIndexSearchModifier[] TEST_SEARCH_MODIFIERS = new IInvertedIndexSearchModifier[] {
@@ -60,7 +59,7 @@
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
LSMInvertedIndexAccessor accessor = (LSMInvertedIndexAccessor) indexAccessor;
IIndexCursor searchCursor = accessor.createSearchCursor(false);
IIndexCursor rangeSearchCursor = accessor.createRangeSearchCursor();
@@ -99,8 +98,11 @@
try {
accessor.search(searchCursor, searchPred);
consumeCursorTuples(searchCursor);
- } catch (OccurrenceThresholdPanicException e) {
+ } catch (HyracksDataException e) {
// Ignore.
+ if (e.getErrorCode() != ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+ throw e;
+ }
}
break;
}
@@ -122,8 +124,7 @@
}
}
- private void insert(LSMInvertedIndexAccessor accessor, ITupleReference tuple) throws HyracksDataException,
- IndexException {
+ private void insert(LSMInvertedIndexAccessor accessor, ITupleReference tuple) throws HyracksDataException {
// Ignore ongoing merges. Do an insert instead.
accessor.insert(tuple);
// Add tuple to document corpus so we can delete it.
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
index cae6a7d..6e17fff 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
@@ -30,6 +30,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.control.nc.io.IOManager;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -38,11 +39,9 @@
import org.apache.hyracks.storage.am.btree.OrderedIndexTestContext;
import org.apache.hyracks.storage.am.common.CheckTuple;
import org.apache.hyracks.storage.am.common.api.IIndex;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.lsm.common.freepage.VirtualFreePageManager;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.InvertedIndexException;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
@SuppressWarnings("rawtypes")
@@ -103,11 +102,11 @@
ISerializerDeserializer[] fieldSerdes, int tokenFieldCount, IBinaryTokenizerFactory tokenizerFactory,
InvertedIndexType invIndexType, int[] invertedIndexFields, ITypeTraits[] filterTypeTraits,
IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields, int[] filterFieldsForNonBulkLoadOps,
- int[] invertedIndexFieldsForNonBulkLoadOps) throws IndexException, HyracksDataException {
+ int[] invertedIndexFieldsForNonBulkLoadOps) throws HyracksDataException {
ITypeTraits[] allTypeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
IOManager ioManager = harness.getIOManager();
- IBinaryComparatorFactory[] allCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
- fieldSerdes.length);
+ IBinaryComparatorFactory[] allCmpFactories =
+ SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
// Set token type traits and comparators.
ITypeTraits[] tokenTypeTraits = new ITypeTraits[tokenFieldCount];
IBinaryComparatorFactory[] tokenCmpFactories = new IBinaryComparatorFactory[tokenFieldCount];
@@ -129,17 +128,17 @@
switch (invIndexType) {
case INMEMORY: {
invIndex = InvertedIndexUtils.createInMemoryBTreeInvertedindex(harness.getVirtualBufferCaches().get(0),
- new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)),
- invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+ new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)), invListTypeTraits,
+ invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
break;
}
case PARTITIONED_INMEMORY: {
- invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(harness
- .getVirtualBufferCaches().get(0), new VirtualFreePageManager(harness.getVirtualBufferCaches()
- .get(0)), invListTypeTraits,
- invListCmpFactories, tokenTypeTraits,
- tokenCmpFactories, tokenizerFactory, ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
+ invIndex = InvertedIndexUtils.createPartitionedInMemoryBTreeInvertedindex(
+ harness.getVirtualBufferCaches().get(0),
+ new VirtualFreePageManager(harness.getVirtualBufferCaches().get(0)), invListTypeTraits,
+ invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+ ioManager.resolveAbsolutePath(harness.getOnDiskDir()));
break;
}
case ONDISK: {
@@ -149,10 +148,10 @@
break;
}
case PARTITIONED_ONDISK: {
- invIndex = InvertedIndexUtils.createPartitionedOnDiskInvertedIndex(ioManager, harness
- .getDiskBufferCache(),
- harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
- tokenCmpFactories, harness.getInvListsFileRef(), harness.getMetadataPageManagerFactory());
+ invIndex = InvertedIndexUtils.createPartitionedOnDiskInvertedIndex(ioManager,
+ harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), invListTypeTraits,
+ invListCmpFactories, tokenTypeTraits, tokenCmpFactories, harness.getInvListsFileRef(),
+ harness.getMetadataPageManagerFactory());
break;
}
case LSM: {
@@ -162,24 +161,23 @@
harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
invertedIndexFields, filterTypeTraits, filterCmpFactories, filterFields,
- filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true, harness
- .getMetadataPageManagerFactory());
+ filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+ harness.getMetadataPageManagerFactory());
break;
}
case PARTITIONED_LSM: {
- invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager, harness
- .getVirtualBufferCaches(),
- harness.getDiskFileMapProvider(), invListTypeTraits, invListCmpFactories, tokenTypeTraits,
- tokenCmpFactories, tokenizerFactory, harness.getDiskBufferCache(), harness.getOnDiskDir(),
- harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
- harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
- invertedIndexFields, filterTypeTraits, filterCmpFactories, filterFields,
- filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true, harness
- .getMetadataPageManagerFactory());
+ invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(ioManager,
+ harness.getVirtualBufferCaches(), harness.getDiskFileMapProvider(), invListTypeTraits,
+ invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
+ harness.getDiskBufferCache(), harness.getOnDiskDir(), harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
+ harness.getIOOperationCallback(), invertedIndexFields, filterTypeTraits, filterCmpFactories,
+ filterFields, filterFieldsForNonBulkLoadOps, invertedIndexFieldsForNonBulkLoadOps, true,
+ harness.getMetadataPageManagerFactory());
break;
}
default: {
- throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
+ throw HyracksDataException.create(ErrorCode.UNKNOWN_INVERTED_INDEX_TYPE, invIndexType);
}
}
InvertedIndexTokenizingTupleIterator indexTupleIter = null;
@@ -194,17 +192,17 @@
case PARTITIONED_INMEMORY:
case PARTITIONED_ONDISK:
case PARTITIONED_LSM: {
- indexTupleIter = new PartitionedInvertedIndexTokenizingTupleIterator(
- invIndex.getTokenTypeTraits().length, invIndex.getInvListTypeTraits().length,
- tokenizerFactory.createTokenizer());
+ indexTupleIter =
+ new PartitionedInvertedIndexTokenizingTupleIterator(invIndex.getTokenTypeTraits().length,
+ invIndex.getInvListTypeTraits().length, tokenizerFactory.createTokenizer());
break;
}
default: {
- throw new InvertedIndexException("Unknow inverted-index type '" + invIndexType + "'.");
+ throw HyracksDataException.create(ErrorCode.UNKNOWN_INVERTED_INDEX_TYPE, invIndexType);
}
}
- LSMInvertedIndexTestContext testCtx = new LSMInvertedIndexTestContext(fieldSerdes, invIndex, tokenizerFactory,
- invIndexType, indexTupleIter);
+ LSMInvertedIndexTestContext testCtx =
+ new LSMInvertedIndexTestContext(fieldSerdes, invIndex, tokenizerFactory, invIndexType, indexTupleIter);
return testCtx;
}
@@ -240,8 +238,8 @@
public CheckTuple createCheckTuple(ITupleReference tuple) throws HyracksDataException {
CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, fieldSerdes.length);
for (int i = 0; i < fieldSerdes.length; i++) {
- ByteArrayInputStream bains = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream bains =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput in = new DataInputStream(bains);
Comparable field = (Comparable) fieldSerdes[i].deserialize(in);
checkTuple.appendField(field);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
index a5d77b0..157f86b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java
@@ -36,6 +36,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.data.std.util.GrowableArray;
@@ -50,7 +51,6 @@
import org.apache.hyracks.storage.am.common.CheckTuple;
import org.apache.hyracks.storage.am.common.api.IIndexBulkLoader;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DocumentStringFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.IFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.PersonNameFieldValueGenerator;
@@ -64,7 +64,6 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
-import org.apache.hyracks.storage.am.lsm.invertedindex.exceptions.OccurrenceThresholdPanicException;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
@@ -87,8 +86,8 @@
IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
fieldGens[0] = new DocumentStringFieldValueGenerator(2, 10, 10000, rnd);
fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ IntegerSerializerDeserializer.INSTANCE };
TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
return tupleGen;
}
@@ -97,14 +96,14 @@
IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[2];
fieldGens[0] = new PersonNameFieldValueGenerator(rnd, 0.5f);
fieldGens[1] = new SortedIntegerFieldValueGenerator(0);
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ IntegerSerializerDeserializer.INSTANCE };
TupleGenerator tupleGen = new TupleGenerator(fieldGens, fieldSerdes, 0);
return tupleGen;
}
private static ISerializerDeserializer[] getNonHashedIndexFieldSerdes(InvertedIndexType invIndexType)
- throws IndexException {
+ throws HyracksDataException {
ISerializerDeserializer[] fieldSerdes = null;
switch (invIndexType) {
case INMEMORY:
@@ -123,14 +122,14 @@
break;
}
default: {
- throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
+ throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
}
}
return fieldSerdes;
}
private static ISerializerDeserializer[] getHashedIndexFieldSerdes(InvertedIndexType invIndexType)
- throws IndexException {
+ throws HyracksDataException {
ISerializerDeserializer[] fieldSerdes = null;
switch (invIndexType) {
case INMEMORY:
@@ -149,58 +148,58 @@
break;
}
default: {
- throw new IndexException("Unhandled inverted index type '" + invIndexType + "'.");
+ throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
}
}
return fieldSerdes;
}
public static LSMInvertedIndexTestContext createWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
- InvertedIndexType invIndexType) throws IOException, IndexException {
+ InvertedIndexType invIndexType) throws IOException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
ITokenFactory tokenFactory = new UTF8WordTokenFactory();
- IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
- tokenFactory);
+ IBinaryTokenizerFactory tokenizerFactory =
+ new DelimitedUTF8StringBinaryTokenizerFactory(true, false, tokenFactory);
LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
return testCtx;
}
public static LSMInvertedIndexTestContext createHashedWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
- InvertedIndexType invIndexType) throws IOException, IndexException {
+ InvertedIndexType invIndexType) throws IOException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
ITokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
- IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
- tokenFactory);
+ IBinaryTokenizerFactory tokenizerFactory =
+ new DelimitedUTF8StringBinaryTokenizerFactory(true, false, tokenFactory);
LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
return testCtx;
}
public static LSMInvertedIndexTestContext createNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
- InvertedIndexType invIndexType) throws IOException, IndexException {
+ InvertedIndexType invIndexType) throws IOException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
ITokenFactory tokenFactory = new UTF8NGramTokenFactory();
- IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
- true, false, tokenFactory);
+ IBinaryTokenizerFactory tokenizerFactory =
+ new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true, true, false, tokenFactory);
LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
return testCtx;
}
public static LSMInvertedIndexTestContext createHashedNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
- InvertedIndexType invIndexType) throws IOException, IndexException {
+ InvertedIndexType invIndexType) throws IOException, HyracksDataException {
ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
ITokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true,
- true, false, tokenFactory);
+ IBinaryTokenizerFactory tokenizerFactory =
+ new NGramUTF8StringBinaryTokenizerFactory(TEST_GRAM_LENGTH, true, true, false, tokenFactory);
LSMInvertedIndexTestContext testCtx = LSMInvertedIndexTestContext.create(harness, fieldSerdes,
fieldSerdes.length - 1, tokenizerFactory, invIndexType, null, null, null, null, null, null);
return testCtx;
}
- public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs, boolean appendOnly)
- throws IndexException, IOException {
+ public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs,
+ boolean appendOnly) throws HyracksDataException, IOException {
SortedSet<CheckTuple> tmpMemIndex = new TreeSet<>();
// First generate the expected index by inserting the documents one-by-one.
for (int i = 0; i < numDocs; i++) {
@@ -226,7 +225,7 @@
}
public static void insertIntoInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs)
- throws IOException, IndexException {
+ throws IOException {
// InMemoryInvertedIndex only supports insert.
for (int i = 0; i < numDocs; i++) {
ITupleReference tuple = tupleGen.next();
@@ -236,7 +235,7 @@
}
public static void deleteFromInvIndex(LSMInvertedIndexTestContext testCtx, Random rnd, int numDocsToDelete)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
for (int i = 0; i < numDocsToDelete && !documentCorpus.isEmpty(); i++) {
int size = documentCorpus.size();
@@ -254,15 +253,16 @@
* Compares actual and expected indexes using the rangeSearch() method of the inverted-index accessor.
*/
public static void compareActualAndExpectedIndexesRangeSearch(LSMInvertedIndexTestContext testCtx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
int tokenFieldCount = invIndex.getTokenTypeTraits().length;
int invListFieldCount = invIndex.getInvListTypeTraits().length;
- IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex.createAccessor(
- NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex
+ .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
IIndexCursor invIndexCursor = invIndexAccessor.createRangeSearchCursor();
MultiComparator tokenCmp = MultiComparator.create(invIndex.getTokenCmpFactories());
- IBinaryComparatorFactory[] tupleCmpFactories = new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
+ IBinaryComparatorFactory[] tupleCmpFactories =
+ new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
for (int i = 0; i < tokenFieldCount; i++) {
tupleCmpFactories[i] = invIndex.getTokenCmpFactories()[i];
}
@@ -307,7 +307,7 @@
*/
@SuppressWarnings("unchecked")
public static void compareActualAndExpectedIndexes(LSMInvertedIndexTestContext testCtx)
- throws HyracksDataException, IndexException {
+ throws HyracksDataException {
IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
MultiComparator invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
@@ -344,8 +344,8 @@
CheckTuple checkHighKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
checkHighKey.appendField(token);
- SortedSet<CheckTuple> expectedInvList = OrderedIndexTestUtils.getPrefixExpectedSubset(
- testCtx.getCheckTuples(), checkLowKey, checkHighKey);
+ SortedSet<CheckTuple> expectedInvList =
+ OrderedIndexTestUtils.getPrefixExpectedSubset(testCtx.getCheckTuples(), checkLowKey, checkHighKey);
Iterator<CheckTuple> expectedInvListIter = expectedInvList.iterator();
// Position inverted-list cursor in actual index.
@@ -439,7 +439,8 @@
IToken token = tokenizer.getToken();
tokenData.reset();
token.serializeToken(tokenData);
- ByteArrayInputStream inStream = new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
DataInput dataIn = new DataInputStream(inStream);
Comparable tokenObj = (Comparable) tokenSerde.deserialize(dataIn);
CheckTuple lowKey;
@@ -487,10 +488,10 @@
public static void testIndexSearch(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, Random rnd,
int numDocQueries, int numRandomQueries, IInvertedIndexSearchModifier searchModifier, int[] scanCountArray)
- throws IOException, IndexException {
+ throws IOException, HyracksDataException {
IInvertedIndex invIndex = testCtx.invIndex;
- IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex.createAccessor(
- NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex
+ .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
IBinaryTokenizer tokenizer = testCtx.getTokenizerFactory().createTokenizer();
InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
@@ -519,9 +520,13 @@
boolean panic = false;
try {
accessor.search(resultCursor, searchPred);
- } catch (OccurrenceThresholdPanicException e) {
+ } catch (HyracksDataException e) {
// ignore panic queries.
- panic = true;
+ if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+ panic = true;
+ } else {
+ throw e;
+ }
}
try {
@@ -532,12 +537,17 @@
while (resultCursor.hasNext()) {
resultCursor.next();
ITupleReference resultTuple = resultCursor.getTuple();
- int actual = IntegerPointable.getInteger(resultTuple.getFieldData(0), resultTuple.getFieldStart(0));
+ int actual = IntegerPointable.getInteger(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(0));
actualResults.add(Integer.valueOf(actual));
}
- } catch (OccurrenceThresholdPanicException e) {
- // Ignore panic queries.
- continue;
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
+ // Ignore panic queries.
+ continue;
+ } else {
+ throw e;
+ }
}
Collections.sort(actualResults);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
index 1c47373..befdf07 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeExamplesTest.java
@@ -21,11 +21,11 @@
import java.util.logging.Level;
-import org.junit.Test;
-
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -35,11 +35,11 @@
import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
+import org.junit.Test;
public abstract class AbstractLSMRTreeExamplesTest extends AbstractRTreeExamplesTest {
@@ -62,10 +62,10 @@
typeTraits[4] = IntegerPointable.TYPE_TRAITS;
typeTraits[5] = IntegerPointable.TYPE_TRAITS;
// Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Declare RTree keys.
int rtreeKeyFieldCount = 4;
@@ -101,17 +101,17 @@
}
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- rtreeCmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(rtreeCmpFactories.length, IntegerPointable.FACTORY);
int[] rtreeFields = { 0, 1, 2, 3, 4 };
ITypeTraits[] filterTypeTraits = { IntegerPointable.TYPE_TRAITS };
- IBinaryComparatorFactory[] filterCmpFactories = { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
+ IBinaryComparatorFactory[] filterCmpFactories =
+ { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
int[] filterFields = { 5 };
- ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits,
- filterCmpFactories, filterFields);
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
+ RTreePolicyType.RTREE, rtreeFields, btreeFields, filterTypeTraits, filterCmpFactories, filterFields);
treeIndex.create();
treeIndex.activate();
@@ -121,8 +121,8 @@
}
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- IIndexAccessor indexAccessor = (IIndexAccessor) treeIndex.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ IIndexAccessor indexAccessor =
+ treeIndex.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
int numInserts = 10000;
for (int i = 0; i < numInserts; i++) {
int p1x = rnd.nextInt();
@@ -137,7 +137,10 @@
Math.max(p1y, p2y), pk, filter);
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
}
long end = System.currentTimeMillis();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
index cdc39ba..00cb223 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
@@ -24,7 +24,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -43,17 +42,15 @@
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
- IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
- HyracksDataException {
- return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
- .getFileReference(),
- harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
- btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getBoomFilterFalsePositiveRate(),
- harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
- harness.getIOOperationCallback(),
+ IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
+ return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+ harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+ rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(), harness.getOperationTracker(),
+ harness.getIOScheduler(), harness.getIOOperationCallback(),
LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), rtreeFields, btreeFields,
- filterTypeTraits, filterCmpFactories, filterFields, true, false, harness
- .getMetadataPageManagerFactory());
+ filterTypeTraits, filterCmpFactories, filterFields, true, false,
+ harness.getMetadataPageManagerFactory());
}
@Before
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
index 6ce70f7..835ae64 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
@@ -24,7 +24,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
@@ -43,15 +42,14 @@
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
- IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException,
- HyracksDataException {
+ IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getIOManager(), harness.getVirtualBufferCaches(),
harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
harness.getOperationTracker(), harness.getIOScheduler(), harness.getIOOperationCallback(),
LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), rtreeFields,
- filterTypeTraits, filterCmpFactories, filterFields, true, false, harness
- .getMetadataPageManagerFactory());
+ filterTypeTraits, filterCmpFactories, filterFields, true, false,
+ harness.getMetadataPageManagerFactory());
}
@Before
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
index 06501b4..406e0f8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/AbstractLSMRTreeTestWorker.java
@@ -28,7 +28,6 @@
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -78,7 +77,7 @@
rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
}
- protected void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+ protected void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
try {
while (cursor.hasNext()) {
cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
index d6f4fcc..e319d1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
@@ -60,14 +59,13 @@
@Override
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
- RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException, HyracksDataException {
- return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(), harness
- .getFileReference(),
- harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories,
- btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getBoomFilterFalsePositiveRate(),
- harness.getMergePolicy(), harness.getOperationTracker(), harness.getIOScheduler(),
- harness.getIOOperationCallback(), LSMRTreeUtils.proposeBestLinearizer(typeTraits,
- rtreeCmpFactories.length), null, btreeFields, null,
+ RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
+ return LSMRTreeUtils.createLSMTree(harness.getIOManager(), harness.getVirtualBufferCaches(),
+ harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
+ rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType,
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(), harness.getOperationTracker(),
+ harness.getIOScheduler(), harness.getIOOperationCallback(),
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), null, btreeFields, null,
null, null, true, false, harness.getMetadataPageManagerFactory());
}
@@ -101,14 +99,14 @@
new TestWorkloadConf(insertDeleteOps, ProbabilityHelper.getUniformProbDist(insertDeleteOps.length)));
// Inserts, deletes and merges.
- TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.MERGE };
+ TestOperation[] insertDeleteMergeOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps,
ProbabilityHelper.getUniformProbDist(insertDeleteMergeOps.length)));
// All operations except merge.
- TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.SCAN };
+ TestOperation[] allNoMergeOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN };
workloadConfs
.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
index 8099582..5f783da 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
@@ -28,7 +28,6 @@
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
@@ -52,7 +51,7 @@
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
LSMRTreeAccessor accessor = (LSMRTreeAccessor) indexAccessor;
ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
MultiComparator cmp = accessor.getMultiComparator();
@@ -116,7 +115,7 @@
rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
}
- private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException, IndexException {
+ private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
try {
while (cursor.hasNext()) {
cursor.next();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
index d72011b..987e015 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
import org.apache.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
import org.apache.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
@@ -41,7 +40,8 @@
private final LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
- private final LSMRTreeWithAntiMatterTuplesTestWorkerFactory workerFactory = new LSMRTreeWithAntiMatterTuplesTestWorkerFactory();
+ private final LSMRTreeWithAntiMatterTuplesTestWorkerFactory workerFactory =
+ new LSMRTreeWithAntiMatterTuplesTestWorkerFactory();
public LSMRTreeWithAntiMatterTuplesMultiThreadTest() {
super(false, RTreeType.LSMRTREE_WITH_ANTIMATTER);
@@ -60,7 +60,7 @@
@Override
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
- RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException, HyracksDataException {
+ RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
return LSMRTreeUtils.createLSMTreeWithAntiMatterTuples(harness.getIOManager(), harness.getVirtualBufferCaches(),
harness.getFileReference(), harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), typeTraits,
rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
@@ -99,14 +99,14 @@
new TestWorkloadConf(insertDeleteOps, ProbabilityHelper.getUniformProbDist(insertDeleteOps.length)));
// Inserts, deletes and merges.
- TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.MERGE };
+ TestOperation[] insertDeleteMergeOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.MERGE };
workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps,
ProbabilityHelper.getUniformProbDist(insertDeleteMergeOps.length)));
// All operations except merge.
- TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
- TestOperation.SCAN };
+ TestOperation[] allNoMergeOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN };
workloadConfs
.add(new TestWorkloadConf(allNoMergeOps, ProbabilityHelper.getUniformProbDist(allNoMergeOps.length)));
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
index 4e23e8f..4232207 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/org/apache/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesTestWorker.java
@@ -25,7 +25,6 @@
import org.apache.hyracks.storage.am.common.TestOperationSelector.TestOperation;
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
@@ -35,13 +34,13 @@
public class LSMRTreeWithAntiMatterTuplesTestWorker extends AbstractLSMRTreeTestWorker {
- public LSMRTreeWithAntiMatterTuplesTestWorker(DataGenThread dataGen, TestOperationSelector opSelector,
- IIndex index, int numBatches) throws HyracksDataException {
+ public LSMRTreeWithAntiMatterTuplesTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, IIndex index,
+ int numBatches) throws HyracksDataException {
super(dataGen, opSelector, index, numBatches);
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
LSMRTreeWithAntiMatterTuplesAccessor accessor = (LSMRTreeWithAntiMatterTuplesAccessor) indexAccessor;
ITreeIndexCursor searchCursor = accessor.createSearchCursor(false);
MultiComparator cmp = accessor.getMultiComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
index 30ab9c2..ecc6fe4 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeExamplesTest.java
@@ -24,7 +24,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
import org.apache.hyracks.storage.am.rtree.util.RTreeUtils;
import org.apache.hyracks.storage.am.rtree.utils.RTreeTestHarness;
@@ -53,7 +52,7 @@
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
RTreePolicyType rtreePolicyType, int[] rtreeFields, int[] btreeFields, ITypeTraits[] filterTypeTraits,
- IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws TreeIndexException {
+ IBinaryComparatorFactory[] filterCmpFactories, int[] filterFields) throws HyracksDataException {
return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference(), false,
harness.getMetadataManagerFactory());
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
index 256c98f..f15be9b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -26,6 +26,7 @@
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
@@ -39,7 +40,6 @@
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndexMetadataFrameFactory;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import org.apache.hyracks.storage.am.common.freepage.LinkedMetaDataPageManager;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
@@ -106,16 +106,16 @@
cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
// create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(cmpFactories.length, IntegerPointable.FACTORY);
RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
ITreeIndexMetadataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
valueProviderFactories, RTreePolicyType.RTREE, false);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories, RTreePolicyType.RTREE, false);
+ ITreeIndexFrameFactory leafFrameFactory =
+ new RTreeNSMLeafFrameFactory(tupleWriterFactory, valueProviderFactories, RTreePolicyType.RTREE, false);
IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
@@ -128,8 +128,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = rtree.createAccessor(NoOpOperationCallback.INSTANCE,
- NoOpOperationCallback.INSTANCE);
+ ITreeIndexAccessor indexAccessor =
+ rtree.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
int numInserts = 10000;
ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<>();
for (int i = 0; i < numInserts; i++) {
@@ -144,7 +144,10 @@
Math.max(p1y, p2y), pk);
try {
indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
+ } catch (HyracksDataException e) {
+ if (e.getErrorCode() != ErrorCode.DUPLICATE_KEY) {
+ throw e;
+ }
}
RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
checkTuple.appendField(Math.min(p1x, p2x));
@@ -165,10 +168,10 @@
ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
- RTreeCheckTuple keyCheck = (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes,
- keyFieldCount);
- HashMultiSet<RTreeCheckTuple> expectedResult = rTreeTestUtils.getRangeSearchExpectedResults(checkTuples,
- keyCheck);
+ RTreeCheckTuple keyCheck =
+ (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes, keyFieldCount);
+ HashMultiSet<RTreeCheckTuple> expectedResult =
+ rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
indexAccessor.search(searchCursor, searchPredicate);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
index c8fec8c..0a27be3 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.TestWorkloadConf;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.TreeIndexException;
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
import org.apache.hyracks.storage.am.rtree.AbstractRTreeExamplesTest.RTreeType;
import org.apache.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
@@ -60,7 +59,7 @@
@Override
protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories,
- RTreePolicyType rtreePolicyType, int[] btreeFields) throws TreeIndexException {
+ RTreePolicyType rtreePolicyType, int[] btreeFields) throws HyracksDataException {
return RTreeUtils.createRTree(harness.getBufferCache(), harness.getFileMapProvider(), typeTraits,
valueProviderFactories, rtreeCmpFactories, rtreePolicyType, harness.getFileReference(), false,
harness.getMetadataManagerFactory());
@@ -82,8 +81,8 @@
.add(new TestWorkloadConf(insertOnlyOps, ProbabilityHelper.getUniformProbDist(insertOnlyOps.length)));
// Inserts mixed with scans.
- TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
- TestOperation.DISKORDER_SCAN };
+ TestOperation[] insertSearchOnlyOps =
+ new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps,
ProbabilityHelper.getUniformProbDist(insertSearchOnlyOps.length)));
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
index ecca1e7..898c051 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/org/apache/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
@@ -29,7 +29,6 @@
import org.apache.hyracks.storage.am.common.api.IIndex;
import org.apache.hyracks.storage.am.common.api.IIndexCursor;
import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
-import org.apache.hyracks.storage.am.common.api.IndexException;
import org.apache.hyracks.storage.am.common.datagen.DataGenThread;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
import org.apache.hyracks.storage.am.rtree.impls.RTree;
@@ -51,7 +50,7 @@
}
@Override
- public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException {
RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
IIndexCursor searchCursor = accessor.createSearchCursor(false);
ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
index 69f4a3c..2b4c4c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/BufferCacheRegressionTest.java
@@ -42,6 +42,7 @@
import org.junit.Test;
public class BufferCacheRegressionTest {
+ protected String dirName = "target";
protected String fileName = "flushTestFile";
private static final int PAGE_SIZE = 256;
private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
@@ -53,19 +54,22 @@
// invalidated, but most not be flushed.
// 2. If the file was not deleted, then we must flush its dirty pages.
@Before
- public void setUp() throws IOException{
+ public void setUp() throws IOException {
resetState();
}
+
@After
- public void tearDown() throws IOException{
+ public void tearDown() throws IOException {
resetState();
}
- private void resetState() throws IOException{
- File f = new File(fileName);
+
+ private void resetState() throws IOException {
+ File f = new File(dirName, fileName);
if (f.exists()) {
f.delete();
}
}
+
@Test
public void testFlushBehaviorOnFileEviction() throws IOException {
flushBehaviorTest(true);
@@ -120,8 +124,8 @@
// physical memory again, and for performance reasons pages are never
// reset with 0's.
FileReference testFileRef = ioManager.resolve(fileName);
- IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY,
- FileSyncMode.METADATA_SYNC_DATA_SYNC);
+ IFileHandle testFileHandle =
+ ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY, FileSyncMode.METADATA_SYNC_DATA_SYNC);
ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE + BufferCache.RESERVED_HEADER_BYTES);
ioManager.syncRead(testFileHandle, 0, testBuffer);
for (int i = BufferCache.RESERVED_HEADER_BYTES; i < testBuffer.capacity(); i++) {