[NO ISSUE][MTD] Add error codes in MetadataNode
- user model changes: no
- storage format changes: no
- interface changes: no
Details:
- Add error codes for exceptions raised by MetadataNode (see the usage sketch below)
- Introduce MetadataUtil.getFullyQualifiedDisplayName()
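
For illustration only (not part of the patch): a minimal sketch of the new pattern. Drop-dependency failures now raise AsterixException with a numbered ErrorCode instead of a free-form AlgebricksException, and entity names are rendered through the new MetadataUtil helper. The surrounding variables (dataverseName, libraryName, function) are assumed to be in scope, as in MetadataNode's drop-library dependency check.

    // CANNOT_DROP_OBJECT_DEPENDENT_EXISTS maps to message 1148:
    //   "Cannot drop %1$s %2$s being used by %3$s %4$s"
    // The error code is fully qualified because MetadataNode also imports the Hyracks ErrorCode.
    throw new AsterixException(
            org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS,
            "library", MetadataUtil.getFullyQualifiedDisplayName(dataverseName, libraryName),
            "function", function.getSignature());
    // Surfaces to the user as, e.g.:
    //   ASX1148: Cannot drop library externallibtest.testlib being used by function externallibtest.mysum(2)
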
Change-Id: Idf827fd3c0e824468634a1755c96182c62577433
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/10231
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Dmitry Lychagin <dmitry.lychagin@couchbase.com>
Reviewed-by: Ali Alsuliman <ali.al.solaiman@gmail.com>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_it_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_it_sqlpp.xml
index 954252c..1ebc78c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_it_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_it_sqlpp.xml
@@ -63,7 +63,7 @@
<test-case FilePath="external-library">
<compilation-unit name="mysum_dropinuse">
<output-dir compare="Text">mysum_dropinuse</output-dir>
- <expected-error>Cannot drop library externallibtest.testlib being used by funciton externallibtest.mysum(2)</expected-error>
+ <expected-error>ASX1148: Cannot drop library externallibtest.testlib being used by function externallibtest.mysum(2)</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="external-library">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index ebc8ec0..d2135df 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -11912,7 +11912,7 @@
<test-case FilePath="cross-dataverse">
<compilation-unit name="drop-dataverse">
<output-dir compare="Text">drop-dataverse</output-dir>
- <expected-error>Cannot drop dataverse. Type a.a used by dataset b.b1</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: type a.a being used by dataset b.b1</expected-error>
<source-location>false</source-location>
</compilation-unit>
</test-case>
@@ -12027,18 +12027,18 @@
<test-case FilePath="user-defined-functions">
<compilation-unit name="drop-dependency-1">
<output-dir compare="Text">drop-dependency-1</output-dir>
- <expected-error>Cannot drop dataverse. Function B.f0(2) depends on function C.f1(2)</expected-error>
- <expected-error>Cannot drop dataverse. Function B.f3(2) depends on function C.f2(...)</expected-error>
- <expected-error>Cannot drop dataverse. Function B.f5(...) depends on function C.f4(2)</expected-error>
- <expected-error>Cannot drop dataverse. Function B.f7(...) depends on function C.f6(...)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: function C.f1(2) being used by function B.f0(2)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: function C.f2(...) being used by function B.f3(2)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: function C.f4(2) being used by function B.f5(...)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: function C.f6(...) being used by function B.f7(...)</expected-error>
<source-location>false</source-location>
</compilation-unit>
</test-case>
<test-case FilePath="user-defined-functions">
<compilation-unit name="drop-dependency-2">
<output-dir compare="Text">drop-dependency-2</output-dir>
- <expected-error>Cannot drop dataverse. Function B.f2(2) depends on dataset C.TweetMessages</expected-error>
- <expected-error>Cannot drop dataverse. Function B.f3(...) depends on dataset C.TweetMessages</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: dataset C.TweetMessages being used by function B.f2(2)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: dataset C.TweetMessages being used by function B.f3(...)</expected-error>
<source-location>false</source-location>
</compilation-unit>
</test-case>
@@ -12780,7 +12780,7 @@
<test-case FilePath="feeds">
<compilation-unit name="drop-function-used-by-feed">
<output-dir compare="Text">drop-function-used-by-feed</output-dir>
- <expected-error>Cannot drop function experiments.test_func0(1) being used by feed connection TwitterUsers.UserFeed</expected-error>
+ <expected-error>ASX1148: Cannot drop function experiments.test_func0(1) being used by feed connection experiments.UserFeed</expected-error>
<source-location>false</source-location>
</compilation-unit>
</test-case>
@@ -12792,7 +12792,7 @@
<test-case FilePath="feeds">
<compilation-unit name="drop-dataverse-with-function-used-by-feed">
<output-dir compare="Text">drop-dataverse-with-function-used-by-feed</output-dir>
- <expected-error>Cannot drop dataverse. Feed connection feeddv.UserFeed depends on function fundv.test_func0(1)</expected-error>
+ <expected-error>ASX1147: Cannot drop dataverse: function fundv.test_func0(1) being used by feed connection feeddv.UserFeed</expected-error>
<source-location>false</source-location>
</compilation-unit>
</test-case>
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
index 19be2c8..47a388d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/exceptions/ErrorCode.java
@@ -219,6 +219,20 @@
INVALID_HINT(1132),
ONLY_SINGLE_AUTHENTICATION_IS_ALLOWED(1133),
NO_AUTH_METHOD_PROVIDED(1134),
+ NODE_EXISTS(1135),
+ NODEGROUP_EXISTS(1136),
+ COMPACTION_POLICY_EXISTS(1137),
+ EXTERNAL_FILE_EXISTS(1138),
+ FEED_EXISTS(1139),
+ FEED_POLICY_EXISTS(1140),
+ FEED_CONNECTION_EXISTS(1141),
+ LIBRARY_EXISTS(1142),
+ UNKNOWN_EXTERNAL_FILE(1143),
+ UNKNOWN_FEED(1144),
+ UNKNOWN_FEED_CONNECTION(1145),
+ UNKNOWN_FEED_POLICY(1146),
+ CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS(1147),
+ CANNOT_DROP_OBJECT_DEPENDENT_EXISTS(1148),
// Feed errors
DATAFLOW_ILLEGAL_STATE(3001),
diff --git a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
index 3eed1d8..50f6458 100644
--- a/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
+++ b/asterixdb/asterix-common/src/main/resources/asx_errormsg/en.properties
@@ -203,7 +203,7 @@
1114 = The provided external dataset configuration returned no files from the external source
1115 = Invalid name for a database object: \"%1$s\"
1116 = Cannot find synonym with name %1$s
-1117 = Unknown library %1$s
+1117 = Cannot find library with name %1$s
1118 = Too many grouping sets in group by clause: %1$s. Maximum allowed: %2$s.
1119 = Invalid argument to grouping() function
1120 = Unexpected alias: %1$s
@@ -221,6 +221,20 @@
1132 = Invalid specification for hint %1$s. %2$s
1133 = Only a single authentication method is allowed: connectionString, accountName & accountKey, or accountName & sharedAccessSignature
1134 = No authentication parameters provided
+1135 = A node with this name %1$s already exists
+1136 = A node group with this name %1$s already exists
+1137 = A compaction policy with this name %1$s already exists
+1138 = An external file with this number %1$s already exists in dataset %2$s
+1139 = A feed with this name %1$s already exists
+1140 = A feed policy with this name %1$s already exists
+1141 = A feed connection between feed %1$s and dataset %2$s already exists
+1142 = A library with this name %1$s already exists
+1143 = Cannot find external file with number %1$s in dataset %2$s
+1144 = Cannot find feed with name %1$s
+1145 = Cannot find feed connection between feed %1$s and dataset %2$s
+1146 = Cannot find feed policy with name %1$s
+1147 = Cannot drop dataverse: %1$s %2$s being used by %3$s %4$s
+1148 = Cannot drop %1$s %2$s being used by %3$s %4$s
# Feed Errors
3001 = Illegal state.
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 13d6270..088bbc6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -25,12 +25,14 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.dataflow.LSMIndexUtil;
+import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
@@ -86,6 +88,7 @@
import org.apache.asterix.metadata.entitytupletranslators.NodeTupleTranslator;
import org.apache.asterix.metadata.entitytupletranslators.SynonymTupleTranslator;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.TypeUtil;
import org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor;
import org.apache.asterix.metadata.valueextractors.TupleCopyValueExtractor;
@@ -283,30 +286,21 @@
@Override
public <T extends IExtensionMetadataEntity> void addEntity(TxnId txnId, T entity) throws AlgebricksException {
- ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
- if (index == null) {
- throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
- }
+ ExtensionMetadataDataset<T> index = getExtensionMetadataDataset(entity.getDatasetId());
IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
addEntity(txnId, entity, tupleTranslator, index);
}
@Override
public <T extends IExtensionMetadataEntity> void upsertEntity(TxnId txnId, T entity) throws AlgebricksException {
- ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
- if (index == null) {
- throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
- }
+ ExtensionMetadataDataset<T> index = getExtensionMetadataDataset(entity.getDatasetId());
IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
upsertEntity(txnId, entity, tupleTranslator, index);
}
@Override
public <T extends IExtensionMetadataEntity> void deleteEntity(TxnId txnId, T entity) throws AlgebricksException {
- ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
- if (index == null) {
- throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
- }
+ ExtensionMetadataDataset<T> index = getExtensionMetadataDataset(entity.getDatasetId());
IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
deleteEntity(txnId, entity, tupleTranslator, index);
}
@@ -314,15 +308,21 @@
@Override
public <T extends IExtensionMetadataEntity> List<T> getEntities(TxnId txnId, IExtensionMetadataSearchKey searchKey)
throws AlgebricksException {
- ExtensionMetadataDataset<T> index =
- (ExtensionMetadataDataset<T>) extensionDatasets.get(searchKey.getDatasetId());
- if (index == null) {
- throw new AlgebricksException("Metadata Extension Index: " + searchKey.getDatasetId() + " was not found");
- }
+ ExtensionMetadataDataset<T> index = getExtensionMetadataDataset(searchKey.getDatasetId());
IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(false);
return getEntities(txnId, searchKey.getSearchKey(), tupleTranslator, index);
}
+ private <T extends IExtensionMetadataEntity> ExtensionMetadataDataset<T> getExtensionMetadataDataset(
+ ExtensionMetadataDatasetId datasetId) throws AlgebricksException {
+ ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(datasetId);
+ if (index == null) {
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.METADATA_ERROR,
+ "Metadata Extension Index: " + datasetId + " was not found");
+ }
+ return index;
+ }
+
@Override
public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException {
try {
@@ -331,8 +331,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException(
- "A dataverse with this name " + dataverse.getDataverseName() + " already exists.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.DATAVERSE_EXISTS, e,
+ dataverse.getDataverseName());
} else {
throw new AlgebricksException(e);
}
@@ -357,8 +357,8 @@
}
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A dataset with this name " + dataset.getDatasetName()
- + " already exists in dataverse '" + dataset.getDataverseName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.DATASET_EXISTS, e,
+ dataset.getDatasetName(), dataset.getDataverseName());
} else {
throw new AlgebricksException(e);
}
@@ -373,7 +373,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("An index with name '" + index.getIndexName() + "' already exists.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.INDEX_EXISTS, e,
+ index.getIndexName());
} else {
throw new AlgebricksException(e);
}
@@ -388,7 +389,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.NODE_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A node with name '" + node.getNodeName() + "' already exists.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.NODE_EXISTS, e,
+ node.getNodeName());
} else {
throw new AlgebricksException(e);
}
@@ -403,8 +405,8 @@
modifyMetadataIndex(modificationOp, txnId, MetadataPrimaryIndexes.NODEGROUP_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException(
- "A nodegroup with name '" + nodeGroup.getNodeGroupName() + "' already exists.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.NODEGROUP_EXISTS, e,
+ nodeGroup.getNodeGroupName());
} else {
throw new AlgebricksException(e);
}
@@ -420,8 +422,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException(
- "A datatype with name '" + datatype.getDatatypeName() + "' already exists.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.TYPE_EXISTS, e,
+ datatype.getDatatypeName());
} else {
throw new AlgebricksException(e);
}
@@ -438,8 +440,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A function with this name " + function.getSignature()
- + " already exists in dataverse '" + function.getDataverseName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.FUNCTION_EXISTS, e,
+ function.getName());
} else {
throw new AlgebricksException(e);
}
@@ -583,8 +585,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.DATAVERSE_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop dataverse '" + dataverseName + "' because it doesn't exist.",
- e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_DATAVERSE, e,
+ dataverseName);
} else {
throw new AlgebricksException(e);
}
@@ -612,7 +614,8 @@
Dataset dataset = getDataset(txnId, dataverseName, datasetName);
if (dataset == null) {
- throw new AlgebricksException("Cannot drop dataset '" + datasetName + "' because it doesn't exist.");
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE,
+ datasetName, dataverseName);
}
try {
// Delete entry from the 'datasets' dataset.
@@ -668,8 +671,7 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException(
- "Cannot drop index '" + datasetName + "." + indexName + "' because it doesn't exist.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_INDEX, e, indexName);
} else {
throw new AlgebricksException(e);
}
@@ -678,18 +680,15 @@
@Override
public boolean dropNodegroup(TxnId txnId, String nodeGroupName, boolean failSilently) throws AlgebricksException {
- List<String> datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(txnId, nodeGroupName);
- if (!datasetNames.isEmpty()) {
+ List<Dataset> datasets = getDatasetsPartitionedOnThisNodeGroup(txnId, nodeGroupName);
+ if (!datasets.isEmpty()) {
if (failSilently) {
return false;
}
- StringBuilder sb = new StringBuilder();
- sb.append("Nodegroup '" + nodeGroupName
- + "' cannot be dropped; it was used for partitioning these datasets:");
- for (int i = 0; i < datasetNames.size(); i++) {
- sb.append("\n" + (i + 1) + "- " + datasetNames.get(i) + ".");
- }
- throw new AlgebricksException(sb.toString());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "node group",
+ nodeGroupName, "datasets",
+ datasets.stream().map(DatasetUtil::getFullyQualifiedDisplayName).collect(Collectors.joining(", ")));
}
try {
ITupleReference searchKey = createTuple(nodeGroupName);
@@ -700,8 +699,8 @@
return true;
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop nodegroup '" + nodeGroupName + "' because it doesn't exist",
- e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_NODEGROUP, e,
+ nodeGroupName);
} else {
throw new AlgebricksException(e);
}
@@ -735,7 +734,8 @@
}
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_TYPE, e,
+ datatypeName);
} else {
throw new AlgebricksException(e);
}
@@ -752,7 +752,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.DATATYPE_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop type '" + datatypeName + "' because it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_TYPE, e,
+ datatypeName);
} else {
throw new AlgebricksException(e);
}
@@ -932,16 +933,18 @@
continue;
}
if (set.getItemTypeDataverseName().equals(dataverseName)) {
- throw new AlgebricksException("Cannot drop dataverse. Type "
- + TypeUtil.getFullyQualifiedDisplayName(set.getItemTypeDataverseName(), set.getItemTypeName())
- + " used by dataset " + DatasetUtil.getFullyQualifiedDisplayName(set));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS, "type",
+ TypeUtil.getFullyQualifiedDisplayName(set.getItemTypeDataverseName(), set.getItemTypeName()),
+ "dataset", DatasetUtil.getFullyQualifiedDisplayName(set));
}
if (set.getMetaItemTypeDataverseName() != null
&& set.getMetaItemTypeDataverseName().equals(dataverseName)) {
- throw new AlgebricksException("Cannot drop dataverse. Type "
- + TypeUtil.getFullyQualifiedDisplayName(set.getMetaItemTypeDataverseName(),
- set.getMetaItemTypeName())
- + " used by dataset " + DatasetUtil.getFullyQualifiedDisplayName(set));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS, "type",
+ TypeUtil.getFullyQualifiedDisplayName(set.getMetaItemTypeDataverseName(),
+ set.getMetaItemTypeName()),
+ "dataset", DatasetUtil.getFullyQualifiedDisplayName(set));
}
}
@@ -955,22 +958,28 @@
}
for (Triple<DataverseName, String, String> datasetDependency : function.getDependencies().get(0)) {
if (datasetDependency.first.equals(dataverseName)) {
- throw new AlgebricksException("Cannot drop dataverse. Function " + function.getSignature()
- + " depends on dataset " + DatasetUtil.getFullyQualifiedDisplayName(datasetDependency.first,
- datasetDependency.second));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS,
+ "dataset",
+ DatasetUtil.getFullyQualifiedDisplayName(datasetDependency.first, datasetDependency.second),
+ "function", function.getSignature());
}
}
for (Triple<DataverseName, String, String> functionDependency : function.getDependencies().get(1)) {
if (functionDependency.first.equals(dataverseName)) {
- throw new AlgebricksException("Cannot drop dataverse. Function " + function.getSignature()
- + " depends on function " + new FunctionSignature(functionDependency.first,
- functionDependency.second, Integer.parseInt(functionDependency.third)));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS,
+ "function", new FunctionSignature(functionDependency.first, functionDependency.second,
+ Integer.parseInt(functionDependency.third)),
+ "function", function.getSignature());
}
}
for (Triple<DataverseName, String, String> type : function.getDependencies().get(2)) {
if (type.first.equals(dataverseName)) {
- throw new AlgebricksException("Cannot drop dataverse. Function " + function.getSignature()
- + " depends on type " + TypeUtil.getFullyQualifiedDisplayName(type.first, type.second));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS,
+ "type", TypeUtil.getFullyQualifiedDisplayName(type.first, type.second), "function",
+ function.getSignature());
}
}
}
@@ -984,9 +993,10 @@
}
for (FunctionSignature functionSignature : feedConnection.getAppliedFunctions()) {
if (dataverseName.equals(functionSignature.getDataverseName())) {
- throw new AlgebricksException(
- "Cannot drop dataverse. Feed connection " + feedConnection.getDataverseName() + "."
- + feedConnection.getFeedName() + " depends on function " + functionSignature);
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_DATAVERSE_DEPENDENT_EXISTS,
+ "function", functionSignature, "feed connection", MetadataUtil.getFullyQualifiedDisplayName(
+ feedConnection.getDataverseName(), feedConnection.getFeedName()));
}
}
}
@@ -1000,8 +1010,9 @@
if (functionalDependency.first.equals(signature.getDataverseName())
&& functionalDependency.second.equals(signature.getName())
&& functionalDependency.third.equals(Integer.toString(signature.getArity()))) {
- throw new AlgebricksException(
- "Cannot drop function " + signature + " being used by function " + function.getSignature());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS,
+ "function", signature, "function", function.getSignature());
}
}
}
@@ -1010,8 +1021,10 @@
List<FeedConnection> feedConnections = getAllFeedConnections(txnId);
for (FeedConnection feedConnection : feedConnections) {
if (feedConnection.containsFunction(signature)) {
- throw new AlgebricksException("Cannot drop function " + signature + " being used by feed connection "
- + feedConnection.getDatasetName() + "." + feedConnection.getFeedName());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "function",
+ signature, "feed connection", MetadataUtil.getFullyQualifiedDisplayName(
+ feedConnection.getDataverseName(), feedConnection.getFeedName()));
}
}
}
@@ -1023,9 +1036,10 @@
for (Function function : functions) {
for (Triple<DataverseName, String, String> datasetDependency : function.getDependencies().get(0)) {
if (datasetDependency.first.equals(dataverseName) && datasetDependency.second.equals(datasetName)) {
- throw new AlgebricksException("Cannot drop dataset "
- + DatasetUtil.getFullyQualifiedDisplayName(dataverseName, datasetName)
- + " being used by function " + function.getSignature());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS,
+ "dataset", DatasetUtil.getFullyQualifiedDisplayName(dataverseName, datasetName), "function",
+ function.getSignature());
}
}
}
@@ -1043,8 +1057,10 @@
for (Function function : functions) {
if (libraryName.equals(function.getLibraryName())
&& dataverseName.equals(function.getLibraryDataverseName())) {
- throw new AlgebricksException("Cannot drop library " + dataverseName + '.' + libraryName
- + " being used by funciton " + function.getSignature());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "library",
+ MetadataUtil.getFullyQualifiedDisplayName(dataverseName, libraryName), "function",
+ function.getSignature());
}
}
}
@@ -1055,9 +1071,11 @@
for (DatasourceAdapter adapter : adapters) {
if (libraryName.equals(adapter.getLibraryName())
&& dataverseName.equals(adapter.getLibraryDataverseName())) {
- throw new AlgebricksException("Cannot drop library " + dataverseName + '.' + libraryName
- + " being used by adapter " + adapter.getAdapterIdentifier().getDataverseName() + '.'
- + adapter.getAdapterIdentifier().getName());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "library",
+ MetadataUtil.getFullyQualifiedDisplayName(dataverseName, libraryName), "adapter",
+ MetadataUtil.getFullyQualifiedDisplayName(adapter.getAdapterIdentifier().getDataverseName(),
+ adapter.getAdapterIdentifier().getName()));
}
}
}
@@ -1075,9 +1093,10 @@
List<Dataset> datasets = getAllDatasets(txnId);
for (Dataset set : datasets) {
if (set.getItemTypeName().equals(datatypeName) && set.getItemTypeDataverseName().equals(dataverseName)) {
- throw new AlgebricksException(
- "Cannot drop type " + TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName)
- + " being used by dataset " + DatasetUtil.getFullyQualifiedDisplayName(set));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "type",
+ TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName), "dataset",
+ DatasetUtil.getFullyQualifiedDisplayName(set));
}
}
}
@@ -1099,9 +1118,10 @@
}
AbstractComplexType recType = (AbstractComplexType) dataType.getDatatype();
if (recType.containsType(typeToBeDropped)) {
- throw new AlgebricksException("Cannot drop type "
- + TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName) + " being used by type "
- + TypeUtil.getFullyQualifiedDisplayName(dataverseName, recType.getTypeName()));
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "type",
+ TypeUtil.getFullyQualifiedDisplayName(dataverseName, datatypeName), "type",
+ TypeUtil.getFullyQualifiedDisplayName(dataverseName, recType.getTypeName()));
}
}
}
@@ -1113,9 +1133,10 @@
for (Function function : functions) {
for (Triple<DataverseName, String, String> datasetDependency : function.getDependencies().get(2)) {
if (datasetDependency.first.equals(dataverseName) && datasetDependency.second.equals(dataTypeName)) {
- throw new AlgebricksException(
- "Cannot drop type " + TypeUtil.getFullyQualifiedDisplayName(dataverseName, dataTypeName)
- + " is being used by function " + function.getSignature());
+ throw new AsterixException(
+ org.apache.asterix.common.exceptions.ErrorCode.CANNOT_DROP_OBJECT_DEPENDENT_EXISTS, "type",
+ TypeUtil.getFullyQualifiedDisplayName(dataverseName, dataTypeName), "function",
+ function.getSignature());
}
}
}
@@ -1146,19 +1167,17 @@
return nestedTypes;
}
- private List<String> getDatasetNamesPartitionedOnThisNodeGroup(TxnId txnId, String nodegroup)
+ private List<Dataset> getDatasetsPartitionedOnThisNodeGroup(TxnId txnId, String nodegroup)
throws AlgebricksException {
- // this needs to scan the datasets and return the datasets that use this
- // nodegroup
- List<String> nodeGroupDatasets = new ArrayList<>();
+ // this needs to scan the datasets and return the datasets that use this nodegroup
+ List<Dataset> nodeGroupDatasets = new ArrayList<>();
List<Dataset> datasets = getAllDatasets(txnId);
for (Dataset set : datasets) {
if (set.getNodeGroupName().equals(nodegroup)) {
- nodeGroupDatasets.add(set.getDatasetName());
+ nodeGroupDatasets.add(set);
}
}
return nodeGroupDatasets;
-
}
@Override
@@ -1277,8 +1296,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.FUNCTION_DATASET, functionTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException(
- "Cannot drop function '" + functionSignature + "' because it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_FUNCTION, e,
+ functionSignature.toString());
} else {
throw new AlgebricksException(e);
}
@@ -1374,7 +1393,8 @@
IValueExtractor<T> valueExtractor, List<T> results) throws AlgebricksException, HyracksDataException {
IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
if (index.getFile() == null) {
- throw new AlgebricksException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.METADATA_ERROR,
+ "No file for Index " + index.getDataverseName() + "." + index.getIndexName());
}
String resourceName = index.getFile().getRelativePath();
IIndex indexInstance = datasetLifecycleManager.get(resourceName);
@@ -1514,9 +1534,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, adapterTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A adapter with this name " + adapter.getAdapterIdentifier().getName()
- + " already exists in dataverse '" + adapter.getAdapterIdentifier().getDataverseName() + "'.",
- e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.ADAPTER_EXISTS, e,
+ adapter.getAdapterIdentifier().getName());
} else {
throw new AlgebricksException(e);
}
@@ -1525,10 +1544,6 @@
@Override
public void dropAdapter(TxnId txnId, DataverseName dataverseName, String adapterName) throws AlgebricksException {
- DatasourceAdapter adapter = getAdapter(txnId, dataverseName, adapterName);
- if (adapter == null) {
- throw new AlgebricksException("Cannot drop adapter '" + adapter + "' because it doesn't exist.");
- }
try {
// Delete entry from the 'Adapter' dataset.
ITupleReference searchKey = createTuple(dataverseName, adapterName);
@@ -1539,7 +1554,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, datasetTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop adapter '" + adapterName + " since it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_ADAPTER, e,
+ adapterName);
} else {
throw new AlgebricksException(e);
}
@@ -1575,8 +1591,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, compactionPolicyTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A compaction policy with this name " + compactionPolicy.getPolicyName()
- + " already exists in dataverse '" + compactionPolicy.getPolicyName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.COMPACTION_POLICY_EXISTS, e,
+ compactionPolicy.getPolicyName());
} else {
throw new AlgebricksException(e);
}
@@ -1627,8 +1643,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.LIBRARY_DATASET, libraryTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A library with this name " + library.getDataverseName()
- + " already exists in dataverse '" + library.getDataverseName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.LIBRARY_EXISTS, e,
+ library.getName());
} else {
throw new AlgebricksException(e);
}
@@ -1654,7 +1670,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.LIBRARY_DATASET, datasetTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop library '" + libraryName + "' because it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_LIBRARY, e,
+ libraryName);
} else {
throw new AlgebricksException(e);
}
@@ -1692,8 +1709,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, feedPolicyTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A feed policy with this name " + feedPolicy.getPolicyName()
- + " already exists in dataverse '" + feedPolicy.getPolicyName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.FEED_POLICY_EXISTS, e,
+ feedPolicy.getPolicyName());
} else {
throw new AlgebricksException(e);
}
@@ -1726,7 +1743,12 @@
ITupleReference feedConnTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedConnection);
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, feedConnTuple);
} catch (HyracksDataException e) {
- throw new AlgebricksException(e);
+ if (e.matches(ErrorCode.DUPLICATE_KEY)) {
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.FEED_CONNECTION_EXISTS, e,
+ feedConnection.getFeedName(), feedConnection.getDatasetName());
+ } else {
+ throw new AlgebricksException(e);
+ }
}
}
@@ -1774,7 +1796,12 @@
getTupleToBeDeleted(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey);
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, tuple);
} catch (HyracksDataException e) {
- throw new AlgebricksException(e);
+ if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_FEED_CONNECTION, e,
+ feedName, datasetName);
+ } else {
+ throw new AlgebricksException(e);
+ }
}
}
@@ -1787,8 +1814,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_DATASET, feedTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A feed with this name " + feed.getFeedName()
- + " already exists in dataverse '" + feed.getDataverseName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.FEED_EXISTS, e,
+ feed.getFeedName());
} else {
throw new AlgebricksException(e);
}
@@ -1836,7 +1863,7 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.FEED_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop feed '" + feedName + "' because it doesn't exist", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_FEED, e, feedName);
} else {
throw new AlgebricksException(e);
}
@@ -1851,7 +1878,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, tuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Unknown feed policy " + policyName, e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_FEED_POLICY, e,
+ policyName);
} else {
throw new AlgebricksException(e);
}
@@ -1883,9 +1911,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, externalFileTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("An externalFile with this number " + externalFile.getFileNumber()
- + " already exists in dataset '" + externalFile.getDatasetName() + "' in dataverse '"
- + externalFile.getDataverseName() + "'.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.EXTERNAL_FILE_EXISTS, e,
+ externalFile.getFileNumber(), externalFile.getDatasetName());
} else {
throw new AlgebricksException(e);
}
@@ -1920,7 +1947,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, datasetTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Couldn't drop externalFile.", e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_EXTERNAL_FILE, e,
+ fileNumber, datasetName);
} else {
throw new AlgebricksException(e);
}
@@ -1996,8 +2024,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.SYNONYM_DATASET, synonymTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.DUPLICATE_KEY)) {
- throw new AlgebricksException("A synonym with name '" + synonym.getSynonymName() + "' already exists.",
- e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.SYNONYM_EXISTS, e,
+ synonym.getSynonymName());
} else {
throw new AlgebricksException(e);
}
@@ -2006,10 +2034,6 @@
@Override
public void dropSynonym(TxnId txnId, DataverseName dataverseName, String synonymName) throws AlgebricksException {
- Synonym synonym = getSynonym(txnId, dataverseName, synonymName);
- if (synonym == null) {
- throw new AlgebricksException("Cannot drop synonym '" + synonym + "' because it doesn't exist.");
- }
try {
// Delete entry from the 'Synonym' dataset.
ITupleReference searchKey = createTuple(dataverseName, synonymName);
@@ -2020,7 +2044,8 @@
deleteTupleFromIndex(txnId, MetadataPrimaryIndexes.SYNONYM_DATASET, synonymTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException("Cannot drop synonym '" + synonymName, e);
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_SYNONYM, e,
+ synonymName);
} else {
throw new AlgebricksException(e);
}
@@ -2074,8 +2099,8 @@
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
} catch (HyracksDataException e) {
if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
- throw new AlgebricksException(
- "Cannot drop dataset '" + dataset.getDatasetName() + "' because it doesn't exist");
+ throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE,
+ e, dataset.getDatasetName(), dataset.getDataverseName());
} else {
throw new AlgebricksException(e);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 52a133d..4bcc5f0 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -563,7 +563,7 @@
}
public static String getFullyQualifiedDisplayName(DataverseName dataverseName, String datasetName) {
- return dataverseName + "." + datasetName;
+ return MetadataUtil.getFullyQualifiedDisplayName(dataverseName, datasetName);
}
/***
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
index 3133aba..7bc6e98 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
@@ -18,6 +18,8 @@
*/
package org.apache.asterix.metadata.utils;
+import org.apache.asterix.common.metadata.DataverseName;
+
public class MetadataUtil {
public static final int PENDING_NO_OP = 0;
public static final int PENDING_ADD_OP = 1;
@@ -38,4 +40,8 @@
return "Unknown Pending Operation";
}
}
+
+ public static String getFullyQualifiedDisplayName(DataverseName dataverseName, String objectName) {
+ return dataverseName + "." + objectName;
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
index 7660909..65a800e 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/TypeUtil.java
@@ -289,7 +289,7 @@
}
public static String getFullyQualifiedDisplayName(DataverseName dataverseName, String typeName) {
- return dataverseName + "." + typeName;
+ return MetadataUtil.getFullyQualifiedDisplayName(dataverseName, typeName);
}
/**