[ASTERIXDB-3259][MTD] Change MetadataProvider/MetadataManagerUtil APIs to accept 'database'
- user model changes: no
- storage format changes: no
- interface changes: yes
Details:
Change the MetadataProvider/MetadataManagerUtil APIs to accept a 'database' argument in addition to the dataverse name, and update all call sites accordingly.
Ensure 'database' is non-null in MetadataManager.
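For illustration only (not part of the patch), the recurring call-site pattern is sketched
below in Java; identifiers mirror those used in the hunks, and the sketch assumes, as the
diff does, that MetadataUtil.resolveDatabase(null, dvName) supplies a default database
when the caller only has a dataverse name:

    // sketch: resolve the database once, then pass it as the new leading argument
    String database = MetadataUtil.resolveDatabase(null, dvName);
    Dataset dataset = metadataProvider.findDataset(database, dvName, datasetName);
    // entities that already carry a database pass it through directly
    List<Index> indexes = metadataProvider.getDatasetIndexes(dataset.getDatabaseName(),
            dataset.getDataverseName(), dataset.getDatasetName());
    // MetadataManager-side non-null guard (hypothetical sketch; that change is not in
    // the hunks shown here)
    Objects.requireNonNull(database, "database");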
Change-Id: I13584ba6bcd03b3befde12ddad5a6e62ab97b970
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/17792
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Ali Alsuliman <ali.al.solaiman@gmail.com>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
index fd54891..247408f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
@@ -32,6 +32,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.IndexUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.optimizer.rules.am.BTreeJobGenParams;
@@ -126,7 +127,9 @@
int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ String database = MetadataUtil.resolveDatabase(null, jobGenParams.getDataverseName());
+ Dataset dataset =
+ metadataProvider.findDataset(database, jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
ITupleFilterFactory tupleFilterFactory = null;
long outputLimit = -1;
@@ -240,7 +243,8 @@
}
propsLocal.add(new LocalOrderProperty(orderColumns));
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = mp.findDataset(searchIndex.getDataverseName(), searchIndex.getDatasetName());
+ Dataset dataset = mp.findDataset(searchIndex.getDatabaseName(), searchIndex.getDataverseName(),
+ searchIndex.getDatasetName());
PartitioningProperties partitioningProperties = mp.getPartitioningProperties(dataset);
pv[0] = new StructuralPropertiesVector(UnorderedPartitionedProperty.ofPartitionsMap(searchKeyVars,
domain, partitioningProperties.getComputeStorageMap()), propsLocal);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index b83c8f5..953dc10 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -28,6 +28,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.FullTextUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.BuiltinFunctions;
@@ -116,7 +117,9 @@
jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ String database = MetadataUtil.resolveDatabase(null, jobGenParams.getDataverseName());
+ Dataset dataset =
+ metadataProvider.findDataset(database, jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
boolean propagateIndexFilter = unnestMapOp.propagateIndexFilter();
@@ -180,7 +183,8 @@
IBinaryTokenizerFactory queryTokenizerFactory =
InvertedIndexAccessMethod.getBinaryTokenizerFactory(searchModifierType, searchKeyType, secondaryIndex);
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory =
- FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider, secondaryIndex.getDataverseName(),
+ FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider, secondaryIndex.getDatabaseName(),
+ secondaryIndex.getDataverseName(),
((Index.TextIndexDetails) secondaryIndex.getIndexDetails()).getFullTextConfigName());
IIndexDataflowHelperFactory dataflowHelperFactory =
new IndexDataflowHelperFactory(metadataProvider.getStorageComponentProvider().getStorageManager(),
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
index 6b5adea..6eaa9fa 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
@@ -24,6 +24,7 @@
import org.apache.asterix.metadata.declared.DataSourceId;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.optimizer.rules.am.RTreeJobGenParams;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
@@ -89,11 +90,12 @@
int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ String database = MetadataUtil.resolveDatabase(null, jobGenParams.getDataverseName());
+ Dataset dataset = mp.findDataset(database, jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
List<LogicalVariable> outputVars = unnestMap.getVariables();
if (jobGenParams.getRetainInput()) {
- outputVars = new ArrayList<LogicalVariable>();
+ outputVars = new ArrayList<>();
VariableUtilities.getLiveVariables(unnestMap, outputVars);
}
boolean retainMissing = false;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckAndSetRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckAndSetRule.java
index 3b2153c..91c15df 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckAndSetRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckAndSetRule.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.utils.FullTextUtil;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
@@ -185,9 +186,12 @@
}
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- DataverseName dataverseName = metadataProvider.getDefaultDataverseName();
- funcExpr.setOpaqueParameters(new Object[] { FullTextUtil
- .fetchFilterAndCreateConfigEvaluator(metadataProvider, dataverseName, ftConfigName) });
+ Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
+ String database = defaultDataverse.getDatabaseName();
+ DataverseName dataverseName = defaultDataverse.getDataverseName();
+ funcExpr.setOpaqueParameters(
+ new Object[] { FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider, database,
+ dataverseName, ftConfigName) });
// Resets the last argument.
funcExpr.getArguments().clear();
funcExpr.getArguments().addAll(newExprs);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index 4a0c4ae..d3c7cac 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -44,6 +44,7 @@
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.utils.ArrayIndexUtil;
import org.apache.asterix.metadata.utils.IndexUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AOrderedList;
import org.apache.asterix.om.base.AString;
@@ -168,8 +169,9 @@
DataSource datasetSource = (DataSource) primaryIndexModificationOp.getDataSource();
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
DataverseName dataverseName = datasetSource.getId().getDataverseName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = datasetSource.getId().getDatasourceName();
- Dataset dataset = mp.findDataset(dataverseName, datasetName);
+ Dataset dataset = mp.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -180,7 +182,8 @@
// Create operators for secondary index insert / delete.
String itemTypeName = dataset.getItemTypeName();
- IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), itemTypeName);
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ IAType itemType = mp.findType(itemTypeDatabase, dataset.getItemTypeDataverseName(), itemTypeName);
if (itemType.getTypeTag() != ATypeTag.OBJECT) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, "Only record types can be indexed.");
}
@@ -188,11 +191,14 @@
// meta type
ARecordType metaType = null;
if (dataset.hasMetaPart()) {
- metaType = (ARecordType) mp.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ String metaItemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ metaType = (ARecordType) mp.findType(metaItemTypeDatabase, dataset.getMetaItemTypeDataverseName(),
+ dataset.getMetaItemTypeName());
}
recType = (ARecordType) mp.findTypeForDatasetWithoutType(recType, metaType, dataset);
- List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> indexes =
+ mp.getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName());
Stream<Index> indexStream = indexes.stream();
indexStream = indexStream.filter(index -> index.getIndexType() != IndexType.SAMPLE);
if (primaryIndexModificationOp.getOperation() == Kind.INSERT && !primaryIndexModificationOp.isBulkload()) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
index 9070de1..f4dd1d3 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
@@ -34,6 +34,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.base.IAObject;
@@ -128,8 +129,8 @@
}
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
DataSourceId asid = ((IDataSource<DataSourceId>) scan.getDataSource()).getId();
-
- Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
+ String database = MetadataUtil.resolveDatabase(null, asid.getDataverseName());
+ Dataset dataset = mp.findDataset(database, asid.getDataverseName(), asid.getDatasourceName());
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, scan.getSourceLocation(),
asid.getDatasourceName(), asid.getDataverseName());
@@ -140,7 +141,8 @@
final Integer pos = ConstantExpressionUtil.getIntConstant(accessFun.getArguments().get(1).getValue());
if (pos != null) {
String tName = dataset.getItemTypeName();
- IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
+ String tDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ IAType t = mp.findType(tDatabase, dataset.getItemTypeDataverseName(), tName);
if (t.getTypeTag() != ATypeTag.OBJECT) {
return false;
}
@@ -150,7 +152,8 @@
}
}
- List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> datasetIndexes =
+ mp.getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName());
boolean hasSecondaryIndex = false;
for (Index index : datasetIndexes) {
if (index.isSecondaryIndex()) {
@@ -317,7 +320,8 @@
}
DataSourceId asid = dataSource.getId();
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
+ String database = MetadataUtil.resolveDatabase(null, asid.getDataverseName());
+ Dataset dataset = mp.findDataset(database, asid.getDataverseName(), asid.getDatasourceName());
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, scan.getSourceLocation(),
asid.getDatasourceName(), asid.getDataverseName());
@@ -333,7 +337,8 @@
// data part
String dataTypeName = dataset.getItemTypeName();
- IAType dataType = mp.findType(dataset.getItemTypeDataverseName(), dataTypeName);
+ String dataTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ IAType dataType = mp.findType(dataTypeDatabase, dataset.getItemTypeDataverseName(), dataTypeName);
if (dataType.getTypeTag() != ATypeTag.OBJECT) {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushValueAccessAndFilterDownRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushValueAccessAndFilterDownRule.java
index c74c293..f1f8b69 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushValueAccessAndFilterDownRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushValueAccessAndFilterDownRule.java
@@ -25,6 +25,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.optimizer.base.AsterixOptimizationContext;
import org.apache.asterix.optimizer.rules.pushdown.PushdownContext;
import org.apache.asterix.optimizer.rules.pushdown.PushdownProcessorsExecutor;
@@ -144,7 +145,8 @@
throws AlgebricksException {
DataverseName dataverse = dataSource.getId().getDataverseName();
String datasetName = dataSource.getId().getDatasourceName();
- Dataset dataset = metadataProvider.findDataset(dataverse, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverse);
+ Dataset dataset = metadataProvider.findDataset(database, dataverse, datasetName);
return dataset != null && (DatasetUtil.isFieldAccessPushdownSupported(dataset)
|| DatasetUtil.isFilterPushdownSupported(dataset)
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
index 5ec1a7f..78a5640 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
@@ -34,6 +34,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.optimizer.base.AnalysisUtil;
import org.apache.asterix.optimizer.rules.am.AccessMethodJobGenParams;
@@ -261,7 +262,8 @@
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
DataSourceId dataSourceId =
new DataSourceId(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
- Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ String database = MetadataUtil.resolveDatabase(null, jobGenParams.getDataverseName());
+ Dataset dataset = mp.findDataset(database, jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
IDataSourceIndex<String, DataSourceId> dsi =
mp.findDataSourceIndex(jobGenParams.getIndexName(), dataSourceId);
INodeDomain storageDomain = mp.findNodeDomain(dataset.getNodeGroupName());
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
index 9996e1f..abcd5c2 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -792,8 +792,8 @@
LogicalVariable datasetMetaVar = null;
if (subTree.getDataSourceType() != DataSourceType.COLLECTION_SCAN
&& subTree.getDataSourceType() != DataSourceType.INDEXONLY_PLAN_SECONDARY_INDEX_LOOKUP) {
- datasetIndexes = metadataProvider.getDatasetIndexes(subTree.getDataset().getDataverseName(),
- subTree.getDataset().getDatasetName());
+ datasetIndexes = metadataProvider.getDatasetIndexes(subTree.getDataset().getDatabaseName(),
+ subTree.getDataset().getDataverseName(), subTree.getDataset().getDatasetName());
List<LogicalVariable> datasetVars = subTree.getDataSourceVariables();
if (subTree.getDataset().hasMetaPart()) {
datasetMetaVar = datasetVars.get(datasetVars.size() - 1);
@@ -1108,6 +1108,7 @@
}
Pair<DataverseName, String> datasetInfo =
AnalysisUtil.getDatasetInfo((DataSourceScanOperator) dataSourceScanOp);
- return metadataProvider.getIndex(datasetInfo.first, datasetInfo.second, datasetInfo.second);
+ String database = MetadataUtil.resolveDatabase(null, datasetInfo.first);
+ return metadataProvider.getIndex(database, datasetInfo.first, datasetInfo.second, datasetInfo.second);
}
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
index bb0bddb..f2a2e26 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
@@ -37,6 +37,7 @@
import org.apache.asterix.metadata.utils.ArrayIndexUtil;
import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
@@ -98,8 +99,10 @@
filterSourceIndicator = DatasetUtil.getFilterSourceIndicator(dataset);
filterFieldName = DatasetUtil.getFilterField(dataset);
IAType filterSourceType = filterSourceIndicator == null || filterSourceIndicator == 0
- ? mp.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName())
- : mp.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ ? mp.findType(MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName()),
+ dataset.getItemTypeDataverseName(), dataset.getItemTypeName())
+ : mp.findType(MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName()),
+ dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
if (filterSourceType.getTypeTag() == ATypeTag.OBJECT) {
itemType = (ARecordType) filterSourceType;
@@ -118,7 +121,8 @@
List<IOptimizableFuncExpr> optFuncExprs = new ArrayList<>();
if (!analysisCtx.getMatchedFuncExprs().isEmpty()) {
- List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(),
+ dataset.getDatasetName());
for (int i = 0; i < analysisCtx.getMatchedFuncExprs().size(); i++) {
IOptimizableFuncExpr optFuncExpr = analysisCtx.getMatchedFuncExpr(i);
@@ -444,7 +448,9 @@
throw new CompilationException(ErrorCode.COMPILATION_ERROR, f.getSourceLocation(),
"Unexpected function for Unnest Map: " + fid);
}
- return ((MetadataProvider) context.getMetadataProvider()).findDataset(dataverseName, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ return ((MetadataProvider) context.getMetadataProvider()).findDataset(database, dataverseName,
+ datasetName);
}
}
if (descendantOp.getInputs().isEmpty()) {
@@ -583,10 +589,13 @@
"Could not find the corresponding index for an" + " index search.");
}
- IAType metaItemType = ((MetadataProvider) context.getMetadataProvider())
- .findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
- IAType recordItemType = ((MetadataProvider) context.getMetadataProvider())
- .findType(dataset.getMetaItemTypeDataverseName(), dataset.getItemTypeName());
+ String metaItemDatabase =
+ MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ String recordItemDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ IAType metaItemType = ((MetadataProvider) context.getMetadataProvider()).findType(metaItemDatabase,
+ dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ IAType recordItemType = ((MetadataProvider) context.getMetadataProvider()).findType(
+ recordItemDatabase, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
ARecordType recordType = (ARecordType) recordItemType;
ARecordType metaRecType = (ARecordType) metaItemType;
int numSecondaryKeys = KeyFieldTypeUtil.getNumSecondaryKeys(index, recordType, metaRecType);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
index fde1ad7..687792c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
@@ -30,6 +30,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.utils.ConstantExpressionUtil;
@@ -167,8 +168,10 @@
String indexName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 0);
DataverseName dataverseName = DataverseName
.createFromCanonicalForm(ConstantExpressionUtil.getStringArgument(functionCallExpression, 2));
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 3);
- Index index = ((MetadataProvider) metadataProvider).getIndex(dataverseName, datasetName, indexName);
+ Index index =
+ ((MetadataProvider) metadataProvider).getIndex(database, dataverseName, datasetName, indexName);
if (!index.isPrimaryIndex()) {
return null;
}
@@ -283,11 +286,13 @@
if (originalBTreeParameters.isEqCondition()) {
return null;
}
- dataset = mp.findDataset(originalBTreeParameters.getDataverseName(),
+ String database = MetadataUtil.resolveDatabase(null, originalBTreeParameters.getDataverseName());
+ dataset = mp.findDataset(database, originalBTreeParameters.getDataverseName(),
originalBTreeParameters.getDatasetName());
}
// #2. get all indexes and look for the primary one
- List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> indexes =
+ mp.getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName());
for (Index index : indexes) {
if (index.isPrimaryKeyIndex()) {
return Pair.of(dataset, index);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
index 29ee113..38266e7 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -29,6 +29,7 @@
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
@@ -314,13 +315,16 @@
return false;
}
// Find the dataset corresponding to the datasource in the metadata.
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.NO_METADATA_FOR_DATASET, root.getSourceLocation(),
datasetName);
}
// Get the record type for that dataset.
- IAType itemType = metadataProvider.findType(ds.getItemTypeDataverseName(), ds.getItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, ds.getItemTypeDataverseName());
+ IAType itemType =
+ metadataProvider.findType(itemTypeDatabase, ds.getItemTypeDataverseName(), ds.getItemTypeName());
if (itemType.getTypeTag() != ATypeTag.OBJECT) {
if (i == 0) {
return false;
@@ -332,8 +336,9 @@
ARecordType rType = (ARecordType) itemType;
// Get the meta record type for that dataset.
- ARecordType metaItemType = (ARecordType) metadataProvider.findType(ds.getMetaItemTypeDataverseName(),
- ds.getMetaItemTypeName());
+ String metaItemTypeDatabase = MetadataUtil.resolveDatabase(null, ds.getMetaItemTypeDataverseName());
+ ARecordType metaItemType = (ARecordType) metadataProvider.findType(metaItemTypeDatabase,
+ ds.getMetaItemTypeDataverseName(), ds.getMetaItemTypeName());
rType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(rType, metaItemType, ds);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/array/AbstractOperatorFromSubplanRewrite.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/array/AbstractOperatorFromSubplanRewrite.java
index 1611bda..a510deb 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/array/AbstractOperatorFromSubplanRewrite.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/array/AbstractOperatorFromSubplanRewrite.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.base.AInt16;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AInt64;
@@ -80,8 +81,9 @@
DataSourceScanOperator dataSourceScanOperator = (DataSourceScanOperator) workingOp;
Pair<DataverseName, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScanOperator);
DataverseName dataverseName = datasetInfo.first;
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = datasetInfo.second;
- if (metadataProvider.getDatasetIndexes(dataverseName, datasetName).stream()
+ if (metadataProvider.getDatasetIndexes(database, dataverseName, datasetName).stream()
.anyMatch(i -> i.getIndexType() == DatasetConfig.IndexType.ARRAY)) {
return true;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/pushdown/visitor/PushdownOperatorVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/pushdown/visitor/PushdownOperatorVisitor.java
index dd9acd5..0b958e7 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/pushdown/visitor/PushdownOperatorVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/pushdown/visitor/PushdownOperatorVisitor.java
@@ -31,6 +31,7 @@
import org.apache.asterix.metadata.declared.DatasetDataSource;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.utils.ConstantExpressionUtil;
import org.apache.asterix.optimizer.rules.pushdown.PushdownContext;
@@ -212,7 +213,8 @@
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
DataverseName dataverse = dataSource.getId().getDataverseName();
String datasetName = dataSource.getId().getDatasourceName();
- return mp.findDataset(dataverse, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverse);
+ return mp.findDataset(database, dataverse, datasetName);
}
/**
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 469fef2..e9af7da 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -88,6 +88,7 @@
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.base.ABoolean;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AInt64;
@@ -197,15 +198,19 @@
public ILogicalPlan translateCopyOrLoad(ICompiledDmlStatement stmt) throws AlgebricksException {
SourceLocation sourceLoc = stmt.getSourceLocation();
- Dataset dataset = metadataProvider.findDataset(stmt.getDataverseName(), stmt.getDatasetName());
+ String database = MetadataUtil.resolveDatabase(null, stmt.getDataverseName());
+ Dataset dataset = metadataProvider.findDataset(database, stmt.getDataverseName(), stmt.getDatasetName());
if (dataset == null) {
// This would never happen since we check for this in AqlTranslator
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, stmt.getDatasetName(),
stmt.getDataverseName());
}
- IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
- IAType metaItemType =
- metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ String metaItemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ IAType itemType = metadataProvider.findType(itemTypeDatabase, dataset.getItemTypeDataverseName(),
+ dataset.getItemTypeName());
+ IAType metaItemType = metadataProvider.findType(metaItemTypeDatabase, dataset.getMetaItemTypeDataverseName(),
+ dataset.getMetaItemTypeName());
itemType = metadataProvider.findTypeForDatasetWithoutType(itemType, metaItemType, dataset);
DatasetDataSource targetDatasource =
@@ -719,7 +724,8 @@
protected DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, DataverseName dataverseName,
String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -729,10 +735,13 @@
"Cannot write output to an external " + dataset());
}
DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
- IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
- IAType metaItemType =
- metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
- itemType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(itemType, metaItemType, dataset);
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ String metaItemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ IAType itemType = metadataProvider.findType(itemTypeDatabase, dataset.getItemTypeDataverseName(),
+ dataset.getItemTypeName());
+ IAType metaItemType = metadataProvider.findType(metaItemTypeDatabase, dataset.getMetaItemTypeDataverseName(),
+ dataset.getMetaItemTypeName());
+ itemType = metadataProvider.findTypeForDatasetWithoutType(itemType, metaItemType, dataset);
INodeDomain domain = metadataProvider.findNodeDomain(dataset.getNodeGroupName());
return new DatasetDataSource(sourceId, dataset, itemType, metaItemType, DataSource.Type.INTERNAL_DATASET,
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
index 033e077..3bd6cf0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
@@ -42,6 +42,7 @@
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.entities.Datatype;
import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.TypeUtil;
import org.apache.asterix.om.types.AOrderedListType;
import org.apache.asterix.om.types.ARecordType;
@@ -146,7 +147,8 @@
// solve remaining top level references
for (TypeSignature typeSignature : incompleteTopLevelTypeReferences.keySet()) {
IAType t;
- Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, typeSignature.getDataverseName(),
+ String typeDatabase = MetadataUtil.resolveDatabase(null, typeSignature.getDataverseName());
+ Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeDatabase, typeSignature.getDataverseName(),
typeSignature.getName());
if (dt == null) {
throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, typeSignature.getName());
@@ -160,10 +162,11 @@
// solve remaining field type references
for (String trefName : incompleteFieldTypes.keySet()) {
IAType t;
- Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, typeDataverse, trefName);
+ String typeDatabase = MetadataUtil.resolveDatabase(null, typeDataverse);
+ Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeDatabase, typeDataverse, trefName);
if (dt == null) {
- dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, MetadataConstants.METADATA_DATAVERSE_NAME,
- trefName);
+ dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, MetadataConstants.SYSTEM_DATABASE,
+ MetadataConstants.METADATA_DATAVERSE_NAME, trefName);
}
if (dt == null) {
throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, trefName);
@@ -190,7 +193,8 @@
IAType t;
Datatype dt;
if (MetadataManager.INSTANCE != null) {
- dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, typeSignature.getDataverseName(),
+ String typeDatabase = MetadataUtil.resolveDatabase(null, typeSignature.getDataverseName());
+ dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeDatabase, typeSignature.getDataverseName(),
typeSignature.getName());
if (dt == null) {
throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, typeSignature.getName());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index 353b849..7c8b5b8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -34,6 +34,7 @@
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.utils.FlushDatasetUtil;
import org.apache.commons.lang3.StringUtils;
@@ -91,6 +92,7 @@
return;
}
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
// Metadata transaction begins.
MetadataManager.INSTANCE.init();
@@ -99,7 +101,7 @@
MetadataProvider metadataProvider = MetadataProvider.create(appCtx, null);
try {
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
jsonResponse.put("error", StringUtils.capitalize(dataset()) + " " + datasetName
+ " does not exist in " + dataverse() + " " + dataverseName);
@@ -108,8 +110,9 @@
return;
}
FileSplit[] fileSplits = metadataProvider.splitsForIndex(mdTxnCtx, dataset, datasetName);
- ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
- dataset.getItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ ARecordType recordType = (ARecordType) metadataProvider.findType(itemTypeDatabase,
+ dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
List<List<String>> primaryKeys = dataset.getPrimaryKeys();
StringBuilder pkStrBuf = new StringBuilder();
for (List<String> keys : primaryKeys) {
@@ -123,7 +126,7 @@
hcc.getNodeControllerInfos());
// Flush the cached contents of the dataset to file system.
- FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName);
+ FlushDatasetUtil.flushDataset(hcc, metadataProvider, database, dataverseName, datasetName);
// Metadata transaction commits.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
index 68b93f0..0747f91 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
@@ -47,6 +47,7 @@
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.rebalance.NoOpDatasetRebalanceCallback;
import org.apache.asterix.utils.RebalanceUtil;
import org.apache.commons.collections4.CollectionUtils;
@@ -142,8 +143,9 @@
return;
}
// Schedules a rebalance task and wait for its completion.
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
CountDownLatch terminated =
- scheduleRebalance(dataverseName, datasetName, targetNodes, response, forceRebalance);
+ scheduleRebalance(database, dataverseName, datasetName, targetNodes, response, forceRebalance);
terminated.await();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
@@ -165,18 +167,18 @@
}
// Schedules a rebalance task.
- private synchronized CountDownLatch scheduleRebalance(DataverseName dataverseName, String datasetName,
- Set<String> targetNodes, IServletResponse response, boolean force) {
+ private synchronized CountDownLatch scheduleRebalance(String database, DataverseName dataverseName,
+ String datasetName, Set<String> targetNodes, IServletResponse response, boolean force) {
CountDownLatch terminated = new CountDownLatch(1);
- Future<Void> task = executor
- .submit(() -> doRebalance(dataverseName, datasetName, targetNodes, response, terminated, force));
+ Future<Void> task = executor.submit(
+ () -> doRebalance(database, dataverseName, datasetName, targetNodes, response, terminated, force));
rebalanceTasks.add(task);
rebalanceFutureTerminated.add(terminated);
return terminated;
}
// Performs the actual rebalance.
- private Void doRebalance(DataverseName dataverseName, String datasetName, Set<String> targetNodes,
+ private Void doRebalance(String database, DataverseName dataverseName, String datasetName, Set<String> targetNodes,
IServletResponse response, CountDownLatch terminated, boolean force) {
try {
// Sets the content type.
@@ -185,15 +187,16 @@
if (datasetName == null) {
// Rebalances datasets in a given dataverse or all non-metadata datasets.
Iterable<Dataset> datasets = dataverseName == null ? getAllDatasetsForRebalance()
- : getAllDatasetsForRebalance(dataverseName);
+ : getAllDatasetsForRebalance(database, dataverseName);
for (Dataset dataset : datasets) {
// By the time rebalanceDataset(...) is called, the dataset could have been dropped.
// If that's the case, rebalanceDataset(...) would be a no-op.
- rebalanceDataset(dataset.getDataverseName(), dataset.getDatasetName(), targetNodes, force);
+ rebalanceDataset(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName(),
+ targetNodes, force);
}
} else {
// Rebalances a given dataset from its current locations to the target nodes.
- rebalanceDataset(dataverseName, datasetName, targetNodes, force);
+ rebalanceDataset(database, dataverseName, datasetName, targetNodes, force);
}
// Sends response.
@@ -215,11 +218,12 @@
}
// Lists all datasets that should be rebalanced in a given dataverse.
- private Iterable<Dataset> getAllDatasetsForRebalance(DataverseName dataverseName) throws Exception {
+ private Iterable<Dataset> getAllDatasetsForRebalance(String database, DataverseName dataverseName)
+ throws Exception {
Iterable<Dataset> datasets;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
- datasets = getDatasetsInDataverseForRebalance(dataverseName, mdTxnCtx);
+ datasets = getDatasetsInDataverseForRebalance(database, dataverseName, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
@@ -235,7 +239,8 @@
try {
List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
for (Dataverse dv : dataverses) {
- CollectionUtils.addAll(datasets, getDatasetsInDataverseForRebalance(dv.getDataverseName(), mdTxnCtx));
+ CollectionUtils.addAll(datasets,
+ getDatasetsInDataverseForRebalance(dv.getDatabaseName(), dv.getDataverseName(), mdTxnCtx));
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
@@ -246,16 +251,17 @@
}
// Gets all datasets in a dataverse for the rebalance operation, with a given metadata transaction context.
- private Iterable<Dataset> getDatasetsInDataverseForRebalance(DataverseName dvName,
+ private Iterable<Dataset> getDatasetsInDataverseForRebalance(String database, DataverseName dvName,
MetadataTransactionContext mdTxnCtx) throws Exception {
return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvName) ? Collections.emptyList()
- : IterableUtils.filteredIterable(MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, null, dvName),
+ : IterableUtils.filteredIterable(
+ MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, database, dvName),
DatasetUtil::isNotView);
}
// Rebalances a given dataset.
- private void rebalanceDataset(DataverseName dataverseName, String datasetName, Set<String> targetNodes,
- boolean force) throws Exception {
+ private void rebalanceDataset(String database, DataverseName dataverseName, String datasetName,
+ Set<String> targetNodes, boolean force) throws Exception {
IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
MetadataProvider metadataProvider = MetadataProvider.create(appCtx, null);
try {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
index c663617..545a8b0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
@@ -24,6 +24,7 @@
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
@@ -48,7 +49,8 @@
DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
String datasetName = getString(loc, f.getArguments(), 1);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, loc, datasetName, dataverseName);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
index 68edc0d..e5bb3b8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
@@ -39,6 +39,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.IFunctionToDataSourceRewriter;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.ARecordType;
@@ -146,7 +147,9 @@
AbstractFunctionCallExpression datasetFnCall = (AbstractFunctionCallExpression) expression;
MetadataProvider metadata = (MetadataProvider) mp;
Dataset dataset = fetchDataset(metadata, datasetFnCall);
- IAType type = metadata.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ IAType type =
+ metadata.findType(itemTypeDatabase, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
if (type == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, datasetFnCall.getSourceLocation(),
"No type for " + dataset() + " " + dataset.getDatasetName());
@@ -158,10 +161,11 @@
throws CompilationException {
DatasetFullyQualifiedName datasetReference = FunctionUtil.parseDatasetFunctionArguments(datasetFnCall);
DataverseName dataverseName = datasetReference.getDataverseName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = datasetReference.getDatasetName();
Dataset dataset;
try {
- dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
} catch (CompilationException e) {
throw e;
} catch (AlgebricksException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
index 9fb6385..8383e2e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
@@ -27,6 +27,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.ISecondaryIndexOperationsHelper;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.SecondaryIndexOperationsHelper;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -51,14 +52,15 @@
throws AlgebricksException {
final SourceLocation loc = f.getSourceLocation();
DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = getString(loc, f.getArguments(), 1);
String indexName = getString(loc, f.getArguments(), 2);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- final Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ final Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, loc, datasetName, dataverseName);
}
- Index index = metadataProvider.getIndex(dataverseName, datasetName, indexName);
+ Index index = metadataProvider.getIndex(database, dataverseName, datasetName, indexName);
if (index == null) {
throw new CompilationException(ErrorCode.UNKNOWN_INDEX, loc, indexName);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
index cc1b3ea..339fa62 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
@@ -40,6 +40,7 @@
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionToDataSourceRewriter;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
@@ -86,7 +87,8 @@
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
DataSourceId asid = new DataSourceId(dataverseName, getTargetFeed);
String policyName = (String) metadataProvider.getConfig().get(FeedActivityDetails.FEED_POLICY_NAME);
- FeedPolicyEntity policy = metadataProvider.findFeedPolicy(dataverseName, policyName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ FeedPolicyEntity policy = metadataProvider.findFeedPolicy(database, dataverseName, policyName);
if (policy == null) {
policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
if (policy == null) {
@@ -122,11 +124,13 @@
String subscriptionLocation, MetadataProvider metadataProvider, FeedPolicyEntity feedPolicy,
String outputType, String locations, LogicalVariable recordVar, IOptimizationContext context,
List<LogicalVariable> pkVars) throws AlgebricksException {
- Dataset dataset = metadataProvider.findDataset(id.getDataverseName(), targetDataset);
- ARecordType feedOutputType = (ARecordType) metadataProvider.findType(id.getDataverseName(), outputType);
- Feed sourceFeed = metadataProvider.findFeed(id.getDataverseName(), sourceFeedName);
+ String database = MetadataUtil.resolveDatabase(null, id.getDataverseName());
+ Dataset dataset = metadataProvider.findDataset(database, id.getDataverseName(), targetDataset);
+ ARecordType feedOutputType =
+ (ARecordType) metadataProvider.findType(database, id.getDataverseName(), outputType);
+ Feed sourceFeed = metadataProvider.findFeed(database, id.getDataverseName(), sourceFeedName);
FeedConnection feedConnection =
- metadataProvider.findFeedConnection(id.getDataverseName(), sourceFeedName, targetDataset);
+ metadataProvider.findFeedConnection(database, id.getDataverseName(), sourceFeedName, targetDataset);
ARecordType metaType = null;
// Does dataset have meta?
if (dataset.hasMetaPart()) {
@@ -135,7 +139,7 @@
throw new AlgebricksException(
"Feed to " + dataset(SINGULAR) + " with metadata doesn't have meta type specified");
}
- metaType = (ARecordType) metadataProvider.findType(id.getDataverseName(), metaTypeName);
+ metaType = (ARecordType) metadataProvider.findType(database, id.getDataverseName(), metaTypeName);
}
// Is a change feed?
List<IAType> pkTypes = null;
@@ -189,7 +193,8 @@
return BuiltinType.ANY;
}
MetadataProvider metadata = (MetadataProvider) mp;
- IAType outputType = metadata.findType(dataverseName, outputTypeName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ IAType outputType = metadata.findType(database, dataverseName, outputTypeName);
if (outputType == null) {
throw new AlgebricksException("Unknown type " + outputTypeName);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
index 0aa66dd..5b8ed0b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/QueryIndexRewriter.java
@@ -34,6 +34,7 @@
import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.ISecondaryIndexOperationsHelper;
import org.apache.asterix.metadata.utils.KeyFieldTypeUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.SecondaryIndexOperationsHelper;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.constants.AsterixConstantValue;
@@ -90,7 +91,8 @@
String idName = getString(loc, f.getArguments(), 2);
MetadataProvider mp = (MetadataProvider) ctx.getMetadataProvider();
final Dataset dataset = validateDataset(mp, dvName, dsName, loc);
- Index index = validateIndex(f, mp, loc, dvName, dsName, idName);
+ String database = dataset.getDatabaseName();
+ Index index = validateIndex(f, mp, loc, database, dvName, dsName, idName);
return createQueryIndexDatasource(mp, dataset, index, loc, f);
}
@@ -137,7 +139,8 @@
String datasetName = getString(loc, f.getArguments(), 1);
String indexName = getString(loc, f.getArguments(), 2);
Dataset dataset = validateDataset(metadataProvider, dataverseName, datasetName, loc);
- Index index = validateIndex(f, metadataProvider, loc, dataverseName, datasetName, indexName);
+ String database = dataset.getDatabaseName();
+ Index index = validateIndex(f, metadataProvider, loc, database, dataverseName, datasetName, indexName);
ARecordType dsType = (ARecordType) metadataProvider.findType(dataset);
ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
dsType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(dsType, metaType, dataset);
@@ -182,7 +185,8 @@
private static Dataset validateDataset(MetadataProvider mp, DataverseName dvName, String dsName, SourceLocation loc)
throws AlgebricksException {
- Dataset dataset = mp.findDataset(dvName, dsName);
+ String database = MetadataUtil.resolveDatabase(null, dvName);
+ Dataset dataset = mp.findDataset(database, dvName, dsName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, loc, dsName, dvName);
}
@@ -190,8 +194,8 @@
}
private static Index validateIndex(AbstractFunctionCallExpression f, MetadataProvider mp, SourceLocation loc,
- DataverseName dvName, String dsName, String idxName) throws AlgebricksException {
- Index index = mp.getIndex(dvName, dsName, idxName);
+ String database, DataverseName dvName, String dsName, String idxName) throws AlgebricksException {
+ Index index = mp.getIndex(database, dvName, dsName, idxName);
if (index == null) {
throw new CompilationException(ErrorCode.UNKNOWN_INDEX, loc, idxName);
}
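
Where an entity has already been resolved, the hunks above reuse its stored database name instead of resolving it again from the dataverse. A small sketch of that variant, using the names from the rewriter above:

    Dataset dataset = validateDataset(mp, dvName, dsName, loc); // resolves via mp.findDataset(database, dvName, dsName)
    Index index = mp.getIndex(dataset.getDatabaseName(), dvName, dsName, idxName);
    if (index == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_INDEX, loc, idxName);
    }
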
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
index fb0a6d4..f525dc6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
@@ -24,6 +24,7 @@
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
@@ -46,9 +47,10 @@
throws AlgebricksException {
SourceLocation loc = f.getSourceLocation();
DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = getString(loc, f.getArguments(), 1);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, loc, datasetName, dataverseName);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 3a95595..15f4e98 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -665,9 +665,8 @@
protected static void validateCompactionPolicy(String compactionPolicy,
Map<String, String> compactionPolicyProperties, MetadataTransactionContext mdTxnCtx,
boolean isExternalDataset, SourceLocation sourceLoc) throws Exception {
- String database = null;
- CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx, database,
- MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
+ CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
+ MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
if (compactionPolicyEntity == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
"Unknown compaction policy: " + compactionPolicy);
@@ -792,7 +791,7 @@
}
IDatasetDetails datasetDetails;
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName, true);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName, true);
if (ds != null) {
if (ds.getDatasetType() == DatasetType.VIEW) {
throw new CompilationException(ErrorCode.VIEW_EXISTS, sourceLoc,
@@ -1026,7 +1025,8 @@
String itemTypeDatabase = MetadataUtil.resolveDatabase(null, itemTypeDataverseName);
switch (itemTypeExpr.getTypeKind()) {
case TYPEREFERENCE:
- Datatype itemTypeEntity = metadataProvider.findTypeEntity(itemTypeDataverseName, itemTypeName);
+ Datatype itemTypeEntity =
+ metadataProvider.findTypeEntity(itemTypeDatabase, itemTypeDataverseName, itemTypeName);
if (itemTypeEntity == null || itemTypeEntity.getIsAnonymous()) {
// anonymous types cannot be referred from CREATE DATASET/VIEW
throw new AsterixException(ErrorCode.UNKNOWN_TYPE, sourceLoc,
@@ -1145,7 +1145,7 @@
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -1170,7 +1170,7 @@
}
}
- String itemTypeDatabase = null;
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, ds.getItemTypeDataverseName());
Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
itemTypeDatabase, ds.getItemTypeDataverseName(), ds.getItemTypeName());
ARecordType aRecordType = (ARecordType) dt.getDatatype();
@@ -1689,7 +1689,8 @@
// memory component
// of the primary index, which is incorrect.
if (ds.getDatasetType() == DatasetType.INTERNAL) {
- FlushDatasetUtil.flushDataset(hcc, metadataProvider, index.getDataverseName(), index.getDatasetName());
+ FlushDatasetUtil.flushDataset(hcc, metadataProvider, index.getDatabaseName(), index.getDataverseName(),
+ index.getDatasetName());
}
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -2065,7 +2066,7 @@
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
}
- ds = metadataProvider.findDataset(dataverseName, datasetName, true);
+ ds = metadataProvider.findDataset(database, dataverseName, datasetName, true);
if (ds == null) {
if (ifExists) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
@@ -2162,7 +2163,7 @@
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -2497,10 +2498,11 @@
for (CreateViewStatement.ForeignKeyDecl foreignKeyDecl : foreignKeyDecls) {
List<String> foreignKeyFields =
ValidateUtil.validateViewKeyFields(foreignKeyDecl, itemType, true, sourceLoc);
- String refDatabase = null;
DataverseName refDataverseName = foreignKeyDecl.getReferencedDataverseName();
+ String refDatabase = MetadataUtil.resolveDatabase(null, refDataverseName);
if (refDataverseName == null) {
refDataverseName = dataverseName;
+ refDatabase = MetadataUtil.resolveDatabase(null, dataverseName);
} else {
Dataverse refDataverse =
MetadataManager.INSTANCE.getDataverse(mdTxnCtx, refDatabase, refDataverseName);
@@ -2520,7 +2522,8 @@
refPrimaryKeyFields = primaryKeyFields;
} else {
// findDataset() will acquire lock on referenced dataset (view)
- Dataset refDataset = metadataProvider.findDataset(refDataverseName, refDatasetName, true);
+ Dataset refDataset =
+ metadataProvider.findDataset(refDatabase, refDataverseName, refDatasetName, true);
if (refDataset == null || DatasetUtil.isNotView(refDataset)) {
throw new CompilationException(ErrorCode.UNKNOWN_VIEW, sourceLoc,
DatasetUtil.getFullyQualifiedDisplayName(refDataverseName, refDatasetName));
@@ -2663,7 +2666,7 @@
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
}
- Dataset dataset = metadataProvider.findDataset(dataverseName, viewName, true);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, viewName, true);
if (dataset == null) {
if (stmtViewDrop.getIfExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2733,8 +2736,8 @@
protected CreateResult doCreateFunction(MetadataProvider metadataProvider, CreateFunctionStatement cfs,
FunctionSignature functionSignature, IStatementRewriter stmtRewriter, IRequestParameters requestParameters)
throws Exception {
- String database = null;
DataverseName dataverseName = functionSignature.getDataverseName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
SourceLocation sourceLoc = cfs.getSourceLocation();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -2823,11 +2826,11 @@
newInlineTypes.put(returnTypeSignature, returnInlineTypeEntity);
}
- String libraryDatabase = null;
DataverseName libraryDataverseName = cfs.getLibraryDataverseName();
if (libraryDataverseName == null) {
libraryDataverseName = dataverseName;
}
+ String libraryDatabase = MetadataUtil.resolveDatabase(null, dataverseName);
String libraryName = cfs.getLibraryName();
Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, libraryDatabase, libraryDataverseName,
libraryName);
@@ -2943,7 +2946,9 @@
if (paramTypeDataverseName == null) {
paramTypeDataverseName = functionSignature.getDataverseName();
}
- Datatype paramTypeEntity = metadataProvider.findTypeEntity(paramTypeDataverseName, paramTypeName);
+ String paramTypeDatabase = MetadataUtil.resolveDatabase(null, paramTypeDataverseName);
+ Datatype paramTypeEntity =
+ metadataProvider.findTypeEntity(paramTypeDatabase, paramTypeDataverseName, paramTypeName);
if (paramTypeEntity == null || paramTypeEntity.getIsAnonymous()) {
throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, paramTypeName);
}
@@ -2990,8 +2995,8 @@
protected boolean doDropFunction(MetadataProvider metadataProvider, FunctionDropStatement stmtDropFunction,
FunctionSignature signature, IRequestParameters requestParameters) throws Exception {
- String database = null;
DataverseName dataverseName = signature.getDataverseName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
SourceLocation sourceLoc = stmtDropFunction.getSourceLocation();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -3538,6 +3543,7 @@
metadataProvider.validateDatabaseObjectName(copyStmt.getDataverseName(), datasetName,
copyStmt.getSourceLocation());
DataverseName dataverseName = getActiveDataverseName(copyStmt.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -3546,13 +3552,13 @@
boolean atomic = false;
try {
metadataProvider.setWriteTransaction(true);
- Dataset dataset = metadataProvider.findDataset(dataverseName, copyStmt.getDatasetName());
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, copyStmt.getDatasetName());
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, stmt.getSourceLocation(),
datasetName, dataverseName);
}
- String itemTypeDatabase = null;
- Datatype itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, itemTypeDatabase,
+ Datatype itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
+ MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName()),
dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
// Copy statement with csv files will have a type expression
if (copyStmt.getTypeExpr() != null) {
@@ -3561,9 +3567,9 @@
datasetName, itemTypeExpr, false, stmt.getSourceLocation());
DataverseName itemTypeDataverseName = itemTypeQualifiedName.first;
String itemTypeName = itemTypeQualifiedName.second;
- String database = MetadataUtil.resolveDatabase(null, itemTypeDataverseName);
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, itemTypeDataverseName);
IAType itemTypeEntity = translateType(itemTypeDataverseName, itemTypeName, itemTypeExpr, mdTxnCtx);
- itemType = new Datatype(database, itemTypeDataverseName, itemTypeName, itemTypeEntity, true);
+ itemType = new Datatype(itemTypeDatabase, itemTypeDataverseName, itemTypeName, itemTypeEntity, true);
}
ExternalDetailsDecl externalDetails = copyStmt.getExternalDetails();
Map<String, String> properties =
@@ -3671,7 +3677,8 @@
if (jobSpec == null) {
return jobSpec;
}
- Dataset ds = metadataProvider.findDataset(((InsertStatement) stmt).getDataverseName(),
+ String database = MetadataUtil.resolveDatabase(null, ((InsertStatement) stmt).getDataverseName());
+ Dataset ds = metadataProvider.findDataset(database, ((InsertStatement) stmt).getDataverseName(),
((InsertStatement) stmt).getDatasetName());
atomic = ds.isAtomic();
if (atomic) {
@@ -3716,6 +3723,7 @@
metadataProvider.validateDatabaseObjectName(stmtDelete.getDataverseName(), datasetName,
stmt.getSourceLocation());
DataverseName dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -3734,7 +3742,7 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
if (jobSpec != null && !isCompileOnly()) {
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
atomic = ds.isAtomic();
if (atomic) {
int numParticipatingNodes = appCtx.getNodeJobTracker()
@@ -3839,8 +3847,8 @@
SourceLocation sourceLoc = cfs.getSourceLocation();
String feedName = cfs.getFeedName().getValue();
metadataProvider.validateDatabaseObjectName(cfs.getDataverseName(), feedName, sourceLoc);
- String database = null;
DataverseName dataverseName = getActiveDataverseName(cfs.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
if (isCompileOnly()) {
return;
}
@@ -3909,7 +3917,8 @@
metadataProvider.getMetadataTxnContext(), database, dataverseName, cfps.getSourcePolicyName());
if (sourceFeedPolicy == null) {
sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
- database, MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
+ MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME,
+ cfps.getSourcePolicyName());
if (sourceFeedPolicy == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
"Unknown policy " + cfps.getSourcePolicyName());
@@ -3947,8 +3956,8 @@
SourceLocation sourceLoc = stmtFeedDrop.getSourceLocation();
String feedName = stmtFeedDrop.getFeedName().getValue();
metadataProvider.validateDatabaseObjectName(stmtFeedDrop.getDataverseName(), feedName, sourceLoc);
- String database = null;
DataverseName dataverseName = getActiveDataverseName(stmtFeedDrop.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
if (isCompileOnly()) {
return;
}
@@ -4037,8 +4046,8 @@
IHyracksClientConnection hcc) throws Exception {
StartFeedStatement sfs = (StartFeedStatement) stmt;
SourceLocation sourceLoc = sfs.getSourceLocation();
- String database = null;
DataverseName dataverseName = getActiveDataverseName(sfs.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String feedName = sfs.getFeedName().getValue();
if (isCompileOnly()) {
return;
@@ -4051,7 +4060,7 @@
// Runtime handler
EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
// Feed & Feed Connections
- Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName,
+ Feed feed = FeedMetadataUtil.validateIfFeedExists(database, dataverseName, feedName,
metadataProvider.getMetadataTxnContext());
List<FeedConnection> feedConnections = MetadataManager.INSTANCE
.getFeedConections(metadataProvider.getMetadataTxnContext(), database, dataverseName, feedName);
@@ -4070,8 +4079,8 @@
// Prepare policy
List<Dataset> datasets = new ArrayList<>();
for (FeedConnection connection : feedConnections) {
- Dataset ds =
- metadataProvider.findDataset(connection.getDataverseName(), connection.getDatasetName());
+ Dataset ds = metadataProvider.findDataset(connection.getDatabaseName(),
+ connection.getDataverseName(), connection.getDatasetName());
datasets.add(ds);
}
listener = new FeedEventsListener(this, metadataProvider.getApplicationContext(), hcc, entityId,
@@ -4139,8 +4148,9 @@
lockUtil.connectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName, feedName);
try {
// validation
- Dataset dataset = FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName);
- Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, feedName,
+ Dataset dataset =
+ FeedMetadataUtil.validateIfDatasetExists(metadataProvider, database, dataverseName, datasetName);
+ Feed feed = FeedMetadataUtil.validateIfFeedExists(database, dataverseName, feedName,
metadataProvider.getMetadataTxnContext());
FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getListener(feed.getFeedId());
if (listener != null && listener.isActive()) {
@@ -4201,11 +4211,12 @@
throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, sourceLoc,
feedName);
}
- FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, cfs.getDatasetName().getValue());
- FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
+ FeedMetadataUtil.validateIfDatasetExists(metadataProvider, database, dataverseName,
+ cfs.getDatasetName().getValue());
+ FeedMetadataUtil.validateIfFeedExists(database, dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(),
database, dataverseName, feedName, datasetName);
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -4265,7 +4276,7 @@
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
// Check if the dataset exists
- ds = metadataProvider.findDataset(dataverseName, datasetName);
+ ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -4317,7 +4328,7 @@
runJob(hcc, spec);
// #. flush dataset
- FlushDatasetUtil.flushDataset(hcc, metadataProvider, dataverseName, datasetName);
+ FlushDatasetUtil.flushDataset(hcc, metadataProvider, database, dataverseName, datasetName);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
@@ -4485,7 +4496,7 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
boolean index1Exists = false, index2Exists = false;
try {
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -4609,8 +4620,8 @@
IHyracksClientConnection hcc) throws Exception {
CompactStatement compactStatement = (CompactStatement) stmt;
SourceLocation sourceLoc = compactStatement.getSourceLocation();
- String database = null;
DataverseName dataverseName = getActiveDataverseName(compactStatement.getDataverseName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = compactStatement.getDatasetName().getValue();
if (isCompileOnly()) {
return;
@@ -4621,7 +4632,7 @@
List<JobSpecification> jobsToExecute = new ArrayList<>();
lockUtil.compactBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
- Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(database, dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
@@ -4875,8 +4886,11 @@
// ensure request not cancelled before running job
ensureNotCancelled(clientRequest);
if (atomicStatement != null) {
- Dataset ds = metadataProvider.findDataset(((InsertStatement) atomicStatement).getDataverseName(),
- ((InsertStatement) atomicStatement).getDatasetName());
+ String database =
+ MetadataUtil.resolveDatabase(null, ((InsertStatement) atomicStatement).getDataverseName());
+ Dataset ds =
+ metadataProvider.findDataset(database, ((InsertStatement) atomicStatement).getDataverseName(),
+ ((InsertStatement) atomicStatement).getDatasetName());
atomic = ds.isAtomic();
if (atomic) {
int numParticipatingNodes = appCtx.getNodeJobTracker()
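
Across the QueryTranslator hunks above, the database argument comes from one of three places, depending on what the statement has in hand. A condensed sketch of the three cases, with variable names assumed from the surrounding methods:

    // 1) only the active dataverse name is known
    String database = MetadataUtil.resolveDatabase(null, dataverseName);
    // 2) a metadata entity is already available: resolve from the dataverse it actually belongs to
    String itemTypeDatabase = MetadataUtil.resolveDatabase(null, ds.getItemTypeDataverseName());
    // 3) builtin entities in the Metadata dataverse live in the system database
    CompactionPolicy policy = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
            MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
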
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index 44f8a80..130c3a2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -281,9 +281,9 @@
String datasetName = feedConnections.get(iter1).getDatasetName();
FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(), datasetName);
- FeedPolicyEntity feedPolicyEntity =
- FeedMetadataUtil.validateIfPolicyExists(curFeedConnection.getDataverseName(),
- curFeedConnection.getPolicyName(), metadataProvider.getMetadataTxnContext());
+ FeedPolicyEntity feedPolicyEntity = FeedMetadataUtil.validateIfPolicyExists(
+ curFeedConnection.getDatabaseName(), curFeedConnection.getDataverseName(),
+ curFeedConnection.getPolicyName(), metadataProvider.getMetadataTxnContext());
for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorsMap.entrySet()) {
IOperatorDescriptor opDesc = entry.getValue();
@@ -406,9 +406,8 @@
for (OperatorDescriptorId root : subJob.getRoots()) {
jobSpec.addRoot(jobSpec.getOperatorMap().get(operatorIdMapping.get(root)));
}
- int datasetId = metadataProvider
- .findDataset(curFeedConnection.getDataverseName(), curFeedConnection.getDatasetName())
- .getDatasetId();
+ int datasetId = metadataProvider.findDataset(curFeedConnection.getDatabaseName(),
+ curFeedConnection.getDataverseName(), curFeedConnection.getDatasetName()).getDatasetId();
TxnId txnId = ((JobEventListenerFactory) subJob.getJobletEventListenerFactory()).getTxnId(datasetId);
txnIdMap.put(datasetId, txnId);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
index e46738d..f38c18b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
@@ -43,9 +43,9 @@
private FlushDatasetUtil() {
}
- public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider,
+ public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider, String database,
DataverseName dataverseName, String datasetName) throws Exception {
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
flushDataset(hcc, metadataProvider, dataset);
}
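
With the widened overload above, callers that only hold names resolve the database first and pass it through, while callers that already hold a metadata entity pass its stored components. Both usages appear in the QueryTranslator changes; a short sketch:

    FlushDatasetUtil.flushDataset(hcc, metadataProvider,
            MetadataUtil.resolveDatabase(null, dataverseName), dataverseName, datasetName);
    FlushDatasetUtil.flushDataset(hcc, metadataProvider,
            index.getDatabaseName(), index.getDataverseName(), index.getDatasetName());
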
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index 2b78526..86b05c3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@ -50,6 +50,7 @@
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.IndexUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.rebalance.IDatasetRebalanceCallback;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
@@ -96,6 +97,7 @@
public static boolean rebalance(DataverseName dataverseName, String datasetName, Set<String> targetNcNames,
MetadataProvider metadataProvider, IHyracksClientConnection hcc,
IDatasetRebalanceCallback datasetRebalanceCallback, boolean forceRebalance) throws Exception {
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
Dataset sourceDataset;
Dataset targetDataset;
boolean success = true;
@@ -106,7 +108,7 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
// The source dataset.
- sourceDataset = metadataProvider.findDataset(dataverseName, datasetName);
+ sourceDataset = metadataProvider.findDataset(database, dataverseName, datasetName);
// If the source dataset doesn't exist, then it's a no-op.
if (sourceDataset == null) {
@@ -353,8 +355,9 @@
private static ITupleProjectorFactory createTupleProjectorFactory(Dataset source, MetadataProvider metadataProvider)
throws AlgebricksException {
- ARecordType itemType =
- (ARecordType) metadataProvider.findType(source.getItemTypeDataverseName(), source.getItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, source.getItemTypeDataverseName());
+ ARecordType itemType = (ARecordType) metadataProvider.findType(itemTypeDatabase,
+ source.getItemTypeDataverseName(), source.getItemTypeName());
ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, source);
itemType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(itemType, metaType, source);
int numberOfPrimaryKeys = source.getPrimaryKeys().size();
@@ -396,7 +399,8 @@
return;
}
List<JobSpecification> jobs = new ArrayList<>();
- List<Index> indexes = metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> indexes = metadataProvider.getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(),
+ dataset.getDatasetName());
for (Index index : indexes) {
jobs.add(IndexUtil.buildDropIndexJobSpec(index, metadataProvider, dataset,
EnumSet.of(DropOption.IF_EXISTS, DropOption.WAIT_ON_IN_USE), null));
@@ -409,7 +413,8 @@
// Creates and loads all secondary indexes for the rebalance target dataset.
private static void createAndLoadSecondaryIndexesForTarget(Dataset source, Dataset target,
MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
- List<Index> indexes = metadataProvider.getDatasetIndexes(source.getDataverseName(), source.getDatasetName());
+ List<Index> indexes = metadataProvider.getDatasetIndexes(source.getDatabaseName(), source.getDataverseName(),
+ source.getDatasetName());
List<Index> secondaryIndexes = indexes.stream().filter(Index::isSecondaryIndex).collect(Collectors.toList());
List<Index> nonSampleIndexes =
secondaryIndexes.stream().filter(idx -> !idx.isSampleIndex()).collect(Collectors.toList());
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index a4ffab5..844464a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -38,6 +38,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
@@ -185,9 +186,11 @@
.create((ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null);
try {
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
- ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
- dataset.getItemTypeName());
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ ARecordType recordType = (ARecordType) metadataProvider.findType(itemTypeDatabase,
+ dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
// Metadata transaction commits.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return recordType;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
index 60bcbb5..87c6898 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
@@ -37,6 +37,7 @@
import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
import org.apache.asterix.rebalance.NoOpDatasetRebalanceCallback;
import org.apache.asterix.test.common.TestExecutor;
@@ -227,7 +228,8 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
Dataset dataset;
try {
- dataset = metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+ dataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
} finally {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
metadataProvider.getLocks().unlock();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
index 5a78ea8..620de3f 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
@@ -32,6 +32,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.utils.IndexUtil;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.sort.AbstractSorterOperatorDescriptor;
import org.junit.AfterClass;
@@ -78,7 +79,8 @@
// create the dataset
TestDataUtil.createDataset(datasetName, fields, PKFieldName);
- final Dataset dataset = metadataProvider.findDataset(null, datasetName);
+ final Dataset dataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
Assert.assertNotNull(dataset);
Index index;
@@ -86,8 +88,8 @@
// create a secondary primary index
TestDataUtil.createPrimaryIndex(datasetName, primaryIndexName);
- index = metadataProvider.getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName,
- primaryIndexName);
+ index = metadataProvider.getIndex(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, primaryIndexName);
Assert.assertNotNull(index);
jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null);
jobSpecification.getOperatorMap().values().forEach(iOperatorDescriptor -> {
@@ -96,8 +98,8 @@
// create a normal BTree index
TestDataUtil.createSecondaryBTreeIndex(datasetName, secondaryIndexName, SKFieldName);
- index = metadataProvider.getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName,
- secondaryIndexName);
+ index = metadataProvider.getIndex(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, secondaryIndexName);
Assert.assertNotNull(index);
jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null);
final long numOfSortOperators = jobSpecification.getOperatorMap().values().stream()
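
Since the change requires a non-null database in MetadataManager, the test lookups above now name the default database and dataverse explicitly instead of passing null. A sketch of the resulting test-side pattern, reusing the names from this test:

    Dataset dataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
            MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
    Index index = metadataProvider.getIndex(MetadataConstants.DEFAULT_DATABASE,
            MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, secondaryIndexName);
    Assert.assertNotNull(dataset);
    Assert.assertNotNull(index);
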
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
index ec2620fb..81121f5 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
@@ -45,6 +45,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.NodeGroup;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.hyracks.api.util.InvokeUtil;
@@ -120,7 +121,8 @@
final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
- sourceDataset = metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+ sourceDataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} finally {
metadataProvider.getLocks().unlock();
@@ -170,7 +172,8 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
Dataset sourceDataset;
try {
- sourceDataset = metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+ sourceDataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
+ MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} finally {
metadataProvider.getLocks().unlock();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
index 5d10028..fd40436 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
@@ -153,33 +153,38 @@
when(metadataProvider.getDefaultDataverseName()).thenReturn(dvName);
when(metadataProvider.getConfig()).thenReturn(config);
when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true");
- when(metadataProvider.findDataverse(Mockito.<DataverseName> any())).thenAnswer(new Answer<Dataverse>() {
- @Override
- public Dataverse answer(InvocationOnMock invocation) {
- Object[] args = invocation.getArguments();
- final Dataverse mockDataverse = mock(Dataverse.class);
- when(mockDataverse.getDataverseName()).thenReturn((DataverseName) args[0]);
- return mockDataverse;
- }
- });
- when(metadataProvider.findDataset(any(DataverseName.class), anyString())).thenAnswer(new Answer<Dataset>() {
- @Override
- public Dataset answer(InvocationOnMock invocation) {
- Object[] args = invocation.getArguments();
- final Dataset mockDataset = mock(Dataset.class);
- when(mockDataset.getDataverseName()).thenReturn((DataverseName) args[0]);
- when(mockDataset.getDatasetName()).thenReturn((String) args[1]);
- return mockDataset;
- }
- });
- when(metadataProvider.findDataset(any(DataverseName.class), anyString(), anyBoolean()))
+ when(metadataProvider.findDataverse(anyString(), Mockito.<DataverseName> any()))
+ .thenAnswer(new Answer<Dataverse>() {
+ @Override
+ public Dataverse answer(InvocationOnMock invocation) {
+ Object[] args = invocation.getArguments();
+ final Dataverse mockDataverse = mock(Dataverse.class);
+ when(mockDataverse.getDatabaseName()).thenReturn((String) args[0]);
+ when(mockDataverse.getDataverseName()).thenReturn((DataverseName) args[1]);
+ return mockDataverse;
+ }
+ });
+ when(metadataProvider.findDataset(anyString(), any(DataverseName.class), anyString()))
.thenAnswer(new Answer<Dataset>() {
@Override
public Dataset answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
final Dataset mockDataset = mock(Dataset.class);
- when(mockDataset.getDataverseName()).thenReturn((DataverseName) args[0]);
- when(mockDataset.getDatasetName()).thenReturn((String) args[1]);
+ when(mockDataset.getDatabaseName()).thenReturn((String) args[0]);
+ when(mockDataset.getDataverseName()).thenReturn((DataverseName) args[1]);
+ when(mockDataset.getDatasetName()).thenReturn((String) args[2]);
+ return mockDataset;
+ }
+ });
+ when(metadataProvider.findDataset(anyString(), any(DataverseName.class), anyString(), anyBoolean()))
+ .thenAnswer(new Answer<Dataset>() {
+ @Override
+ public Dataset answer(InvocationOnMock invocation) {
+ Object[] args = invocation.getArguments();
+ final Dataset mockDataset = mock(Dataset.class);
+ when(mockDataset.getDatabaseName()).thenReturn((String) args[0]);
+ when(mockDataset.getDataverseName()).thenReturn((DataverseName) args[1]);
+ when(mockDataset.getDatasetName()).thenReturn((String) args[2]);
return mockDataset;
}
});
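
The stubs above keep the anonymous Answer style this test already uses; purely as a sketch (not part of the change), an equivalent lambda-based stub shows how the widened three-argument mock maps its arguments onto the mocked Dataset:

    when(metadataProvider.findDataset(anyString(), any(DataverseName.class), anyString()))
            .thenAnswer(invocation -> {
                Dataset mockDataset = mock(Dataset.class);
                when(mockDataset.getDatabaseName()).thenReturn(invocation.getArgument(0));
                when(mockDataset.getDataverseName()).thenReturn(invocation.getArgument(1));
                when(mockDataset.getDatasetName()).thenReturn(invocation.getArgument(2));
                return mockDataset;
            });
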
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
index a06f7b7..265de65 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
@@ -38,6 +38,7 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
import org.apache.asterix.om.types.ARecordType;
@@ -154,7 +155,8 @@
MetadataProvider metadataProver = MetadataProvider.create(appCtx, null);
metadataProver.setMetadataTxnContext(mdTxn);
final DataverseName defaultDv = MetadataBuiltinEntities.DEFAULT_DATAVERSE.getDataverseName();
- final Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxn, null, defaultDv, datasetName);
+ final Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxn, MetadataConstants.DEFAULT_DATABASE,
+ defaultDv, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxn);
FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(), dataset,
indexName, Arrays.asList("asterix_nc1"));
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
index 742cd82..56432ba 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
@@ -50,6 +50,7 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctionInfo;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.types.BuiltinType;
@@ -118,6 +119,7 @@
if (dataverse == null) {
dataverse = metadataProvider.getDefaultDataverseName();
}
+ String database = MetadataUtil.resolveDatabase(null, dataverse);
if (searchUdfs && !isBuiltinFunctionDataverse(dataverse)) {
// attempt to resolve to a user-defined function
FunctionSignature fsWithDv =
@@ -151,7 +153,7 @@
if (fs.getDataverseName() != null) {
Dataverse dv;
try {
- dv = metadataProvider.findDataverse(dataverse);
+ dv = metadataProvider.findDataverse(database, dataverse);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, e, sourceLoc, dataverse);
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
index 38177d8..a48143b 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
@@ -85,6 +85,7 @@
import org.apache.asterix.metadata.entities.Function;
import org.apache.asterix.metadata.entities.ViewDetails;
import org.apache.asterix.metadata.utils.DatasetUtil;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.metadata.utils.TypeUtil;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -488,7 +489,9 @@
if (viewDecl == null) {
Dataset dataset;
try {
- dataset = metadataProvider.findDataset(viewName.getDataverseName(), viewName.getDatasetName(), true);
+ String database = MetadataUtil.resolveDatabase(null, viewName.getDataverseName());
+ dataset = metadataProvider.findDataset(database, viewName.getDataverseName(), viewName.getDatasetName(),
+ true);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.UNKNOWN_VIEW, e, sourceLoc, viewName);
}
@@ -499,13 +502,14 @@
viewDecl = ViewUtil.parseStoredView(viewName, viewDetails, parserFactory, context.getWarningCollector(),
sourceLoc);
DataverseName itemTypeDataverseName = dataset.getItemTypeDataverseName();
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, itemTypeDataverseName);
String itemTypeName = dataset.getItemTypeName();
boolean isAnyType =
MetadataBuiltinEntities.ANY_OBJECT_DATATYPE.getDataverseName().equals(itemTypeDataverseName)
&& MetadataBuiltinEntities.ANY_OBJECT_DATATYPE.getDatatypeName().equals(itemTypeName);
if (!isAnyType) {
try {
- viewItemType = metadataProvider.findType(itemTypeDataverseName, itemTypeName);
+ viewItemType = metadataProvider.findType(itemTypeDatabase, itemTypeDataverseName, itemTypeName);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.UNKNOWN_TYPE,
TypeUtil.getFullyQualifiedDisplayName(itemTypeDataverseName, itemTypeName));
@@ -551,11 +555,14 @@
SourceLocation sourceLoc) throws CompilationException {
Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
Dataverse targetDataverse;
+ String database;
if (entityDataverseName == null || entityDataverseName.equals(defaultDataverse.getDataverseName())) {
targetDataverse = defaultDataverse;
+ database = MetadataUtil.resolveDatabase(null, targetDataverse.getDataverseName());
} else {
try {
- targetDataverse = metadataProvider.findDataverse(entityDataverseName);
+ database = MetadataUtil.resolveDatabase(null, entityDataverseName);
+ targetDataverse = metadataProvider.findDataverse(database, entityDataverseName);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, e, sourceLoc, entityDataverseName);
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
index 8fbad4f..bdf1115 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
@@ -54,6 +54,7 @@
import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -266,14 +267,16 @@
SourceLocation sourceLoc) throws CompilationException {
try {
Boolean viaSynonym = false;
- Triple<DataverseName, String, Boolean> dsName =
- metadataProvider.resolveDatasetNameUsingSynonyms(dataverseName, datasetName, includingViews);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Triple<DataverseName, String, Boolean> dsName = metadataProvider.resolveDatasetNameUsingSynonyms(database,
+ dataverseName, datasetName, includingViews);
if (dsName != null) {
dataverseName = dsName.first;
+ database = MetadataUtil.resolveDatabase(null, dataverseName);
datasetName = dsName.second;
viaSynonym = dsName.third;
}
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName, includingViews);
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName, includingViews);
return dataset == null ? null : new Pair<>(dataset, viaSynonym);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, e, sourceLoc, e.getMessage());
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSynonymRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSynonymRewriteVisitor.java
index b47eb6e..0696b5c 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSynonymRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSynonymRewriteVisitor.java
@@ -27,6 +27,7 @@
import org.apache.asterix.lang.common.statement.LoadStatement;
import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppAstVisitor;
import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.api.exceptions.SourceLocation;
@@ -43,7 +44,8 @@
@Override
public Void visit(LoadStatement loadStmt, MetadataProvider metadataProvider) throws CompilationException {
- Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider,
+ String database = MetadataUtil.resolveDatabase(null, loadStmt.getDataverseName());
+ Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider, database,
loadStmt.getDataverseName(), loadStmt.getDatasetName(), false, loadStmt.getSourceLocation());
if (dsName != null) {
loadStmt.setDataverseName(dsName.first);
@@ -54,7 +56,8 @@
@Override
public Void visit(InsertStatement insertStmt, MetadataProvider metadataProvider) throws CompilationException {
- Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider,
+ String database = MetadataUtil.resolveDatabase(null, insertStmt.getDataverseName());
+ Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider, database,
insertStmt.getDataverseName(), insertStmt.getDatasetName(), false, insertStmt.getSourceLocation());
if (dsName != null) {
insertStmt.setDataverseName(dsName.first);
@@ -65,7 +68,8 @@
@Override
public Void visit(DeleteStatement deleteStmt, MetadataProvider metadataProvider) throws CompilationException {
- Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider,
+ String database = MetadataUtil.resolveDatabase(null, deleteStmt.getDataverseName());
+ Triple<DataverseName, String, Boolean> dsName = resolveDatasetNameUsingSynonyms(metadataProvider, database,
deleteStmt.getDataverseName(), deleteStmt.getDatasetName(), false, deleteStmt.getSourceLocation());
if (dsName != null) {
deleteStmt.setDataverseName(dsName.first);
@@ -75,10 +79,11 @@
}
private Triple<DataverseName, String, Boolean> resolveDatasetNameUsingSynonyms(MetadataProvider metadataProvider,
- DataverseName dataverseName, String datasetName, boolean includingViews, SourceLocation sourceLoc)
- throws CompilationException {
+ String database, DataverseName dataverseName, String datasetName, boolean includingViews,
+ SourceLocation sourceLoc) throws CompilationException {
try {
- return metadataProvider.resolveDatasetNameUsingSynonyms(dataverseName, datasetName, includingViews);
+ return metadataProvider.resolveDatasetNameUsingSynonyms(database, dataverseName, datasetName,
+ includingViews);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, e, sourceLoc, e.getMessage());
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index 7d91f73..f2d9883 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -23,6 +23,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.concurrent.TimeUnit;
import org.apache.asterix.common.config.MetadataProperties;
@@ -182,6 +183,7 @@
@Override
public void dropDatabase(MetadataTransactionContext ctx, String databaseName) throws AlgebricksException {
try {
+ Objects.requireNonNull(databaseName);
metadataNode.dropDatabase(ctx.getTxnId(), databaseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -203,6 +205,7 @@
public void dropDataverse(MetadataTransactionContext ctx, String database, DataverseName dataverseName)
throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropDataverse(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -214,6 +217,7 @@
public boolean isDataverseNotEmpty(MetadataTransactionContext ctx, String database, DataverseName dataverseName)
throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.isDataverseNotEmpty(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -233,7 +237,7 @@
public Dataverse getDataverse(MetadataTransactionContext ctx, String database, DataverseName dataverseName)
throws AlgebricksException {
//TODO(DB): change cache to consider database
-
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested dataverse itself (but the dataverse is still uncommitted).
Dataverse dataverse = ctx.getDataverse(dataverseName);
@@ -270,6 +274,7 @@
DataverseName dataverseName) throws AlgebricksException {
List<Dataset> dataverseDatasets;
try {
+ Objects.requireNonNull(database);
// Assuming that the transaction can read its own writes on the metadata node.
dataverseDatasets = metadataNode.getDataverseDatasets(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
@@ -296,6 +301,7 @@
public void dropDataset(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datasetName, boolean force) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropDataset(ctx.getTxnId(), database, dataverseName, datasetName, force);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -307,7 +313,7 @@
@Override
public Dataset getDataset(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
-
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested dataset itself (but the dataset is still uncommitted).
Dataset dataset = ctx.getDataset(dataverseName, datasetName);
@@ -343,6 +349,7 @@
@Override
public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
+ Objects.requireNonNull(database);
Dataset dataset = getDataset(ctx, database, dataverseName, datasetName);
if (dataset == null) {
return Collections.emptyList();
@@ -372,6 +379,7 @@
DataverseName dataverse, String policyName) throws AlgebricksException {
CompactionPolicy compactionPolicy;
try {
+ Objects.requireNonNull(database);
compactionPolicy = metadataNode.getCompactionPolicy(ctx.getTxnId(), database, dataverse, policyName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -398,6 +406,7 @@
public void dropDatatype(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datatypeName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropDatatype(ctx.getTxnId(), database, dataverseName, datatypeName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -408,6 +417,7 @@
@Override
public Datatype getDatatype(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datatypeName) throws AlgebricksException {
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested datatype itself (but the datatype is still uncommitted).
Datatype datatype = ctx.getDatatype(dataverseName, datatypeName);
@@ -464,6 +474,7 @@
public void dropIndex(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datasetName, String indexName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropIndex(ctx.getTxnId(), database, dataverseName, datasetName, indexName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -474,7 +485,7 @@
@Override
public Index getIndex(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String datasetName, String indexName) throws AlgebricksException {
-
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested index itself (but the index is still uncommitted).
Index index = ctx.getIndex(dataverseName, datasetName, indexName);
@@ -648,6 +659,7 @@
public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String database,
DataverseName dataverseName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.getDataverseFunctions(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -669,6 +681,7 @@
public void dropFullTextFilter(MetadataTransactionContext mdTxnCtx, String database, DataverseName dataverseName,
String filterName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropFullTextFilter(mdTxnCtx.getTxnId(), database, dataverseName, filterName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -679,6 +692,7 @@
@Override
public FullTextFilterMetadataEntity getFullTextFilter(MetadataTransactionContext ctx, String database,
DataverseName dataverseName, String filterName) throws AlgebricksException {
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested full-text filter itself (but the full-text filter is still uncommitted).
FullTextFilterMetadataEntity filter = ctx.getFullTextFilter(dataverseName, filterName);
@@ -737,6 +751,7 @@
@Override
public FullTextConfigMetadataEntity getFullTextConfig(MetadataTransactionContext ctx, String database,
DataverseName dataverseName, String configName) throws AlgebricksException {
+ Objects.requireNonNull(database);
// First look in the context to see if this transaction created the
// requested full-text config itself (but the full-text config is still uncommitted).
FullTextConfigMetadataEntity configMetadataEntity = ctx.getFullTextConfig(dataverseName, configName);
@@ -782,6 +797,7 @@
public void dropFullTextConfig(MetadataTransactionContext mdTxnCtx, String database, DataverseName dataverseName,
String configName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropFullTextConfig(mdTxnCtx.getTxnId(), database, dataverseName, configName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -822,6 +838,7 @@
public void dropAdapter(MetadataTransactionContext ctx, String database, DataverseName dataverseName, String name)
throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropAdapter(ctx.getTxnId(), database, dataverseName, name);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -834,6 +851,7 @@
String name) throws AlgebricksException {
DatasourceAdapter adapter;
try {
+ Objects.requireNonNull(database);
adapter = metadataNode.getAdapter(ctx.getTxnId(), database, dataverseName, name);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -845,6 +863,7 @@
public void dropLibrary(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String libraryName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropLibrary(ctx.getTxnId(), database, dataverseName, libraryName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -859,6 +878,7 @@
try {
// Assuming that the transaction can read its own writes on the
// metadata node.
+ Objects.requireNonNull(database);
dataverseLibaries = metadataNode.getDataverseLibraries(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -883,6 +903,7 @@
String libraryName) throws AlgebricksException {
Library library;
try {
+ Objects.requireNonNull(database);
library = metadataNode.getLibrary(ctx.getTxnId(), database, dataverseName, libraryName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -895,6 +916,7 @@
String policyName) throws AlgebricksException {
FeedPolicyEntity feedPolicy;
try {
+ Objects.requireNonNull(database);
feedPolicy = metadataNode.getFeedPolicy(ctx.getTxnId(), database, dataverseName, policyName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -907,6 +929,7 @@
throws AlgebricksException {
Feed feed;
try {
+ Objects.requireNonNull(database);
feed = metadataNode.getFeed(ctx.getTxnId(), database, dataverseName, feedName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -919,6 +942,7 @@
throws AlgebricksException {
List<Feed> feeds;
try {
+ Objects.requireNonNull(database);
feeds = metadataNode.getFeeds(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -932,6 +956,7 @@
Feed feed;
List<FeedConnection> feedConnections;
try {
+ Objects.requireNonNull(database);
feed = metadataNode.getFeed(ctx.getTxnId(), database, dataverseName, feedName);
feedConnections = metadataNode.getFeedConnections(ctx.getTxnId(), database, dataverseName, feedName);
metadataNode.dropFeed(ctx.getTxnId(), database, dataverseName, feedName);
@@ -971,6 +996,7 @@
public void dropFeedConnection(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String feedName, String datasetName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropFeedConnection(ctx.getTxnId(), database, dataverseName, feedName, datasetName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -982,6 +1008,7 @@
public FeedConnection getFeedConnection(MetadataTransactionContext ctx, String database,
DataverseName dataverseName, String feedName, String datasetName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.getFeedConnection(ctx.getTxnId(), database, dataverseName, feedName, datasetName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -992,6 +1019,7 @@
public List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, String database,
DataverseName dataverseName, String feedName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.getFeedConnections(ctx.getTxnId(), database, dataverseName, feedName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1003,6 +1031,7 @@
DataverseName dataverseName) throws AlgebricksException {
List<DatasourceAdapter> dataverseAdapters;
try {
+ Objects.requireNonNull(database);
dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1015,6 +1044,7 @@
String policyName) throws AlgebricksException {
FeedPolicyEntity feedPolicy;
try {
+ Objects.requireNonNull(database);
feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getTxnId(), database, dataverseName, policyName);
metadataNode.dropFeedPolicy(mdTxnCtx.getTxnId(), database, dataverseName, policyName);
} catch (RemoteException e) {
@@ -1028,6 +1058,7 @@
DataverseName dataverseName) throws AlgebricksException {
List<FeedPolicyEntity> dataverseFeedPolicies;
try {
+ Objects.requireNonNull(database);
dataverseFeedPolicies = metadataNode.getDataverseFeedPolicies(mdTxnCtx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1071,6 +1102,7 @@
String datasetName, Integer fileNumber) throws AlgebricksException {
ExternalFile file;
try {
+ Objects.requireNonNull(database);
file = metadataNode.getExternalFile(ctx.getTxnId(), database, dataverseName, datasetName, fileNumber);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1091,6 +1123,7 @@
public void dropSynonym(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String synonymName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
metadataNode.dropSynonym(ctx.getTxnId(), database, dataverseName, synonymName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1101,6 +1134,7 @@
public Synonym getSynonym(MetadataTransactionContext ctx, String database, DataverseName dataverseName,
String synonymName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.getSynonym(ctx.getTxnId(), database, dataverseName, synonymName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1111,6 +1145,7 @@
public List<Synonym> getDataverseSynonyms(MetadataTransactionContext ctx, String database,
DataverseName dataverseName) throws AlgebricksException {
try {
+ Objects.requireNonNull(database);
return metadataNode.getDataverseSynonyms(ctx.getTxnId(), database, dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
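Note on the MetadataManager hunks above: the pattern is uniform — every lookup/drop now takes a 'database' argument and guards it with Objects.requireNonNull before delegating to the metadata node, so callers must resolve a database before reaching this layer. A minimal caller sketch, using only signatures visible in this change; the helper class/method names are illustrative and import paths for types not shown in the hunks are assumed:

    import java.util.Objects;
    import org.apache.asterix.common.metadata.DataverseName;
    import org.apache.asterix.metadata.MetadataManager;
    import org.apache.asterix.metadata.MetadataTransactionContext;
    import org.apache.asterix.metadata.entities.Dataset;
    import org.apache.asterix.metadata.utils.MetadataUtil;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

    // Illustrative helper: resolve the database first so the requireNonNull guard
    // inside MetadataManager cannot trip for callers that only carry a DataverseName.
    final class DatasetLookupSketch {
        static Dataset lookupDataset(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
                String datasetName) throws AlgebricksException {
            String database = MetadataUtil.resolveDatabase(null, dataverseName);
            Objects.requireNonNull(database, "database could not be resolved from dataverse");
            return MetadataManager.INSTANCE.getDataset(mdTxnCtx, database, dataverseName, datasetName);
        }
    }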
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourcePartitioningProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourcePartitioningProvider.java
index 3f3482a..25af177 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourcePartitioningProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourcePartitioningProvider.java
@@ -24,6 +24,7 @@
import org.apache.asterix.common.cluster.PartitioningProperties;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.ListSet;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
@@ -64,8 +65,9 @@
break;
case DataSource.Type.FEED:
String dsName = ((FeedDataSource) ds).getTargetDataset();
- Dataset feedDs = ((MetadataProvider) ctx.getMetadataProvider())
- .findDataset(ds.getId().getDataverseName(), dsName);
+ String database = MetadataUtil.resolveDatabase(null, ds.getId().getDataverseName());
+ Dataset feedDs = ((MetadataProvider) ctx.getMetadataProvider()).findDataset(database,
+ ds.getId().getDataverseName(), dsName);
PartitioningProperties partitioningProperties =
((MetadataProvider) ctx.getMetadataProvider()).getPartitioningProperties(feedDs);
pp = getFeedDatasetPartitioningProperty(ds, domain, scanVariables,
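The FEED branch above shows the interim idiom used throughout this change wherever a caller still carries only a DataverseName: pass null to MetadataUtil.resolveDatabase and let it derive the database. The rule below is inferred from the call sites and from the databaseFor() hunk at the end of this change; it is a sketch of the assumed contract, not a copy of the implementation, and the DEFAULT_DATABASE constant name is an assumption:

    import org.apache.asterix.common.metadata.DataverseName;
    import org.apache.asterix.metadata.utils.MetadataConstants;

    final class ResolveDatabaseSketch {
        // An explicitly supplied database wins; otherwise derive it from the dataverse:
        // the Metadata dataverse maps to the system database, everything else to the default database.
        static String resolveDatabase(String database, DataverseName dataverseName) {
            if (database != null) {
                return database;
            }
            if (dataverseName == null) {
                return null;
            }
            return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName)
                    ? MetadataConstants.SYSTEM_DATABASE
                    : MetadataConstants.DEFAULT_DATABASE; // assumed constant name
        }
    }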
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
index 39a70f8..9880a24 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
@@ -126,7 +126,7 @@
IVariableTypeEnvironment typeEnv, JobGenContext context, JobSpecification jobSpec, Object implConfig,
IProjectionFiltrationInfo projectionFiltrationInfo) throws AlgebricksException {
String itemTypeName = dataset.getItemTypeName();
- String itemTypeDatabase = null;
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
itemTypeDatabase, dataset.getItemTypeDataverseName(), itemTypeName).getDatatype();
switch (dataset.getDatasetType()) {
@@ -158,7 +158,8 @@
ARecordType datasetType = (ARecordType) itemType;
ARecordType metaItemType = null;
if (dataset.hasMetaPart()) {
- String metaItemTypeDatabase = null;
+ String metaItemTypeDatabase =
+ MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
metaItemType = (ARecordType) MetadataManager.INSTANCE
.getDatatype(metadataProvider.getMetadataTxnContext(), metaItemTypeDatabase,
dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName())
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
index 152ace9..47136b6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
@@ -44,6 +44,7 @@
import org.apache.asterix.metadata.entities.NodeGroup;
import org.apache.asterix.metadata.entities.Synonym;
import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.ProjectionFiltrationTypeUtil;
@@ -59,9 +60,9 @@
throw new AssertionError("This util class should not be initialized.");
}
- public static IAType findType(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String typeName)
- throws AlgebricksException {
- Datatype type = findTypeEntity(mdTxnCtx, null, dataverseName, typeName);
+ public static IAType findType(MetadataTransactionContext mdTxnCtx, String database, DataverseName dataverseName,
+ String typeName) throws AlgebricksException {
+ Datatype type = findTypeEntity(mdTxnCtx, database, dataverseName, typeName);
return type != null ? type.getDatatype() : null;
}
@@ -94,7 +95,7 @@
public static Datatype findTypeEntity(MetadataTransactionContext mdTxnCtx, String database,
DataverseName dataverseName, String typeName) throws AlgebricksException {
- if (dataverseName == null || typeName == null) {
+ if (database == null || dataverseName == null || typeName == null) {
return null;
}
Datatype type = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, database, dataverseName, typeName);
@@ -104,16 +105,20 @@
return type;
}
- public static ARecordType findOutputRecordType(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
- String outputRecordType) throws AlgebricksException {
+ public static ARecordType findOutputRecordType(MetadataTransactionContext mdTxnCtx, String database,
+ DataverseName dataverseName, String outputRecordType) throws AlgebricksException {
if (outputRecordType == null) {
return null;
}
+ if (database == null) {
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR,
+ "Cannot declare output-record-type with no database");
+ }
if (dataverseName == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR,
"Cannot declare output-record-type with no " + dataverse());
}
- IAType type = findType(mdTxnCtx, dataverseName, outputRecordType);
+ IAType type = findType(mdTxnCtx, database, dataverseName, outputRecordType);
if (!(type instanceof ARecordType)) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR,
"Type " + outputRecordType + " is not a record type!");
@@ -125,8 +130,8 @@
DataverseName dataverseName, String adapterName) throws AlgebricksException {
DatasourceAdapter adapter;
// search in default namespace (built-in adapter)
- adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, MetadataConstants.METADATA_DATAVERSE_NAME,
- adapterName);
+ adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.SYSTEM_DATABASE,
+ MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
// search in dataverse (user-defined adapter)
if (adapter == null) {
@@ -144,14 +149,14 @@
return dataset;
}
- public static Dataset findDataset(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
+ public static Dataset findDataset(MetadataTransactionContext mdTxnCtx, String database, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
- return findDataset(mdTxnCtx, null, dataverseName, datasetName, false);
+ return findDataset(mdTxnCtx, database, dataverseName, datasetName, false);
}
- public static Dataset findExistingDataset(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
- String datasetName) throws AlgebricksException {
- Dataset dataset = findDataset(mdTxnCtx, dataverseName, datasetName);
+ public static Dataset findExistingDataset(MetadataTransactionContext mdTxnCtx, String database,
+ DataverseName dataverseName, String datasetName) throws AlgebricksException {
+ Dataset dataset = findDataset(mdTxnCtx, database, dataverseName, datasetName);
if (dataset == null) {
throw new AsterixException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, datasetName, dataverseName);
}
@@ -223,7 +228,8 @@
public static DataSource lookupSourceInMetadata(IClusterStateManager clusterStateManager,
MetadataTransactionContext mdTxnCtx, DataSourceId id) throws AlgebricksException {
- Dataset dataset = findDataset(mdTxnCtx, id.getDataverseName(), id.getDatasourceName());
+ Dataset dataset = findDataset(mdTxnCtx, MetadataUtil.resolveDatabase(null, id.getDataverseName()),
+ id.getDataverseName(), id.getDatasourceName());
if (dataset == null) {
throw new AsterixException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, id.getDatasourceName(),
id.getDataverseName());
@@ -241,8 +247,11 @@
id.getDataverseName());
}
- IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
- IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ IAType itemType = findType(mdTxnCtx, MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName()),
+ dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ IAType metaItemType =
+ findType(mdTxnCtx, MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName()),
+ dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
itemType = findTypeForDatasetWithoutType(itemType, metaItemType, dataset);
INodeDomain domain = findNodeDomain(clusterStateManager, mdTxnCtx, dataset.getNodeGroupName());
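The adapter lookup above is now explicit about where built-in adapters live: they are keyed under the system database and the Metadata dataverse, with the caller's database/dataverse pair consulted only as a fallback for user-defined adapters. The sketch below restates that two-step search as a standalone method, using only calls whose signatures appear in this change (the wrapper class is illustrative):

    import org.apache.asterix.common.metadata.DataverseName;
    import org.apache.asterix.metadata.MetadataManager;
    import org.apache.asterix.metadata.MetadataTransactionContext;
    import org.apache.asterix.metadata.entities.DatasourceAdapter;
    import org.apache.asterix.metadata.utils.MetadataConstants;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

    final class AdapterLookupSketch {
        static DatasourceAdapter findAdapter(MetadataTransactionContext mdTxnCtx, String database,
                DataverseName dataverseName, String adapterName) throws AlgebricksException {
            // 1) built-in adapters: system database + Metadata dataverse
            DatasourceAdapter adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx,
                    MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
            // 2) user-defined adapters: the caller's database + dataverse
            if (adapter == null) {
                adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, database, dataverseName, adapterName);
            }
            return adapter;
        }
    }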
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 42162b3..5e82240 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -340,23 +340,33 @@
* Retrieve the Output RecordType, as defined by "set output-record-type".
*/
public ARecordType findOutputRecordType() throws AlgebricksException {
- return MetadataManagerUtil.findOutputRecordType(mdTxnCtx, getDefaultDataverseName(),
+ String database = defaultDataverse == null ? null : defaultDataverse.getDatabaseName();
+ DataverseName dataverseName = defaultDataverse == null ? null : defaultDataverse.getDataverseName();
+ return MetadataManagerUtil.findOutputRecordType(mdTxnCtx, database, dataverseName,
getProperty("output-record-type"));
}
- public Dataset findDataset(DataverseName dataverseName, String datasetName) throws AlgebricksException {
- return findDataset(dataverseName, datasetName, false);
+ public Dataset findDataset(String database, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ return findDataset(database, dataverseName, datasetName, false);
}
- public Dataset findDataset(DataverseName dataverseName, String datasetName, boolean includingViews)
+ public Dataset findDataset(String database, DataverseName dataverseName, String datasetName, boolean includingViews)
throws AlgebricksException {
- DataverseName dvName = getActiveDataverseName(dataverseName);
- if (dvName == null) {
+ String dbName = database;
+ DataverseName dvName = dataverseName;
+ if (dbName == null && dvName == null) {
+ if (defaultDataverse == null) {
+ return null;
+ }
+ dbName = defaultDataverse.getDatabaseName();
+ dvName = defaultDataverse.getDataverseName();
+ } else if (dbName == null || dvName == null) {
return null;
}
appCtx.getMetadataLockManager().acquireDataverseReadLock(locks, dvName);
appCtx.getMetadataLockManager().acquireDatasetReadLock(locks, dvName, datasetName);
- return MetadataManagerUtil.findDataset(mdTxnCtx, null, dvName, datasetName, includingViews);
+ return MetadataManagerUtil.findDataset(mdTxnCtx, dbName, dvName, datasetName, includingViews);
}
public INodeDomain findNodeDomain(String nodeGroupName) throws AlgebricksException {
@@ -367,8 +377,9 @@
return MetadataManagerUtil.findNodes(mdTxnCtx, nodeGroupName);
}
- public Datatype findTypeEntity(DataverseName dataverseName, String typeName) throws AlgebricksException {
- return MetadataManagerUtil.findTypeEntity(mdTxnCtx, null, dataverseName, typeName);
+ public Datatype findTypeEntity(String database, DataverseName dataverseName, String typeName)
+ throws AlgebricksException {
+ return MetadataManagerUtil.findTypeEntity(mdTxnCtx, database, dataverseName, typeName);
}
public IAType findTypeForDatasetWithoutType(IAType recordType, IAType metaRecordType, Dataset dataset)
@@ -376,29 +387,32 @@
return MetadataManagerUtil.findTypeForDatasetWithoutType(recordType, metaRecordType, dataset);
}
- public IAType findType(DataverseName dataverseName, String typeName) throws AlgebricksException {
- return MetadataManagerUtil.findType(mdTxnCtx, dataverseName, typeName);
+ public IAType findType(String database, DataverseName dataverseName, String typeName) throws AlgebricksException {
+ return MetadataManagerUtil.findType(mdTxnCtx, database, dataverseName, typeName);
}
public IAType findType(Dataset dataset) throws AlgebricksException {
- return findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ String typeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ return findType(typeDatabase, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
}
public IAType findMetaType(Dataset dataset) throws AlgebricksException {
- return findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
+ String metaTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ return findType(metaTypeDatabase, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
}
- public Feed findFeed(DataverseName dataverseName, String feedName) throws AlgebricksException {
- return MetadataManagerUtil.findFeed(mdTxnCtx, null, dataverseName, feedName);
+ public Feed findFeed(String database, DataverseName dataverseName, String feedName) throws AlgebricksException {
+ return MetadataManagerUtil.findFeed(mdTxnCtx, database, dataverseName, feedName);
}
- public FeedConnection findFeedConnection(DataverseName dataverseName, String feedName, String datasetName)
+ public FeedConnection findFeedConnection(String database, DataverseName dataverseName, String feedName,
+ String datasetName) throws AlgebricksException {
+ return MetadataManagerUtil.findFeedConnection(mdTxnCtx, database, dataverseName, feedName, datasetName);
+ }
+
+ public FeedPolicyEntity findFeedPolicy(String database, DataverseName dataverseName, String policyName)
throws AlgebricksException {
- return MetadataManagerUtil.findFeedConnection(mdTxnCtx, null, dataverseName, feedName, datasetName);
- }
-
- public FeedPolicyEntity findFeedPolicy(DataverseName dataverseName, String policyName) throws AlgebricksException {
- return MetadataManagerUtil.findFeedPolicy(mdTxnCtx, null, dataverseName, policyName);
+ return MetadataManagerUtil.findFeedPolicy(mdTxnCtx, database, dataverseName, policyName);
}
@Override
@@ -417,60 +431,72 @@
Dataset dataset = ((DatasetDataSource) source).getDataset();
// index could be a primary index or secondary index
DataverseName dataverseName = dataset.getDataverseName();
+ String database = dataset.getDatabaseName();
String datasetName = dataset.getDatasetName();
- Index index = getIndex(dataverseName, datasetName, indexId);
+ Index index = getIndex(database, dataverseName, datasetName, indexId);
return index != null ? new DataSourceIndex(index, dataverseName, datasetName, this) : null;
}
- public Index getIndex(DataverseName dataverseName, String datasetName, String indexName)
+ public Index getIndex(String database, DataverseName dataverseName, String datasetName, String indexName)
throws AlgebricksException {
- return MetadataManager.INSTANCE.getIndex(mdTxnCtx, null, dataverseName, datasetName, indexName);
+ return MetadataManager.INSTANCE.getIndex(mdTxnCtx, database, dataverseName, datasetName, indexName);
}
- public List<Index> getDatasetIndexes(DataverseName dataverseName, String datasetName) throws AlgebricksException {
- return MetadataManagerUtil.getDatasetIndexes(mdTxnCtx, null, dataverseName, datasetName);
+ public List<Index> getDatasetIndexes(String database, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ return MetadataManagerUtil.getDatasetIndexes(mdTxnCtx, database, dataverseName, datasetName);
}
public Index findSampleIndex(DataverseName dataverseName, String datasetName) throws AlgebricksException {
Pair<String, String> sampleIndexNames = IndexUtil.getSampleIndexNames(datasetName);
- Index sampleIndex = getIndex(dataverseName, datasetName, sampleIndexNames.first);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Index sampleIndex = getIndex(database, dataverseName, datasetName, sampleIndexNames.first);
if (sampleIndex != null && sampleIndex.getPendingOp() == MetadataUtil.PENDING_NO_OP) {
return sampleIndex;
}
- sampleIndex = getIndex(dataverseName, datasetName, sampleIndexNames.second);
+ sampleIndex = getIndex(database, dataverseName, datasetName, sampleIndexNames.second);
return sampleIndex != null && sampleIndex.getPendingOp() == MetadataUtil.PENDING_NO_OP ? sampleIndex : null;
}
- public Triple<DataverseName, String, Boolean> resolveDatasetNameUsingSynonyms(DataverseName dataverseName,
- String datasetName, boolean includingViews) throws AlgebricksException {
- DataverseName dvName = getActiveDataverseName(dataverseName);
- if (dvName == null) {
+ public Triple<DataverseName, String, Boolean> resolveDatasetNameUsingSynonyms(String database,
+ DataverseName dataverseName, String datasetName, boolean includingViews) throws AlgebricksException {
+ String dbName = database;
+ DataverseName dvName = dataverseName;
+ if (dbName == null && dvName == null) {
+ if (defaultDataverse == null) {
+ return null;
+ }
+ dbName = defaultDataverse.getDatabaseName();
+ dvName = defaultDataverse.getDataverseName();
+ } else if (dbName == null || dvName == null) {
return null;
}
Synonym synonym = null;
- while (MetadataManagerUtil.findDataset(mdTxnCtx, null, dvName, datasetName, includingViews) == null) {
- synonym = findSynonym(dvName, datasetName);
+ while (MetadataManagerUtil.findDataset(mdTxnCtx, dbName, dvName, datasetName, includingViews) == null) {
+ synonym = findSynonym(dbName, dvName, datasetName);
if (synonym == null) {
return null;
}
+ //TODO(DB): object database
dvName = synonym.getObjectDataverseName();
datasetName = synonym.getObjectName();
}
return new Triple<>(dvName, datasetName, synonym != null);
}
- public Synonym findSynonym(DataverseName dataverseName, String synonymName) throws AlgebricksException {
- return MetadataManagerUtil.findSynonym(mdTxnCtx, null, dataverseName, synonymName);
+ public Synonym findSynonym(String database, DataverseName dataverseName, String synonymName)
+ throws AlgebricksException {
+ return MetadataManagerUtil.findSynonym(mdTxnCtx, database, dataverseName, synonymName);
}
- public FullTextConfigMetadataEntity findFullTextConfig(DataverseName dataverseName, String ftConfigName)
- throws AlgebricksException {
- return MetadataManagerUtil.findFullTextConfigDescriptor(mdTxnCtx, null, dataverseName, ftConfigName);
+ public FullTextConfigMetadataEntity findFullTextConfig(String database, DataverseName dataverseName,
+ String ftConfigName) throws AlgebricksException {
+ return MetadataManagerUtil.findFullTextConfigDescriptor(mdTxnCtx, database, dataverseName, ftConfigName);
}
- public FullTextFilterMetadataEntity findFullTextFilter(DataverseName dataverseName, String ftFilterName)
- throws AlgebricksException {
- return MetadataManagerUtil.findFullTextFilterDescriptor(mdTxnCtx, null, dataverseName, ftFilterName);
+ public FullTextFilterMetadataEntity findFullTextFilter(String database, DataverseName dataverseName,
+ String ftFilterName) throws AlgebricksException {
+ return MetadataManagerUtil.findFullTextFilterDescriptor(mdTxnCtx, database, dataverseName, ftFilterName);
}
@Override
@@ -479,6 +505,7 @@
}
public Function lookupUserDefinedFunction(FunctionSignature signature) throws AlgebricksException {
+ //TODO(DB):
if (signature.getDataverseName() == null) {
return null;
}
@@ -508,8 +535,8 @@
}
}
- public Dataverse findDataverse(DataverseName dataverseName) throws AlgebricksException {
- return MetadataManager.INSTANCE.getDataverse(mdTxnCtx, null, dataverseName);
+ public Dataverse findDataverse(String database, DataverseName dataverseName) throws AlgebricksException {
+ return MetadataManager.INSTANCE.getDataverse(mdTxnCtx, database, dataverseName);
}
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, ITypedAdapterFactory> getFeedIntakeRuntime(
@@ -660,7 +687,9 @@
byte[] successValueForIndexOnlyPlan = null;
byte[] failValueForIndexOnlyPlan = null;
if (isIndexOnlyPlan) {
- ARecordType recType = (ARecordType) findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ ARecordType recType = (ARecordType) findType(itemTypeDatabase, dataset.getItemTypeDataverseName(),
+ dataset.getItemTypeName());
List<List<String>> secondaryKeyFields = secondaryIndexDetails.getKeyFieldNames();
List<IAType> secondaryKeyTypes = secondaryIndexDetails.getKeyFieldTypes();
Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryIndex,
@@ -762,7 +791,8 @@
JobSpecification spec) throws AlgebricksException {
DataverseName dataverseName = dataSource.getId().getDataverseName();
String datasetName = dataSource.getId().getDatasourceName();
- Dataset dataset = findDataset(dataverseName, datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
+ Dataset dataset = findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new AsterixException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, datasetName, dataverseName);
}
@@ -852,6 +882,7 @@
String indexName = dataSourceIndex.getId();
DataverseName dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
IOperatorSchema inputSchema;
if (inputSchemas.length > 0) {
@@ -860,7 +891,7 @@
throw new AlgebricksException("TokenizeOperator can not operate without any input variable.");
}
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDatabaseName(),
dataset.getDataverseName(), dataset.getDatasetName(), indexName);
// TokenizeOperator only supports a keyword or n-gram index.
@@ -869,8 +900,8 @@
case SINGLE_PARTITION_NGRAM_INVIX:
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX:
- return getBinaryTokenizerRuntime(dataverseName, datasetName, indexName, inputSchema, propagatedSchema,
- primaryKeys, secondaryKeys, recordDesc, spec, secondaryIndex.getIndexType());
+ return getBinaryTokenizerRuntime(database, dataverseName, datasetName, indexName, inputSchema,
+ propagatedSchema, primaryKeys, secondaryKeys, recordDesc, spec, secondaryIndex.getIndexType());
default:
throw new AlgebricksException("Currently, we do not support TokenizeOperator for the index type: "
+ secondaryIndex.getIndexType());
@@ -932,16 +963,16 @@
.getFileSplits();
}
- public DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
- String adapterName) throws AlgebricksException {
+ public DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, String database,
+ DataverseName dataverseName, String adapterName) throws AlgebricksException {
DatasourceAdapter adapter;
// search in default namespace (built-in adapter)
- adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, MetadataConstants.METADATA_DATAVERSE_NAME,
- adapterName);
+ adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.SYSTEM_DATABASE,
+ MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
// search in dataverse (user-defined adapter)
if (adapter == null) {
- adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, dataverseName, adapterName);
+ adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, database, dataverseName, adapterName);
}
return adapter;
}
@@ -997,8 +1028,9 @@
List<LogicalVariable> additionalNonFilteringFields) throws AlgebricksException {
String datasetName = dataSource.getId().getDatasourceName();
- Dataset dataset =
- MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataSource.getId().getDataverseName(), datasetName);
+ String database = MetadataUtil.resolveDatabase(null, dataSource.getId().getDataverseName());
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database,
+ dataSource.getId().getDataverseName(), datasetName);
int numKeys = keys.size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
// Move key fields to front.
@@ -1114,9 +1146,10 @@
throws AlgebricksException {
String indexName = dataSourceIndex.getId();
DataverseName dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
+ String database = MetadataUtil.resolveDatabase(null, dataverseName);
String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDatabaseName(),
dataset.getDataverseName(), dataset.getDatasetName(), indexName);
@@ -1142,41 +1175,43 @@
switch (secondaryIndex.getIndexType()) {
case BTREE:
- return getBTreeModificationRuntime(dataverseName, datasetName, indexName, propagatedSchema, primaryKeys,
- secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory, inputRecordDesc,
- context, spec, indexOp, bulkload, operationVar, prevSecondaryKeys, prevAdditionalFilteringKeys);
+ return getBTreeModificationRuntime(database, dataverseName, datasetName, indexName, propagatedSchema,
+ primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory,
+ inputRecordDesc, context, spec, indexOp, bulkload, operationVar, prevSecondaryKeys,
+ prevAdditionalFilteringKeys);
case ARRAY:
if (bulkload) {
// In the case of bulk-load, we do not handle any nested plans. We perform the exact same behavior
// as a normal B-Tree bulk load.
- return getBTreeModificationRuntime(dataverseName, datasetName, indexName, propagatedSchema,
- primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory,
- inputRecordDesc, context, spec, indexOp, bulkload, operationVar, prevSecondaryKeys,
- prevAdditionalFilteringKeys);
+ return getBTreeModificationRuntime(database, dataverseName, datasetName, indexName,
+ propagatedSchema, primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory,
+ prevFilterFactory, inputRecordDesc, context, spec, indexOp, bulkload, operationVar,
+ prevSecondaryKeys, prevAdditionalFilteringKeys);
} else {
- return getArrayIndexModificationRuntime(dataverseName, datasetName, indexName, propagatedSchema,
- primaryKeys, additionalNonKeyFields, inputRecordDesc, spec, indexOp, operationVar,
- secondaryKeysPipelines);
+ return getArrayIndexModificationRuntime(database, dataverseName, datasetName, indexName,
+ propagatedSchema, primaryKeys, additionalNonKeyFields, inputRecordDesc, spec, indexOp,
+ operationVar, secondaryKeysPipelines);
}
case RTREE:
- return getRTreeModificationRuntime(dataverseName, datasetName, indexName, propagatedSchema, primaryKeys,
- secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory, inputRecordDesc,
- context, spec, indexOp, bulkload, operationVar, prevSecondaryKeys, prevAdditionalFilteringKeys);
+ return getRTreeModificationRuntime(database, dataverseName, datasetName, indexName, propagatedSchema,
+ primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory,
+ inputRecordDesc, context, spec, indexOp, bulkload, operationVar, prevSecondaryKeys,
+ prevAdditionalFilteringKeys);
case SINGLE_PARTITION_WORD_INVIX:
case SINGLE_PARTITION_NGRAM_INVIX:
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX:
- return getInvertedIndexModificationRuntime(dataverseName, datasetName, indexName, propagatedSchema,
- primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory, prevFilterFactory,
- inputRecordDesc, context, spec, indexOp, secondaryIndex.getIndexType(), bulkload, operationVar,
- prevSecondaryKeys, prevAdditionalFilteringKeys);
+ return getInvertedIndexModificationRuntime(database, dataverseName, datasetName, indexName,
+ propagatedSchema, primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory,
+ prevFilterFactory, inputRecordDesc, context, spec, indexOp, secondaryIndex.getIndexType(),
+ bulkload, operationVar, prevSecondaryKeys, prevAdditionalFilteringKeys);
default:
throw new AlgebricksException(
indexOp.name() + " not implemented for index type: " + secondaryIndex.getIndexType());
}
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeModificationRuntime(
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeModificationRuntime(String database,
DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema,
List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
List<LogicalVariable> additionalNonKeyFields, AsterixTupleFilterFactory filterFactory,
@@ -1184,7 +1219,7 @@
JobSpecification spec, IndexOperation indexOp, boolean bulkload, LogicalVariable operationVar,
List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
throws AlgebricksException {
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
int numKeys = primaryKeys.size() + secondaryKeys.size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
@@ -1273,14 +1308,14 @@
}
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getArrayIndexModificationRuntime(
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getArrayIndexModificationRuntime(String database,
DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema,
List<LogicalVariable> primaryKeys, List<LogicalVariable> additionalNonKeyFields,
RecordDescriptor inputRecordDesc, JobSpecification spec, IndexOperation indexOp,
LogicalVariable operationVar, List<List<AlgebricksPipeline>> secondaryKeysPipelines)
throws AlgebricksException {
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
int numPrimaryKeys = primaryKeys.size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
@@ -1337,7 +1372,7 @@
}
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRTreeModificationRuntime(
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRTreeModificationRuntime(String database,
DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema,
List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
List<LogicalVariable> additionalNonKeyFields, AsterixTupleFilterFactory filterFactory,
@@ -1345,9 +1380,9 @@
JobSpecification spec, IndexOperation indexOp, boolean bulkload, LogicalVariable operationVar,
List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
throws AlgebricksException {
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
String itemTypeName = dataset.getItemTypeName();
- String itemTypeDatabase = null;
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
IAType itemType = MetadataManager.INSTANCE
.getDatatype(mdTxnCtx, itemTypeDatabase, dataset.getItemTypeDataverseName(), itemTypeName)
.getDatatype();
@@ -1449,8 +1484,8 @@
}
private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInvertedIndexModificationRuntime(
- DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema,
- List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ String database, DataverseName dataverseName, String datasetName, String indexName,
+ IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
List<LogicalVariable> additionalNonKeyFields, AsterixTupleFilterFactory filterFactory,
AsterixTupleFilterFactory prevFilterFactory, RecordDescriptor recordDesc, JobGenContext context,
JobSpecification spec, IndexOperation indexOp, IndexType indexType, boolean bulkload,
@@ -1471,7 +1506,7 @@
if ((secondaryKeys.size() > 1 && !isPartitioned) || (secondaryKeys.size() > 2 && isPartitioned)) {
throw new AlgebricksException("Cannot create composite inverted index on multiple fields.");
}
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
// For tokenization, sorting and loading.
// One token (+ optional partitioning field) + primary keys: [token,
// number of token, PK]
@@ -1571,7 +1606,7 @@
}
// Get a Tokenizer for the bulk-loading data into a n-gram or keyword index.
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBinaryTokenizerRuntime(
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBinaryTokenizerRuntime(String database,
DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema inputSchema,
IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
RecordDescriptor recordDesc, JobSpecification spec, IndexType indexType) throws AlgebricksException {
@@ -1653,11 +1688,11 @@
i++;
}
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
+ Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, database, dataverseName, datasetName);
String itemTypeName = dataset.getItemTypeName();
IAType itemType;
try {
- String itemTypeDatabase = null;
+ String itemTypeDatabase = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
itemType = MetadataManager.INSTANCE
.getDatatype(mdTxnCtx, itemTypeDatabase, dataset.getItemTypeDataverseName(), itemTypeName)
.getDatatype();
@@ -1705,8 +1740,8 @@
IBinaryTokenizerFactory tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(
secondaryKeyType.getTypeTag(), indexType, secondaryIndexDetails.getGramLength());
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory =
- FullTextUtil.fetchFilterAndCreateConfigEvaluator(this, secondaryIndex.getDataverseName(),
- secondaryIndexDetails.getFullTextConfigName());
+ FullTextUtil.fetchFilterAndCreateConfigEvaluator(this, secondaryIndex.getDatabaseName(),
+ secondaryIndex.getDataverseName(), secondaryIndexDetails.getFullTextConfigName());
PartitioningProperties partitioningProperties =
getPartitioningProperties(dataset, secondaryIndex.getIndexName());
@@ -1783,7 +1818,7 @@
}
public PartitioningProperties getPartitioningProperties(Index idx) throws AlgebricksException {
- Dataset ds = findDataset(idx.getDataverseName(), idx.getDatasetName());
+ Dataset ds = findDataset(idx.getDatabaseName(), idx.getDataverseName(), idx.getDatasetName());
return getPartitioningProperties(ds, idx.getIndexName());
}
@@ -1800,7 +1835,7 @@
}
public List<Index> getSecondaryIndexes(Dataset ds) throws AlgebricksException {
- return getDatasetIndexes(ds.getDataverseName(), ds.getDatasetName()).stream()
+ return getDatasetIndexes(ds.getDatabaseName(), ds.getDataverseName(), ds.getDatasetName()).stream()
.filter(idx -> idx.isSecondaryIndex() && !idx.isSampleIndex()).collect(Collectors.toList());
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index e09eb8f..eab5fd7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -75,6 +75,8 @@
((AString) adapterRecord.getValueByPos(libraryDataverseNameIdx)).getStringValue())
: dataverseName;
libraryDatabase = MetadataUtil.resolveDatabase(libraryDatabase, libraryDataverseName);
+ } else {
+ libraryDatabase = MetadataUtil.resolveDatabase(libraryDatabase, dataverseName);
}
return new DatasourceAdapter(new AdapterIdentifier(databaseName, dataverseName, adapterName), adapterType,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index cccd3dd..12595f3 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -53,6 +53,7 @@
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
import org.apache.asterix.metadata.entities.Library;
import org.apache.asterix.metadata.utils.MetadataConstants;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.commons.lang3.StringUtils;
@@ -73,9 +74,9 @@
private FeedMetadataUtil() {
}
- public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, DataverseName dataverseName,
- String datasetName) throws AlgebricksException {
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, String database,
+ DataverseName dataverseName, String datasetName) throws AlgebricksException {
+ Dataset dataset = metadataProvider.findDataset(database, dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException("Unknown target " + dataset() + " :" + datasetName);
}
@@ -87,9 +88,8 @@
return dataset;
}
- public static Feed validateIfFeedExists(DataverseName dataverseName, String feedName,
+ public static Feed validateIfFeedExists(String database, DataverseName dataverseName, String feedName,
MetadataTransactionContext ctx) throws AlgebricksException {
- String database = null;
Feed feed = MetadataManager.INSTANCE.getFeed(ctx, database, dataverseName, feedName);
if (feed == null) {
throw new CompilationException("Unknown source feed: " + feedName);
@@ -97,13 +97,12 @@
return feed;
}
- public static FeedPolicyEntity validateIfPolicyExists(DataverseName dataverseName, String policyName,
- MetadataTransactionContext ctx) throws AlgebricksException {
- String database = null;
+ public static FeedPolicyEntity validateIfPolicyExists(String database, DataverseName dataverseName,
+ String policyName, MetadataTransactionContext ctx) throws AlgebricksException {
FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, database, dataverseName, policyName);
if (feedPolicy == null) {
- feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, null, MetadataConstants.METADATA_DATAVERSE_NAME,
- policyName);
+ feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, MetadataConstants.SYSTEM_DATABASE,
+ MetadataConstants.METADATA_DATAVERSE_NAME, policyName);
if (feedPolicy == null) {
throw new CompilationException("Unknown feed policy" + policyName);
}
@@ -123,12 +122,13 @@
if (adapterName == null) {
throw new AlgebricksException("cannot find adapter name");
}
- DatasourceAdapter adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null,
- MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
+ DatasourceAdapter adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx,
+ MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
// Get adapter from metadata dataset <The feed dataverse>
if (adapterEntity == null) {
- adapterEntity =
- MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, feed.getDataverseName(), adapterName);
+ String feedDatabase = MetadataUtil.resolveDatabase(null, feed.getDataverseName());
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feedDatabase, feed.getDataverseName(),
+ adapterName);
}
AdapterType adapterType;
ITypedAdapterFactory adapterFactory;
@@ -213,12 +213,13 @@
metaType = getOutputType(feed, configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME));
ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
// Get adapter from metadata dataset <Metadata dataverse>
- adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null,
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.SYSTEM_DATABASE,
MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
// Get adapter from metadata dataset <The feed dataverse>
if (adapterEntity == null) {
- adapterEntity =
- MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, feed.getDataverseName(), adapterName);
+ String feedDatabase = MetadataUtil.resolveDatabase(null, feed.getDataverseName());
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feedDatabase, feed.getDataverseName(),
+ adapterName);
}
if (adapterEntity != null) {
adapterType = adapterEntity.getType();
@@ -311,7 +312,8 @@
MetadataTransactionContext ctx = null;
try {
ctx = MetadataManager.INSTANCE.beginTransaction();
- Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, null, feed.getDataverseName(), fqOutputType);
+ String feedDatabase = MetadataUtil.resolveDatabase(null, feed.getDataverseName());
+ Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, feedDatabase, feed.getDataverseName(), fqOutputType);
if (t == null || t.getDatatype().getTypeTag() != ATypeTag.OBJECT) {
throw new MetadataException(ErrorCode.FEED_METADATA_UTIL_UNEXPECTED_FEED_DATATYPE, fqOutputType);
}
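FeedMetadataUtil's validate* helpers above likewise take the database up front instead of hard-coding null internally. Callers that only have the feed's dataverse can migrate with the same resolveDatabase idiom; a sketch using only signatures shown in this change (class and method names are illustrative):

    import org.apache.asterix.common.metadata.DataverseName;
    import org.apache.asterix.metadata.MetadataTransactionContext;
    import org.apache.asterix.metadata.entities.Feed;
    import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
    import org.apache.asterix.metadata.utils.MetadataUtil;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;

    final class FeedLookupSketch {
        static Feed findSourceFeed(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName)
                throws AlgebricksException {
            String database = MetadataUtil.resolveDatabase(null, dataverseName);
            return FeedMetadataUtil.validateIfFeedExists(database, dataverseName, feedName, ctx);
        }
    }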
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
index 0c5315c..9b51096 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.functions.ExternalFunctionLanguage;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.metadata.utils.MetadataUtil;
import org.apache.asterix.om.functions.IExternalFunctionInfo;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.BuiltinType;
@@ -136,7 +137,8 @@
}
IAType type = BuiltinTypeMap.getBuiltinType(typeName);
if (type == null) {
- type = metadataProvider.findType(typeSignature.getDataverseName(), typeName);
+ String database = MetadataUtil.resolveDatabase(null, typeSignature.getDataverseName());
+ type = metadataProvider.findType(database, typeSignature.getDataverseName(), typeName);
}
return type;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 09a8a4b..a6119d8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -250,8 +250,8 @@
public static Pair<ILSMMergePolicyFactory, Map<String, String>> getMergePolicyFactory(Dataset dataset,
MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
String policyName = dataset.getCompactionPolicy();
- CompactionPolicy compactionPolicy = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx, null,
- MetadataConstants.METADATA_DATAVERSE_NAME, policyName);
+ CompactionPolicy compactionPolicy = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
+ MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME, policyName);
String compactionPolicyFactoryClassName = compactionPolicy.getClassName();
ILSMMergePolicyFactory mergePolicyFactory;
Map<String, String> properties = dataset.getCompactionPolicyProperties();
@@ -294,7 +294,8 @@
public static ARecordType getMetaType(MetadataProvider metadataProvider, Dataset dataset)
throws AlgebricksException {
if (dataset.hasMetaPart()) {
- return (ARecordType) metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
+ String database = MetadataUtil.resolveDatabase(null, dataset.getMetaItemTypeDataverseName());
+ return (ARecordType) metadataProvider.findType(database, dataset.getMetaItemTypeDataverseName(),
dataset.getMetaItemTypeName());
}
return null;
@@ -363,7 +364,7 @@
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
MetadataProvider metadataProvider) throws AlgebricksException {
DataverseName dataverseName = dataverse.getDataverseName();
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset dataset = metadataProvider.findDataset(dataverse.getDatabaseName(), dataverseName, datasetName);
if (dataset == null) {
throw new AsterixException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, datasetName, dataverseName);
}
@@ -439,8 +440,8 @@
ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
itemType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(itemType, metaItemType, dataset);
- Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
- dataset.getDatasetName());
+ Index primaryIndex = metadataProvider.getIndex(dataset.getDatabaseName(), dataset.getDataverseName(),
+ dataset.getDatasetName(), dataset.getDatasetName());
PartitioningProperties partitioningProperties = metadataProvider.getPartitioningProperties(dataset);
// prepare callback
@@ -450,8 +451,9 @@
primaryKeyFields[i] = i;
pkFields[i] = fieldPermutation[i];
}
- boolean hasSecondaries =
- metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
+ boolean hasSecondaries = metadataProvider
+ .getDatasetIndexes(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName())
+ .size() > 1;
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
storageComponentProvider, primaryIndex, IndexOperation.UPSERT, primaryKeyFields);
@@ -541,8 +543,8 @@
}
// Column
- List<Index> secondaryIndexes =
- metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> secondaryIndexes = metadataProvider.getDatasetIndexes(dataset.getDatabaseName(),
+ dataset.getDataverseName(), dataset.getDatasetName());
List<ARecordType> indexPaths = new ArrayList<>();
for (Index index : secondaryIndexes) {
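Note: the DatasetUtil call sites above all follow the same pattern: when a caller has no explicit database in hand, it derives one from the dataverse name before invoking the widened MetadataProvider API. A minimal sketch of that pattern (the helper name and its placement are illustrative only, not part of this patch):

    private static Dataset findDatasetByDataverse(MetadataProvider metadataProvider,
            DataverseName dataverseName, String datasetName) throws AlgebricksException {
        // Passing a null database lets MetadataUtil fall back to the database implied
        // by the dataverse name (see the MetadataUtil hunk further below).
        String database = MetadataUtil.resolveDatabase(null, dataverseName);
        return metadataProvider.findDataset(database, dataverseName, datasetName);
    }
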
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/FullTextUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/FullTextUtil.java
index 563bab4..4acb24c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/FullTextUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/FullTextUtil.java
@@ -30,14 +30,14 @@
public class FullTextUtil {
public static IFullTextConfigEvaluatorFactory fetchFilterAndCreateConfigEvaluator(MetadataProvider metadataProvider,
- DataverseName dataverseName, String configName) throws AlgebricksException {
+ String database, DataverseName dataverseName, String configName) throws AlgebricksException {
FullTextConfigDescriptor configDescriptor =
- metadataProvider.findFullTextConfig(dataverseName, configName).getFullTextConfig();
+ metadataProvider.findFullTextConfig(database, dataverseName, configName).getFullTextConfig();
ImmutableList.Builder<AbstractFullTextFilterDescriptor> filterDescriptorsBuilder = ImmutableList.builder();
for (String filterName : configDescriptor.getFilterNames()) {
filterDescriptorsBuilder
- .add(metadataProvider.findFullTextFilter(dataverseName, filterName).getFullTextFilter());
+ .add(metadataProvider.findFullTextFilter(database, dataverseName, filterName).getFullTextFilter());
}
return configDescriptor.createEvaluatorFactory(filterDescriptorsBuilder.build());
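Note: with the added parameter, callers now thread the index's database through to the full-text config lookup. A usage sketch mirroring the call sites changed later in this patch (index, indexDetails, and metadataProvider are assumed to be in scope):

    IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory =
            FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider, index.getDatabaseName(),
                    index.getDataverseName(), indexDetails.getFullTextConfigName());
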
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
index a75a75d..7a50b40 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
@@ -129,8 +129,8 @@
getTokenComparatorFactories(dataset, index, recordType, metaType);
IBinaryTokenizerFactory tokenizerFactory = getTokenizerFactory(dataset, index, recordType, metaType);
IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory =
- FullTextUtil.fetchFilterAndCreateConfigEvaluator(mdProvider, index.getDataverseName(),
- indexDetails.getFullTextConfigName());
+ FullTextUtil.fetchFilterAndCreateConfigEvaluator(mdProvider, index.getDatabaseName(),
+ index.getDataverseName(), indexDetails.getFullTextConfigName());
ITypeTraitProvider typeTraitProvider = mdProvider.getDataFormat().getTypeTraitProvider();
return new LSMInvertedIndexLocalResourceFactory(storageManager, typeTraits, cmpFactories, filterTypeTraits,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
index 638c3f4..45efd11 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
@@ -46,11 +46,17 @@
}
public static String databaseFor(DataverseName dataverse) {
+ if (dataverse == null) {
+ return null;
+ }
return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse) ? MetadataConstants.SYSTEM_DATABASE
: MetadataConstants.DEFAULT_DATABASE;
}
public static String resolveDatabase(String database, DataverseName dataverse) {
+ if (dataverse == null) {
+ return null;
+ }
if (database != null) {
return database;
}
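Note: the new null guards make both helpers safe to call with an absent dataverse. A behavior sketch, assuming the fall-through in resolveDatabase (truncated in the hunk above) still delegates to databaseFor, and with dataverseName an arbitrary in-scope DataverseName:

    String explicit = MetadataUtil.resolveDatabase("mydb", dataverseName); // explicit database wins -> "mydb"
    String none = MetadataUtil.resolveDatabase(null, null);                // null dataverse -> null
    String system = MetadataUtil.databaseFor(MetadataConstants.METADATA_DATAVERSE_NAME);
    // -> MetadataConstants.SYSTEM_DATABASE; any other non-null dataverse maps to DEFAULT_DATABASE
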
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SampleOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SampleOperationsHelper.java
index 8e5fa2b..46eebfc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SampleOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SampleOperationsHelper.java
@@ -130,8 +130,9 @@
@Override
public void init() throws AlgebricksException {
- itemType =
- (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ String database = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ itemType = (ARecordType) metadataProvider.findType(database, dataset.getItemTypeDataverseName(),
+ dataset.getItemTypeName());
metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
itemType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(itemType, metaType, dataset);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
index cd3f01c..f0704eb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
@@ -154,7 +154,7 @@
tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(secondaryKeyType.getTypeTag(), indexType,
indexDetails.getGramLength());
fullTextConfigEvaluatorFactory = FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider,
- index.getDataverseName(), indexDetails.getFullTextConfigName());
+ index.getDatabaseName(), index.getDataverseName(), indexDetails.getFullTextConfigName());
// Type traits for inverted-list elements. Inverted lists contain
// primary keys.
invListsTypeTraits = new ITypeTraits[numPrimaryKeys];
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
index b824512..7dc76ac 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
@@ -134,8 +134,9 @@
this.dataset = dataset;
this.index = index;
this.metadataProvider = metadataProvider;
- ARecordType recordType =
- (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ String database = MetadataUtil.resolveDatabase(null, dataset.getItemTypeDataverseName());
+ ARecordType recordType = (ARecordType) metadataProvider.findType(database, dataset.getItemTypeDataverseName(),
+ dataset.getItemTypeName());
this.metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
this.itemType = (ARecordType) metadataProvider.findTypeForDatasetWithoutType(recordType, metaType, dataset);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index fa55105..b50bdc2 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -68,7 +68,7 @@
private IBinaryComparatorFactory[] tokenComparatorFactories;
private ITypeTraits[] tokenTypeTraits;
private IBinaryTokenizerFactory tokenizerFactory;
- private IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory;
+ private final IFullTextConfigEvaluatorFactory fullTextConfigEvaluatorFactory;
// For tokenization, sorting and loading. Represents <token, primary keys>.
private int numTokenKeyPairFields;
private IBinaryComparatorFactory[] tokenKeyPairComparatorFactories;
@@ -82,7 +82,8 @@
SourceLocation sourceLoc) throws AlgebricksException {
super(dataset, index, metadataProvider, sourceLoc);
this.fullTextConfigEvaluatorFactory = FullTextUtil.fetchFilterAndCreateConfigEvaluator(metadataProvider,
- index.getDataverseName(), ((Index.TextIndexDetails) index.getIndexDetails()).getFullTextConfigName());
+ index.getDatabaseName(), index.getDataverseName(),
+ ((Index.TextIndexDetails) index.getIndexDetails()).getFullTextConfigName());
}
@Override