[NO ISSUE] Support multipart dataverse names
- user model changes: yes
- storage format changes: no
- interface changes: yes
Details:
- Support dataverse names that consist of multiple parts
- Introduce DataverseName class and adopt it throughout the system
- Add test cases for multipart dataverse names
Change-Id: I1313b2dc0e8df6a9b0ded48dea122afc3ba5dbe7
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/4004
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/Activity.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/Activity.java
index 9538118..d8a6c48 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/Activity.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/Activity.java
@@ -19,6 +19,8 @@
import java.util.Map;
+import org.apache.asterix.common.metadata.DataverseName;
+
public class Activity implements Comparable<Activity> {
protected int activityId;
@@ -30,8 +32,8 @@
this.activityDetails = activityDetails;
}
- public String getDataverseName() {
- return activeEntityId.getDataverse();
+ public DataverseName getDataverseName() {
+ return activeEntityId.getDataverseName();
}
public String getActiveEntityName() {
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
index 13e16f0..1d33961 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
@@ -21,25 +21,27 @@
import java.io.Serializable;
import java.util.Objects;
+import org.apache.asterix.common.metadata.DataverseName;
+
/**
* A unique identifier for a data feed.
*/
public class EntityId implements Serializable {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
private final String extensionName;
- private final String dataverse;
+ private final DataverseName dataverseName;
private final String entityName;
- public EntityId(String extentionName, String dataverse, String entityName) {
+ public EntityId(String extentionName, DataverseName dataverseName, String entityName) {
this.extensionName = extentionName;
- this.dataverse = dataverse;
+ this.dataverseName = dataverseName;
this.entityName = entityName;
}
- public String getDataverse() {
- return dataverse;
+ public DataverseName getDataverseName() {
+ return dataverseName;
}
public String getEntityName() {
@@ -55,18 +57,18 @@
return true;
}
EntityId other = (EntityId) o;
- return Objects.equals(other.dataverse, dataverse) && Objects.equals(other.entityName, entityName)
+ return Objects.equals(other.dataverseName, dataverseName) && Objects.equals(other.entityName, entityName)
&& Objects.equals(other.extensionName, extensionName);
}
@Override
public int hashCode() {
- return Objects.hash(dataverse, entityName, extensionName);
+ return Objects.hash(dataverseName, entityName, extensionName);
}
@Override
public String toString() {
- return dataverse + "." + entityName + "(" + extensionName + ")";
+ return dataverseName + "." + entityName + "(" + extensionName + ")";
}
public String getExtensionName() {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
index 9c476e5..7eea595 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
@@ -23,11 +23,11 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.DataSourceId;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.optimizer.rules.am.AccessMethodUtils;
import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -122,14 +122,15 @@
return false;
}
- public static Pair<String, String> getDatasetInfo(AbstractDataSourceOperator op) throws AlgebricksException {
+ public static Pair<DataverseName, String> getDatasetInfo(AbstractDataSourceOperator op) {
DataSourceId srcId = (DataSourceId) op.getDataSource().getId();
return new Pair<>(srcId.getDataverseName(), srcId.getDatasourceName());
}
- public static Pair<String, String> getExternalDatasetInfo(UnnestMapOperator op) throws AlgebricksException {
+ public static Pair<DataverseName, String> getExternalDatasetInfo(UnnestMapOperator op) {
AbstractFunctionCallExpression unnestExpr = (AbstractFunctionCallExpression) op.getExpressionRef().getValue();
- String dataverseName = AccessMethodUtils.getStringConstant(unnestExpr.getArguments().get(0));
+ DataverseName dataverseName = DataverseName
+ .createFromCanonicalForm(AccessMethodUtils.getStringConstant(unnestExpr.getArguments().get(0)));
String datasetName = AccessMethodUtils.getStringConstant(unnestExpr.getArguments().get(1));
return new Pair<>(dataverseName, datasetName);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index 6c258e4..0b75be6 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -30,6 +30,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.DataSourceIndex;
@@ -147,7 +148,7 @@
DataSource datasetSource = (DataSource) primaryIndexModificationOp.getDataSource();
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
- String dataverseName = datasetSource.getId().getDataverseName();
+ DataverseName dataverseName = datasetSource.getId().getDataverseName();
String datasetName = datasetSource.getId().getDatasourceName();
Dataset dataset = mp.findDataset(dataverseName, datasetName);
if (dataset == null) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixMemoryRequirementsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixMemoryRequirementsRule.java
index 91dfe7f..f4f8fa2 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixMemoryRequirementsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixMemoryRequirementsRule.java
@@ -22,13 +22,13 @@
import java.util.Set;
import java.util.function.Predicate;
+import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.DataSourceId;
import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.optimizer.base.AsterixOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
import org.apache.hyracks.algebricks.rewriter.rules.SetMemoryRequirementsRule;
@@ -83,7 +83,7 @@
private static boolean isMinMemoryBudgetFunction(DataSourceId dsId) {
return BuiltinFunctions.builtinFunctionHasProperty(
- new FunctionIdentifier(dsId.getDataverseName(), dsId.getDatasourceName()),
+ FunctionSignature.createFunctionIdentifier(dsId.getDataverseName(), dsId.getDatasourceName()),
BuiltinFunctions.DataSourceFunctionProperty.MIN_MEMORY_BUDGET);
}
}
\ No newline at end of file
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
index 6583342..a85ed90 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.dataflow.data.common.ExpressionTypeComputer;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
@@ -1095,12 +1096,8 @@
if (dataSourceScanOp.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
return null;
}
- Pair<String, String> datasetInfo = AnalysisUtil.getDatasetInfo((DataSourceScanOperator) dataSourceScanOp);
- String dataverseName = datasetInfo.first;
- String datasetName = datasetInfo.second;
-
- Index idxUsedInUnnestMap = metadataProvider.getIndex(dataverseName, datasetName, datasetName);
- return idxUsedInUnnestMap;
+ Pair<DataverseName, String> datasetInfo =
+ AnalysisUtil.getDatasetInfo((DataSourceScanOperator) dataSourceScanOp);
+ return metadataProvider.getIndex(datasetInfo.first, datasetInfo.second, datasetInfo.second);
}
-
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
index cba4411..a4795ba 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
@@ -21,6 +21,7 @@
import java.util.List;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.commons.lang3.mutable.Mutable;
@@ -38,7 +39,7 @@
private static final int NUM_PARAMS = 6;
protected String indexName;
protected IndexType indexType;
- protected String dataverseName;
+ protected DataverseName dataverseName;
protected String datasetName;
protected boolean retainInput;
protected boolean requiresBroadcast;
@@ -48,8 +49,8 @@
// Enable creation of an empty object and fill members using setters
}
- public AccessMethodJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
- boolean retainInput, boolean requiresBroadcast) {
+ public AccessMethodJobGenParams(String indexName, IndexType indexType, DataverseName dataverseName,
+ String datasetName, boolean retainInput, boolean requiresBroadcast) {
this.indexName = indexName;
this.indexType = indexType;
this.dataverseName = dataverseName;
@@ -62,7 +63,7 @@
public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
funcArgs.add(new MutableObject<>(AccessMethodUtils.createStringConstant(indexName)));
funcArgs.add(new MutableObject<>(AccessMethodUtils.createInt32Constant(indexType.ordinal())));
- funcArgs.add(new MutableObject<>(AccessMethodUtils.createStringConstant(dataverseName)));
+ funcArgs.add(new MutableObject<>(AccessMethodUtils.createStringConstant(dataverseName.getCanonicalForm())));
funcArgs.add(new MutableObject<>(AccessMethodUtils.createStringConstant(datasetName)));
funcArgs.add(new MutableObject<>(AccessMethodUtils.createBooleanConstant(retainInput)));
funcArgs.add(new MutableObject<>(AccessMethodUtils.createBooleanConstant(requiresBroadcast)));
@@ -71,7 +72,7 @@
public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
indexName = AccessMethodUtils.getStringConstant(funcArgs.get(0));
indexType = IndexType.values()[AccessMethodUtils.getInt32Constant(funcArgs.get(1))];
- dataverseName = AccessMethodUtils.getStringConstant(funcArgs.get(2));
+ dataverseName = DataverseName.createFromCanonicalForm(AccessMethodUtils.getStringConstant(funcArgs.get(2)));
datasetName = AccessMethodUtils.getStringConstant(funcArgs.get(3));
retainInput = AccessMethodUtils.getBooleanConstant(funcArgs.get(4));
requiresBroadcast = AccessMethodUtils.getBooleanConstant(funcArgs.get(5));
@@ -86,7 +87,7 @@
return indexType;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
index 1357fd2..38072f3 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -1764,7 +1764,7 @@
context.computeAndSetTypeEnvironmentForOperator(order);
List<Mutable<ILogicalExpression>> externalLookupArgs = new ArrayList<>();
//Add dataverse to the arguments
- AccessMethodUtils.addStringArg(dataset.getDataverseName(), externalLookupArgs);
+ AccessMethodUtils.addStringArg(dataset.getDataverseName().getCanonicalForm(), externalLookupArgs);
//Add dataset to the arguments
AccessMethodUtils.addStringArg(dataset.getDatasetName(), externalLookupArgs);
//Add PK vars to the arguments
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
index 26501c2..2eefc8c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
@@ -45,7 +46,7 @@
super();
}
- public BTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+ public BTreeJobGenParams(String indexName, IndexType indexType, DataverseName dataverseName, String datasetName,
boolean retainInput, boolean requiresBroadcast) {
super(indexName, indexType, dataverseName, datasetName, retainInput, requiresBroadcast);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
index 109812b..95b7e17 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.DatasetDataSource;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -90,10 +91,10 @@
Dataset dataset = getDataset(op, context);
List<String> filterFieldName = null;
ARecordType recType = null;
+ MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) {
filterFieldName = DatasetUtil.getFilterField(dataset);
- IAType itemType = ((MetadataProvider) context.getMetadataProvider())
- .findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
if (itemType.getTypeTag() == ATypeTag.OBJECT) {
recType = (ARecordType) itemType;
}
@@ -111,8 +112,7 @@
List<IOptimizableFuncExpr> optFuncExprs = new ArrayList<>();
if (!analysisCtx.getMatchedFuncExprs().isEmpty()) {
- List<Index> datasetIndexes = ((MetadataProvider) context.getMetadataProvider())
- .getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
for (int i = 0; i < analysisCtx.getMatchedFuncExprs().size(); i++) {
IOptimizableFuncExpr optFuncExpr = analysisCtx.getMatchedFuncExpr(i);
@@ -417,10 +417,11 @@
if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
FunctionIdentifier fid = f.getFunctionIdentifier();
- String dataverseName;
+ DataverseName dataverseName;
String datasetName;
if (BuiltinFunctions.EXTERNAL_LOOKUP.equals(fid)) {
- dataverseName = AccessMethodUtils.getStringConstant(f.getArguments().get(0));
+ dataverseName = DataverseName
+ .createFromCanonicalForm(AccessMethodUtils.getStringConstant(f.getArguments().get(0)));
datasetName = AccessMethodUtils.getStringConstant(f.getArguments().get(1));
} else if (fid.equals(BuiltinFunctions.INDEX_SEARCH)) {
AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
index 5eb0fc6..64f9068 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
@@ -24,6 +24,7 @@
import java.util.Set;
import org.apache.asterix.common.config.DatasetConfig;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.DatasetDataSource;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -163,7 +164,8 @@
return null;
}
String indexName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 0);
- String dataverseName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 2);
+ DataverseName dataverseName = DataverseName
+ .createFromCanonicalForm(ConstantExpressionUtil.getStringArgument(functionCallExpression, 2));
String datasetName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 3);
Index index = ((MetadataProvider) metadataProvider).getIndex(dataverseName, datasetName, indexName);
if (!index.isPrimaryIndex()) {
@@ -257,6 +259,7 @@
*/
private Pair<Dataset, Index> findDatasetAndSecondaryPrimaryIndex(AbstractScanOperator scanOperator,
BTreeJobGenParams originalBTreeParameters, IOptimizationContext context) throws AlgebricksException {
+ MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
// #1. get the dataset
Dataset dataset;
// case 1: dataset scan
@@ -275,12 +278,11 @@
if (originalBTreeParameters.isEqCondition()) {
return null;
}
- dataset = ((MetadataProvider) context.getMetadataProvider())
- .findDataset(originalBTreeParameters.getDataverseName(), originalBTreeParameters.getDatasetName());
+ dataset = mp.findDataset(originalBTreeParameters.getDataverseName(),
+ originalBTreeParameters.getDatasetName());
}
// #2. get all indexes and look for the primary one
- List<Index> indexes = ((MetadataProvider) context.getMetadataProvider())
- .getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
for (Index index : indexes) {
if (index.getKeyFieldNames().isEmpty()) {
return Pair.of(dataset, index);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
index 1ddd1b5..26350d8 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.optimizer.rules.am.InvertedIndexAccessMethod.SearchModifierType;
import org.apache.commons.lang3.mutable.Mutable;
@@ -56,8 +57,8 @@
public InvertedIndexJobGenParams() {
}
- public InvertedIndexJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
- boolean retainInput, boolean requiresBroadcast) {
+ public InvertedIndexJobGenParams(String indexName, IndexType indexType, DataverseName dataverseName,
+ String datasetName, boolean retainInput, boolean requiresBroadcast) {
super(indexName, indexType, dataverseName, datasetName, retainInput, requiresBroadcast);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
index 2f1db8c..3cbba0d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
@@ -236,7 +237,7 @@
* Also sets recordType to be the type of that dataset.
*/
public boolean setDatasetAndTypeMetadata(MetadataProvider metadataProvider) throws AlgebricksException {
- String dataverseName = null;
+ DataverseName dataverseName = null;
String datasetName = null;
Dataset ds = null;
@@ -267,7 +268,7 @@
return false;
}
}
- Pair<String, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScan);
+ Pair<DataverseName, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScan);
dataverseName = datasetInfo.first;
datasetName = datasetInfo.second;
break;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
index cccb6ef..d58766e 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
@@ -37,7 +38,7 @@
public RTreeJobGenParams() {
}
- public RTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+ public RTreeJobGenParams(String indexName, IndexType indexType, DataverseName dataverseName, String datasetName,
boolean retainInput, boolean requiresBroadcast) {
super(indexName, indexType, dataverseName, datasetName, retainInput, requiresBroadcast);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 967b3ad..b7b464d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.statement.DatasetDecl;
import org.apache.asterix.lang.common.statement.DataverseDropStatement;
@@ -114,14 +115,14 @@
boolean invalidOperation = false;
String message = null;
- String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
+ DataverseName dataverseName = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
switch (stmt.getKind()) {
case INSERT:
InsertStatement insertStmt = (InsertStatement) stmt;
if (insertStmt.getDataverseName() != null) {
- dataverse = insertStmt.getDataverseName().getValue();
+ dataverseName = insertStmt.getDataverseName();
}
- invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName);
if (invalidOperation) {
message = "Insert operation is not permitted in dataverse "
+ MetadataConstants.METADATA_DATAVERSE_NAME;
@@ -131,9 +132,9 @@
case DELETE:
DeleteStatement deleteStmt = (DeleteStatement) stmt;
if (deleteStmt.getDataverseName() != null) {
- dataverse = deleteStmt.getDataverseName().getValue();
+ dataverseName = deleteStmt.getDataverseName();
}
- invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName);
if (invalidOperation) {
message = "Delete operation is not permitted in dataverse "
+ MetadataConstants.METADATA_DATAVERSE_NAME;
@@ -142,19 +143,19 @@
case DATAVERSE_DROP:
DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
- invalidOperation =
- MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName().getValue());
+ dataverseName = dvDropStmt.getDataverseName();
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName);
if (invalidOperation) {
- message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+ message = "Cannot drop dataverse:" + dataverseName;
}
break;
case DATASET_DROP:
DropDatasetStatement dropStmt = (DropDatasetStatement) stmt;
if (dropStmt.getDataverseName() != null) {
- dataverse = dropStmt.getDataverseName().getValue();
+ dataverseName = dropStmt.getDataverseName();
}
- invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName);
if (invalidOperation) {
message = "Cannot drop a dataset belonging to the dataverse:"
+ MetadataConstants.METADATA_DATAVERSE_NAME;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
index 5950329..e85fec8 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -20,6 +20,7 @@
import java.util.Map;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.management.FeedConnectionRequest;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Statement;
@@ -189,7 +190,7 @@
public interface ICompiledDmlStatement extends ICompiledStatement {
- String getDataverseName();
+ DataverseName getDataverseName();
String getDatasetName();
}
@@ -210,7 +211,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return index.getDataverseName();
}
@@ -230,13 +231,13 @@
public static class CompiledLoadFromFileStatement extends AbstractCompiledStatement
implements ICompiledDmlStatement {
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String datasetName;
private final boolean alreadySorted;
private final String adapter;
private final Map<String, String> properties;
- public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
+ public CompiledLoadFromFileStatement(DataverseName dataverseName, String datasetName, String adapter,
Map<String, String> properties, boolean alreadySorted) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -246,7 +247,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -274,14 +275,14 @@
}
public static class CompiledInsertStatement extends AbstractCompiledStatement implements ICompiledDmlStatement {
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String datasetName;
private final Query query;
private final int varCounter;
private final VariableExpr var;
private final Expression returnExpression;
- public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter,
+ public CompiledInsertStatement(DataverseName dataverseName, String datasetName, Query query, int varCounter,
VariableExpr var, Expression returnExpression) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -292,7 +293,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -325,7 +326,7 @@
public static class CompiledUpsertStatement extends CompiledInsertStatement {
- public CompiledUpsertStatement(String dataverseName, String datasetName, Query query, int varCounter,
+ public CompiledUpsertStatement(DataverseName dataverseName, String datasetName, Query query, int varCounter,
VariableExpr var, Expression returnExpression) {
super(dataverseName, datasetName, query, varCounter, var, returnExpression);
}
@@ -348,8 +349,8 @@
}
@Override
- public String getDataverseName() {
- return request.getReceivingFeedId().getDataverse();
+ public DataverseName getDataverseName() {
+ return request.getReceivingFeedId().getDataverseName();
}
public String getFeedName() {
@@ -372,14 +373,14 @@
}
public static class CompiledDeleteStatement extends AbstractCompiledStatement implements ICompiledDmlStatement {
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String datasetName;
private final Expression condition;
private final int varCounter;
private final Query query;
- public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName, Expression condition,
- int varCounter, Query query) {
+ public CompiledDeleteStatement(VariableExpr var, DataverseName dataverseName, String datasetName,
+ Expression condition, int varCounter, Query query) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
this.condition = condition;
@@ -393,7 +394,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -417,15 +418,15 @@
}
public static class CompiledCompactStatement extends AbstractCompiledStatement {
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String datasetName;
- public CompiledCompactStatement(String dataverseName, String datasetName) {
+ public CompiledCompactStatement(DataverseName dataverseName, String datasetName) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutor.java
index 3fdcc33..09b8c1e 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutor.java
@@ -35,6 +35,7 @@
import org.apache.asterix.common.api.IResponsePrinter;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.IStatementRewriter;
import org.apache.asterix.lang.common.statement.Query;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -250,12 +251,12 @@
/**
* returns the active dataverse for an entity or a statement
*
- * @param dataverse:
+ * @param dataverseName
* the entity or statement dataverse
* @return
* returns the passed dataverse if not null, the active dataverse otherwise
*/
- String getActiveDataverseName(String dataverse);
+ DataverseName getActiveDataverseName(DataverseName dataverseName);
/**
* Gets the execution plans that are generated during query compilation
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 5851467..3940364 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -38,6 +38,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionConstants;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Expression.Kind;
import org.apache.asterix.lang.common.base.ILangExpression;
@@ -651,7 +652,7 @@
return distResultOperator;
}
- private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, String dataverseName,
+ private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, DataverseName dataverseName,
String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
@@ -887,7 +888,7 @@
private AbstractFunctionCallExpression lookupUserDefinedFunction(FunctionSignature signature,
List<Mutable<ILogicalExpression>> args, SourceLocation sourceLoc) throws CompilationException {
try {
- if (signature.getNamespace() == null) {
+ if (signature.getDataverseName() == null) {
return null;
}
Function function =
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
index 9c6e75e..9e01f6a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
@@ -29,6 +29,7 @@
import org.apache.asterix.common.annotations.IRecordFieldDataGen;
import org.apache.asterix.common.annotations.RecordDataGenAnnotation;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.expression.OrderedListTypeDefinition;
import org.apache.asterix.lang.common.expression.RecordTypeDefinition;
import org.apache.asterix.lang.common.expression.RecordTypeDefinition.RecordKind;
@@ -56,13 +57,14 @@
}
public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
- String typeName, String typeDataverse) throws AlgebricksException {
+ String typeName, DataverseName typeDataverse) throws AlgebricksException {
Map<TypeSignature, IAType> typeMap = new HashMap<>();
return computeTypes(mdTxnCtx, typeExpr, typeName, typeDataverse, typeMap);
}
public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
- String typeName, String typeDataverse, Map<TypeSignature, IAType> typeMap) throws AlgebricksException {
+ String typeName, DataverseName typeDataverse, Map<TypeSignature, IAType> typeMap)
+ throws AlgebricksException {
Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes = new HashMap<>();
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes = new HashMap<>();
Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences = new HashMap<>();
@@ -82,7 +84,7 @@
private static void firstPass(TypeExpression typeExpr, String typeName, Map<TypeSignature, IAType> typeMap,
Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
+ Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, DataverseName typeDataverse)
throws AlgebricksException {
if (BuiltinTypeMap.getBuiltinType(typeName) != null) {
@@ -92,9 +94,10 @@
switch (typeExpr.getTypeKind()) {
case TYPEREFERENCE: {
TypeReferenceExpression tre = (TypeReferenceExpression) typeExpr;
- IAType t = solveTypeReference(new TypeSignature(
- tre.getIdent().first == null ? typeDataverse : tre.getIdent().first.getValue(),
- tre.getIdent().second.getValue()), typeMap);
+ IAType t = solveTypeReference(
+ new TypeSignature(tre.getIdent().first == null ? typeDataverse : tre.getIdent().first,
+ tre.getIdent().second.getValue()),
+ typeMap);
if (t != null) {
typeMap.put(typeSignature, t);
} else {
@@ -132,12 +135,12 @@
private static void secondPass(MetadataTransactionContext mdTxnCtx, Map<TypeSignature, IAType> typeMap,
Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
+ Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, DataverseName typeDataverse)
throws AlgebricksException {
// solve remaining top level references
for (TypeSignature typeSignature : incompleteTopLevelTypeReferences.keySet()) {
IAType t;
- Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
+ Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getDataverseName(),
typeSignature.getName());
if (dt == null) {
throw new AlgebricksException("Could not resolve type " + typeSignature);
@@ -181,7 +184,7 @@
IAType t;
Datatype dt;
if (MetadataManager.INSTANCE != null) {
- dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
+ dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getDataverseName(),
typeSignature.getName());
if (dt == null) {
throw new AlgebricksException("Could not resolve type " + typeSignature);
@@ -198,7 +201,7 @@
private static AOrderedListType computeOrderedListType(TypeSignature typeSignature, OrderedListTypeDefinition oltd,
Map<TypeSignature, IAType> typeMap, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
+ Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, DataverseName defaultDataverse)
throws AlgebricksException {
TypeExpression tExpr = oltd.getItemTypeExpression();
String typeName = typeSignature != null ? typeSignature.getName() : null;
@@ -210,7 +213,7 @@
private static AUnorderedListType computeUnorderedListType(TypeSignature typeSignature,
UnorderedListTypeDefinition ultd, Map<TypeSignature, IAType> typeMap,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaulDataverse)
+ Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, DataverseName defaulDataverse)
throws AlgebricksException {
TypeExpression tExpr = ultd.getItemTypeExpression();
String typeName = typeSignature != null ? typeSignature.getName() : null;
@@ -222,7 +225,7 @@
private static void setCollectionItemType(TypeExpression tExpr, Map<TypeSignature, IAType> typeMap,
Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, AbstractCollectionType act,
- String defaultDataverse) throws AlgebricksException {
+ DataverseName defaultDataverse) throws AlgebricksException {
switch (tExpr.getTypeKind()) {
case ORDEREDLIST: {
OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) tExpr;
@@ -247,9 +250,9 @@
}
case TYPEREFERENCE: {
TypeReferenceExpression tre = (TypeReferenceExpression) tExpr;
- TypeSignature signature = new TypeSignature(
- tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first.getValue(),
- tre.getIdent().second.getValue());
+ TypeSignature signature =
+ new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first,
+ tre.getIdent().second.getValue());
IAType tref = solveTypeReference(signature, typeMap);
if (tref != null) {
act.setItemType(tref);
@@ -266,10 +269,10 @@
private static void addIncompleteCollectionTypeReference(AbstractCollectionType collType,
TypeReferenceExpression tre, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
- String defaultDataverse) {
+ DataverseName defaultDataverse) {
String typeName = tre.getIdent().second.getValue();
- TypeSignature typeSignature = new TypeSignature(
- tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first.getValue(), typeName);
+ TypeSignature typeSignature =
+ new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first, typeName);
List<AbstractCollectionType> typeList = incompleteItemTypes.get(typeSignature);
if (typeList == null) {
typeList = new LinkedList<>();
@@ -295,17 +298,16 @@
}
private static void addIncompleteTopLevelTypeReference(TypeReferenceExpression tre,
- Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String defaultDataverse) {
+ Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, DataverseName defaultDataverse) {
String name = tre.getIdent().second.getValue();
- TypeSignature typeSignature = new TypeSignature(
- tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first.getValue(), name);
- List<TypeSignature> refList = incompleteTopLevelTypeReferences.get(name);
+ TypeSignature typeSignature =
+ new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first, name);
+ List<TypeSignature> refList = incompleteTopLevelTypeReferences.get(typeSignature);
if (refList == null) {
refList = new LinkedList<>();
- incompleteTopLevelTypeReferences.put(
- new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first.getValue(),
- tre.getIdent().second.getValue()),
- refList);
+ incompleteTopLevelTypeReferences
+ .put(new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first,
+ tre.getIdent().second.getValue()), refList);
}
refList.add(typeSignature);
}
@@ -321,7 +323,7 @@
private static ARecordType computeRecordType(TypeSignature typeSignature, RecordTypeDefinition rtd,
Map<TypeSignature, IAType> typeMap, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
- Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse)
+ Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, DataverseName defaultDataverse)
throws AlgebricksException {
List<String> names = rtd.getFieldNames();
int n = names.size();
@@ -346,9 +348,9 @@
switch (texpr.getTypeKind()) {
case TYPEREFERENCE: {
TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
- TypeSignature signature = new TypeSignature(
- tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first.getValue(),
- tre.getIdent().second.getValue());
+ TypeSignature signature =
+ new TypeSignature(tre.getIdent().first == null ? defaultDataverse : tre.getIdent().first,
+ tre.getIdent().second.getValue());
IAType tref = solveTypeReference(signature, typeMap);
if (tref != null) {
if (!rtd.getOptionableFields().get(j)) { // not nullable
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index e422c24..2a08511 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -27,6 +27,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -79,7 +80,7 @@
PrintWriter out = response.writer();
try {
ObjectNode jsonResponse = OBJECT_MAPPER.createObjectNode();
- String dataverseName = request.getParameter("dataverseName");
+ DataverseName dataverseName = ServletUtil.getDataverseName(request, "dataverseName");
String datasetName = request.getParameter("datasetName");
if (dataverseName == null || datasetName == null) {
jsonResponse.put("error", "Parameter dataverseName or datasetName is null,");
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
index 320c7aa..02d75f9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
@@ -37,11 +37,13 @@
import org.apache.asterix.app.active.ActiveNotificationHandler;
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Dataverse;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.rebalance.NoOpDatasetRebalanceCallback;
import org.apache.asterix.utils.RebalanceUtil;
import org.apache.commons.lang3.StringUtils;
@@ -66,7 +68,6 @@
*/
public class RebalanceApiServlet extends AbstractServlet {
private static final Logger LOGGER = LogManager.getLogger();
- private static final String METADATA = "Metadata";
private final ICcApplicationContext appCtx;
// One-at-a-time thread executor, for rebalance tasks.
@@ -103,7 +104,7 @@
protected void post(IServletRequest request, IServletResponse response) {
try {
// Gets dataverse, dataset, and target nodes for rebalance.
- String dataverseName = request.getParameter("dataverseName");
+ DataverseName dataverseName = ServletUtil.getDataverseName(request, "dataverseName");
String datasetName = request.getParameter("datasetName");
String nodes = request.getParameter("nodes");
@@ -127,7 +128,7 @@
}
// Does not allow rebalancing a metadata dataset.
- if (METADATA.equals(dataverseName)) {
+ if (MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName)) {
sendResponse(response, HttpResponseStatus.BAD_REQUEST, "cannot rebalance a metadata dataset");
return;
}
@@ -154,7 +155,7 @@
}
// Schedules a rebalance task.
- private synchronized CountDownLatch scheduleRebalance(String dataverseName, String datasetName,
+ private synchronized CountDownLatch scheduleRebalance(DataverseName dataverseName, String datasetName,
String[] targetNodes, IServletResponse response) {
CountDownLatch terminated = new CountDownLatch(1);
Future<Void> task =
@@ -165,8 +166,8 @@
}
// Performs the actual rebalance.
- private Void doRebalance(String dataverseName, String datasetName, String[] targetNodes, IServletResponse response,
- CountDownLatch terminated) {
+ private Void doRebalance(DataverseName dataverseName, String datasetName, String[] targetNodes,
+ IServletResponse response, CountDownLatch terminated) {
try {
// Sets the content type.
HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
@@ -204,7 +205,7 @@
}
// Lists all datasets that should be rebalanced in a given datavserse.
- private List<Dataset> getAllDatasetsForRebalance(String dataverseName) throws Exception {
+ private List<Dataset> getAllDatasetsForRebalance(DataverseName dataverseName) throws Exception {
List<Dataset> datasets;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
@@ -235,14 +236,15 @@
}
// Gets all datasets in a dataverse for the rebalance operation, with a given metadata transaction context.
- private List<Dataset> getDatasetsInDataverseForRebalance(String dvName, MetadataTransactionContext mdTxnCtx)
+ private List<Dataset> getDatasetsInDataverseForRebalance(DataverseName dvName, MetadataTransactionContext mdTxnCtx)
throws Exception {
- return METADATA.equals(dvName) ? Collections.emptyList()
+ return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvName) ? Collections.emptyList()
: MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
}
// Rebalances a given dataset.
- private void rebalanceDataset(String dataverseName, String datasetName, String[] targetNodes) throws Exception {
+ private void rebalanceDataset(DataverseName dataverseName, String datasetName, String[] targetNodes)
+ throws Exception {
IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
try {
@@ -251,8 +253,8 @@
activeNotificationHandler.suspend(metadataProvider);
try {
IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
- lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
- dataverseName + '.' + datasetName);
+ lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(), dataverseName,
+ datasetName);
RebalanceUtil.rebalance(dataverseName, datasetName, new LinkedHashSet<>(Arrays.asList(targetNodes)),
metadataProvider, hcc, NoOpDatasetRebalanceCallback.INSTANCE);
} finally {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ServletUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ServletUtil.java
index 2308ea3..9e85c82 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ServletUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ServletUtil.java
@@ -20,13 +20,16 @@
import static org.apache.asterix.api.http.server.ServletConstants.RESULTSET_ATTR;
+import java.util.List;
import java.util.Map;
import org.apache.asterix.app.result.ResultReader;
import org.apache.asterix.common.api.IApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.result.IResultSet;
import org.apache.hyracks.client.result.ResultSet;
+import org.apache.hyracks.http.api.IServletRequest;
public class ServletUtil {
static IResultSet getResultSet(IHyracksClientConnection hcc, IApplicationContext appCtx,
@@ -46,4 +49,9 @@
}
return resultSet;
}
+
+ public static DataverseName getDataverseName(IServletRequest request, String dataverseParameterName) {
+ List<String> values = request.getParameterValues(dataverseParameterName);
+ return values != null ? DataverseName.create(values) : null;
+ }
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UdfApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UdfApiServlet.java
index 79b78a8..c58651f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UdfApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UdfApiServlet.java
@@ -33,6 +33,8 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICCMessageBroker;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.http.api.IServletRequest;
@@ -61,28 +63,28 @@
this.broker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
}
- private String[] getResource(FullHttpRequest req) throws IllegalArgumentException {
+ private Pair<String, DataverseName> getResource(FullHttpRequest req) throws IllegalArgumentException {
String[] path = new QueryStringDecoder(req.uri()).path().split("/");
if (path.length != 5) {
throw new IllegalArgumentException("Invalid resource.");
}
String resourceName = path[path.length - 1];
- String dataverseName = path[path.length - 2];
- return new String[] { resourceName, dataverseName };
+ DataverseName dataverseName = DataverseName.createSinglePartName(path[path.length - 2]); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
+ return new Pair<>(resourceName, dataverseName);
}
@Override
protected void post(IServletRequest request, IServletResponse response) {
FullHttpRequest req = request.getHttpRequest();
- String[] resourceNames;
+ Pair<String, DataverseName> resourceNames;
try {
resourceNames = getResource(req);
} catch (IllegalArgumentException e) {
response.setStatus(HttpResponseStatus.BAD_REQUEST);
return;
}
- String resourceName = resourceNames[0];
- String dataverse = resourceNames[1];
+ String resourceName = resourceNames.first;
+ DataverseName dataverse = resourceNames.second;
File udf = null;
try {
File workingDir = new File(appCtx.getServiceContext().getServerCtx().getBaseDir().getAbsolutePath(),
@@ -101,7 +103,7 @@
}
}
IHyracksClientConnection hcc = appCtx.getHcc();
- DeploymentId udfName = new DeploymentId(dataverse + "." + resourceName);
+ DeploymentId udfName = new DeploymentId(dataverse.getCanonicalForm() + "." + resourceName); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
ClassLoader cl = appCtx.getLibraryManager().getLibraryClassLoader(dataverse, resourceName);
if (cl != null) {
deleteUdf(dataverse, resourceName);
@@ -129,27 +131,27 @@
}
- private void deleteUdf(String dataverse, String resourceName) throws Exception {
+ private void deleteUdf(DataverseName dataverse, String resourceName) throws Exception {
long reqId = broker.newRequestId();
List<INcAddressedMessage> requests = new ArrayList<>();
List<String> ncs = new ArrayList<>(appCtx.getClusterStateManager().getParticipantNodes());
ncs.forEach(s -> requests.add(new DeleteUdfMessage(dataverse, resourceName, reqId)));
broker.sendSyncRequestToNCs(reqId, ncs, requests, UDF_RESPONSE_TIMEOUT);
appCtx.getLibraryManager().deregisterLibraryClassLoader(dataverse, resourceName);
- appCtx.getHcc().unDeployBinary(new DeploymentId(resourceName));
+ appCtx.getHcc().unDeployBinary(new DeploymentId(resourceName)); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT:why dataverse not used?
}
@Override
protected void delete(IServletRequest request, IServletResponse response) {
- String[] resourceNames;
+ Pair<String, DataverseName> resourceNames;
try {
resourceNames = getResource(request.getHttpRequest());
} catch (IllegalArgumentException e) {
response.setStatus(HttpResponseStatus.BAD_REQUEST);
return;
}
- String resourceName = resourceNames[0];
- String dataverse = resourceNames[1];
+ String resourceName = resourceNames.first;
+ DataverseName dataverse = resourceNames.second;
try {
deleteUdf(dataverse, resourceName);
} catch (Exception e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
index a572e28..0a7cad6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
@@ -32,9 +32,9 @@
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
@@ -280,13 +280,13 @@
// write lock the listener
// exclusive lock all the datasets (except the target dataset)
IMetadataLockManager lockManager = metadataProvider.getApplicationContext().getMetadataLockManager();
- String dataverseName = listener.getEntityId().getDataverse();
+ DataverseName dataverseName = listener.getEntityId().getDataverseName();
String entityName = listener.getEntityId().getEntityName();
if (LOGGER.isEnabled(level)) {
LOGGER.log(level, "Suspending " + listener.getEntityId());
}
LOGGER.log(level, "Acquiring locks");
- lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(), dataverseName + '.' + entityName);
+ lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(), dataverseName, entityName);
List<Dataset> datasets = ((ActiveEntityEventsListener) listener).getDatasets();
for (Dataset dataset : datasets) {
if (targetDataset != null && targetDataset.equals(dataset)) {
@@ -294,7 +294,7 @@
continue;
}
lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ dataset.getDataverseName(), dataset.getDatasetName());
}
LOGGER.log(level, "locks acquired");
((ActiveEntityEventsListener) listener).suspend(metadataProvider);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
index a1989fc..6efda9f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/RecoveryTask.java
@@ -21,6 +21,7 @@
import java.util.concurrent.Callable;
import org.apache.asterix.active.ActivityState;
+import org.apache.asterix.active.EntityId;
import org.apache.asterix.active.IRetryPolicyFactory;
import org.apache.asterix.active.NoRetryPolicyFactory;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
@@ -29,7 +30,6 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.utils.MetadataLockUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -173,12 +173,13 @@
}
protected void acquireRecoveryLocks(IMetadataLockManager lockManager) throws AlgebricksException {
- lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(),
- listener.getEntityId().getDataverse() + '.' + listener.getEntityId().getEntityName());
+ EntityId entityId = listener.getEntityId();
+ lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(), entityId.getDataverseName(),
+ entityId.getEntityName());
for (Dataset dataset : listener.getDatasets()) {
lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dataset.getDataverseName());
- lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(), dataset.getDataverseName(),
+ dataset.getDatasetName());
}
}
@@ -187,11 +188,12 @@
}
protected void acquirePostRecoveryLocks(IMetadataLockManager lockManager) throws AlgebricksException {
- lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(),
- listener.getEntityId().getDataverse() + '.' + listener.getEntityId().getEntityName());
+ EntityId entityId = listener.getEntityId();
+ lockManager.acquireActiveEntityWriteLock(metadataProvider.getLocks(), entityId.getDataverseName(),
+ entityId.getEntityName());
for (Dataset dataset : listener.getDatasets()) {
- MetadataLockUtil.modifyDatasetBegin(lockManager, metadataProvider.getLocks(), dataset.getDatasetName(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ MetadataLockUtil.modifyDatasetBegin(lockManager, metadataProvider.getLocks(), dataset.getDataverseName(),
+ dataset.getDatasetName());
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
index a989941..6bb9bbd 100755
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -36,6 +36,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
import org.apache.asterix.external.library.ExternalLibrary;
@@ -64,7 +65,7 @@
public static void setUpExternaLibrary(ILibraryManager externalLibraryManager, boolean isMetadataNode,
String libraryPath) throws Exception {
// start by un-installing removed libraries (Metadata Node only)
- Map<String, List<String>> uninstalledLibs = null;
+ Map<DataverseName, List<String>> uninstalledLibs = null;
if (isMetadataNode) {
uninstalledLibs = uninstallLibraries();
}
@@ -72,7 +73,7 @@
// get the directory of the to be installed libraries
String[] pathSplit = libraryPath.split("\\.");
String[] dvSplit = pathSplit[pathSplit.length - 2].split("/");
- String dataverse = dvSplit[dvSplit.length - 1];
+ DataverseName dataverse = DataverseName.createSinglePartName(dvSplit[dvSplit.length - 1]); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
String name = pathSplit[pathSplit.length - 1].trim();
File installLibDir = new File(libraryPath);
@@ -104,8 +105,8 @@
* @return a map from dataverse -> list of uninstalled libraries.
* @throws Exception
*/
- private static Map<String, List<String>> uninstallLibraries() throws Exception {
- Map<String, List<String>> uninstalledLibs = new HashMap<>();
+ private static Map<DataverseName, List<String>> uninstallLibraries() throws Exception {
+ Map<DataverseName, List<String>> uninstalledLibs = new HashMap<>();
// get the directory of the un-install libraries
File uninstallLibDir = getLibraryUninstallDir();
String[] uninstallLibNames;
@@ -116,7 +117,7 @@
for (String uninstallLibName : uninstallLibNames) {
// Get the <dataverse name - library name> pair
String[] components = uninstallLibName.split("\\.");
- String dataverse = components[0];
+ DataverseName dataverse = DataverseName.createSinglePartName(components[0]); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
String libName = components[1];
// un-install
uninstallLibrary(dataverse, libName);
@@ -147,7 +148,7 @@
* @throws RemoteException
* @throws ACIDException
*/
- public static boolean uninstallLibrary(String dataverse, String libraryName)
+ public static boolean uninstallLibrary(DataverseName dataverse, String libraryName)
throws AsterixException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = null;
try {
@@ -194,7 +195,7 @@
return true;
}
- private static void addLibraryToMetadata(Map<String, List<String>> uninstalledLibs, String dataverse,
+ private static void addLibraryToMetadata(Map<DataverseName, List<String>> uninstalledLibs, DataverseName dataverse,
String libraryName, ExternalLibrary library) throws ACIDException, RemoteException {
// Modify metadata accordingly
List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
@@ -283,8 +284,9 @@
* failure in installing an element does not effect installation of other
* libraries.
*/
- protected static void configureLibrary(ILibraryManager libraryManager, String dataverse, String libraryName,
- final File libraryDir, Map<String, List<String>> uninstalledLibs, boolean isMetadataNode) throws Exception {
+ protected static void configureLibrary(ILibraryManager libraryManager, DataverseName dataverse, String libraryName,
+ final File libraryDir, Map<DataverseName, List<String>> uninstalledLibs, boolean isMetadataNode)
+ throws Exception {
String[] libraryDescriptors = libraryDir.list((dir, name) -> name.endsWith(".xml"));
@@ -318,8 +320,8 @@
* @param libraryPath
* @throws Exception
*/
- protected static void registerClassLoader(ILibraryManager externalLibraryManager, String dataverse, String name,
- String libraryPath) throws Exception {
+ protected static void registerClassLoader(ILibraryManager externalLibraryManager, DataverseName dataverse,
+ String name, String libraryPath) throws Exception {
// get the class loader
URLClassLoader classLoader = getLibraryClassLoader(dataverse, name, libraryPath);
// register it with the external library manager
@@ -343,12 +345,12 @@
/**
* Get the class loader for the library
*
- * @param libraryPath
* @param dataverse
+ * @param libraryPath
* @return
* @throws Exception
*/
- private static URLClassLoader getLibraryClassLoader(String dataverse, String name, String libraryPath)
+ private static URLClassLoader getLibraryClassLoader(DataverseName dataverse, String name, String libraryPath)
throws Exception {
// Get a reference to the library directory
File installDir = new File(libraryPath);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
index f1687b5..c663617 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetResourcesRewriter.java
@@ -21,6 +21,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionConstants;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -44,7 +45,7 @@
public DatasetResourcesDatasource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression f)
throws AlgebricksException {
final SourceLocation loc = f.getSourceLocation();
- String dataverseName = getString(loc, f.getArguments(), 0);
+ DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
String datasetName = getString(loc, f.getArguments(), 1);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
index 590bf54..3b65f96 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DatasetRewriter.java
@@ -24,21 +24,16 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.metadata.declared.DataSource;
import org.apache.asterix.metadata.declared.DataSourceId;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.entities.Dataverse;
-import org.apache.asterix.metadata.utils.DatasetUtil;
-import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.constants.AsterixConstantValue;
import org.apache.asterix.om.functions.IFunctionToDataSourceRewriter;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
-import org.apache.asterix.om.utils.ConstantExpressionUtil;
import org.apache.asterix.optimizer.rules.UnnestToDataScanRule;
import org.apache.asterix.optimizer.rules.util.EquivalenceClassUtils;
import org.apache.commons.lang3.mutable.Mutable;
@@ -47,11 +42,8 @@
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
@@ -73,30 +65,18 @@
throw new CompilationException(ErrorCode.COMPILATION_ERROR, unnest.getSourceLocation(),
"No positional variables are allowed over datasets.");
}
- ILogicalExpression expr = f.getArguments().get(0).getValue();
- if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
- return false;
- }
- ConstantExpression ce = (ConstantExpression) expr;
- IAlgebricksConstantValue acv = ce.getValue();
- if (!(acv instanceof AsterixConstantValue)) {
- return false;
- }
- AsterixConstantValue acv2 = (AsterixConstantValue) acv;
- if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
- return false;
- }
- String datasetArg = ((AString) acv2.getObject()).getStringValue();
+
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- Pair<String, String> datasetReference = parseDatasetReference(metadataProvider, datasetArg);
- String dataverseName = datasetReference.first;
+ Pair<DataverseName, String> datasetReference = FunctionUtil.parseDatasetFunctionArguments(f.getArguments(),
+ metadataProvider.getDefaultDataverseName(), unnest.getSourceLocation());
+ DataverseName dataverseName = datasetReference.first;
String datasetName = datasetReference.second;
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, unnest.getSourceLocation(),
datasetName, dataverseName);
}
- DataSourceId asid = new DataSourceId(dataverseName, datasetName);
+ DataSourceId dsid = new DataSourceId(dataset.getDataverseName(), dataset.getDatasetName());
List<LogicalVariable> variables = new ArrayList<>();
if (dataset.getDatasetType() == DatasetType.INTERNAL) {
int numPrimaryKeys = dataset.getPrimaryKeys().size();
@@ -105,7 +85,7 @@
}
}
variables.add(unnest.getVariable());
- DataSource dataSource = metadataProvider.findDataSource(asid);
+ DataSource dataSource = metadataProvider.findDataSource(dsid);
boolean hasMeta = dataSource.hasMeta();
if (hasMeta) {
variables.add(context.newVar());
@@ -128,25 +108,6 @@
return true;
}
- private Pair<String, String> parseDatasetReference(MetadataProvider metadataProvider, String datasetArg)
- throws AlgebricksException {
- String[] datasetNameComponents = datasetArg.split("\\.");
- String dataverseName;
- String datasetName;
- if (datasetNameComponents.length == 1) {
- Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
- if (defaultDataverse == null) {
- throw new AlgebricksException("Unresolved dataset " + datasetArg + " Dataverse not specified.");
- }
- dataverseName = defaultDataverse.getDataverseName();
- datasetName = datasetNameComponents[0];
- } else {
- dataverseName = datasetNameComponents[0];
- datasetName = datasetNameComponents[1];
- }
- return new Pair<>(dataverseName, datasetName);
- }
-
private void addPrimaryKey(List<LogicalVariable> scanVariables, DataSource dataSource,
IOptimizationContext context) {
List<LogicalVariable> primaryKey = dataSource.getPrimaryKeyVariables(scanVariables);
@@ -160,31 +121,15 @@
public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env, IMetadataProvider<?, ?> mp)
throws AlgebricksException {
AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
- if (f.getArguments().size() != 1) {
- throw new AlgebricksException("dataset arity is 1, not " + f.getArguments().size());
- }
- ILogicalExpression a1 = f.getArguments().get(0).getValue();
- IAType t1 = (IAType) env.getType(a1);
- if (t1.getTypeTag() == ATypeTag.ANY) {
- return BuiltinType.ANY;
- }
- if (t1.getTypeTag() != ATypeTag.STRING) {
- throw new AlgebricksException("Illegal type " + t1 + " for dataset() argument.");
- }
- String datasetArg = ConstantExpressionUtil.getStringConstant(a1);
- if (datasetArg == null) {
- return BuiltinType.ANY;
- }
MetadataProvider metadata = (MetadataProvider) mp;
- Pair<String, String> datasetInfo = DatasetUtil.getDatasetInfo(metadata, datasetArg);
- String dataverseName = datasetInfo.first;
+ Pair<DataverseName, String> datasetInfo = FunctionUtil.parseDatasetFunctionArguments(f.getArguments(),
+ metadata.getDefaultDataverseName(), f.getSourceLocation());
+ DataverseName dataverseName = datasetInfo.first;
String datasetName = datasetInfo.second;
- if (dataverseName == null) {
- throw new AlgebricksException("Unspecified dataverse!");
- }
Dataset dataset = metadata.findDataset(dataverseName, datasetName);
if (dataset == null) {
- throw new AlgebricksException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
+ throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, f.getSourceLocation(), datasetName,
+ dataverseName);
}
String tn = dataset.getItemTypeName();
IAType t2 = metadata.findType(dataset.getItemTypeDataverseName(), tn);
@@ -192,6 +137,5 @@
throw new AlgebricksException("No type for dataset " + datasetName);
}
return t2;
-
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
index 08e5d51..000e910 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/DumpIndexRewriter.java
@@ -21,6 +21,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionConstants;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
@@ -46,7 +47,7 @@
public DumpIndexDatasource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression f)
throws AlgebricksException {
final SourceLocation loc = f.getSourceLocation();
- String dataverseName = getString(loc, f.getArguments(), 0);
+ DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
String datasetName = getString(loc, f.getArguments(), 1);
String indexName = getString(loc, f.getArguments(), 2);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
index bb80406..90374c3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FeedRewriter.java
@@ -23,6 +23,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.watch.FeedActivityDetails;
import org.apache.asterix.external.util.ExternalDataUtils;
import org.apache.asterix.external.util.FeedUtils;
@@ -36,12 +37,10 @@
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
-import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionToDataSourceRewriter;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.ConstantExpressionUtil;
@@ -49,7 +48,6 @@
import org.apache.asterix.translator.util.PlanTranslationUtil;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
@@ -75,16 +73,17 @@
throw new CompilationException(ErrorCode.COMPILATION_ERROR, unnest.getSourceLocation(),
"No positional variables are allowed over feeds.");
}
- String dataverse = ConstantExpressionUtil.getStringArgument(f, 0);
+ DataverseName dataverseName =
+ DataverseName.createFromCanonicalForm(ConstantExpressionUtil.getStringArgument(f, 0));
String sourceFeedName = ConstantExpressionUtil.getStringArgument(f, 1);
String getTargetFeed = ConstantExpressionUtil.getStringArgument(f, 2);
String subscriptionLocation = ConstantExpressionUtil.getStringArgument(f, 3);
String targetDataset = ConstantExpressionUtil.getStringArgument(f, 4);
String outputType = ConstantExpressionUtil.getStringArgument(f, 5);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
- DataSourceId asid = new DataSourceId(dataverse, getTargetFeed);
+ DataSourceId asid = new DataSourceId(dataverseName, getTargetFeed);
String policyName = (String) metadataProvider.getConfig().get(FeedActivityDetails.FEED_POLICY_NAME);
- FeedPolicyEntity policy = metadataProvider.findFeedPolicy(dataverse, policyName);
+ FeedPolicyEntity policy = metadataProvider.findFeedPolicy(dataverseName, policyName);
if (policy == null) {
policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
if (policy == null) {
@@ -116,15 +115,15 @@
return true;
}
- private FeedDataSource createFeedDataSource(DataSourceId aqlId, String targetDataset, String sourceFeedName,
+ private FeedDataSource createFeedDataSource(DataSourceId id, String targetDataset, String sourceFeedName,
String subscriptionLocation, MetadataProvider metadataProvider, FeedPolicyEntity feedPolicy,
String outputType, String locations, LogicalVariable recordVar, IOptimizationContext context,
List<LogicalVariable> pkVars) throws AlgebricksException {
- Dataset dataset = metadataProvider.findDataset(aqlId.getDataverseName(), targetDataset);
- ARecordType feedOutputType = (ARecordType) metadataProvider.findType(aqlId.getDataverseName(), outputType);
- Feed sourceFeed = metadataProvider.findFeed(aqlId.getDataverseName(), sourceFeedName);
+ Dataset dataset = metadataProvider.findDataset(id.getDataverseName(), targetDataset);
+ ARecordType feedOutputType = (ARecordType) metadataProvider.findType(id.getDataverseName(), outputType);
+ Feed sourceFeed = metadataProvider.findFeed(id.getDataverseName(), sourceFeedName);
FeedConnection feedConnection =
- metadataProvider.findFeedConnection(aqlId.getDataverseName(), sourceFeedName, targetDataset);
+ metadataProvider.findFeedConnection(id.getDataverseName(), sourceFeedName, targetDataset);
ARecordType metaType = null;
// Does dataset have meta?
if (dataset.hasMetaPart()) {
@@ -132,12 +131,7 @@
if (metaTypeName == null) {
throw new AlgebricksException("Feed to a dataset with metadata doesn't have meta type specified");
}
- String dataverseName = aqlId.getDataverseName();
- if (metaTypeName.contains(".")) {
- dataverseName = metaTypeName.substring(0, metaTypeName.indexOf('.'));
- metaTypeName = metaTypeName.substring(metaTypeName.indexOf('.') + 1);
- }
- metaType = (ARecordType) metadataProvider.findType(dataverseName, metaTypeName);
+ metaType = (ARecordType) metadataProvider.findType(id.getDataverseName(), metaTypeName);
}
// Is a change feed?
List<IAType> pkTypes = null;
@@ -168,7 +162,7 @@
} else {
keyAccessScalarFunctionCallExpression = null;
}
- FeedDataSource feedDataSource = new FeedDataSource(sourceFeed, aqlId, targetDataset, feedOutputType, metaType,
+ FeedDataSource feedDataSource = new FeedDataSource(sourceFeed, id, targetDataset, feedOutputType, metaType,
pkTypes, keyAccessScalarFunctionCallExpression, sourceFeed.getFeedId(),
FeedRuntimeType.valueOf(subscriptionLocation), locations.split(","), context.getComputationNodeDomain(),
feedConnection);
@@ -184,31 +178,17 @@
throw new AlgebricksException("Incorrect number of arguments -> arity is "
+ BuiltinFunctions.FEED_COLLECT.getArity() + ", not " + f.getArguments().size());
}
- ILogicalExpression a1 = f.getArguments().get(5).getValue();
- IAType t1 = (IAType) env.getType(a1);
- if (t1.getTypeTag() == ATypeTag.ANY) {
- return BuiltinType.ANY;
- }
- if (t1.getTypeTag() != ATypeTag.STRING) {
- throw new AlgebricksException("Illegal type " + t1 + " for feed-ingest argument.");
- }
- String typeArg = ConstantExpressionUtil.getStringConstant(a1);
- if (typeArg == null) {
+ DataverseName dataverseName =
+ DataverseName.createFromCanonicalForm(ConstantExpressionUtil.getStringArgument(f, 0));
+ String outputTypeName = ConstantExpressionUtil.getStringArgument(f, 5);
+ if (outputTypeName == null) {
return BuiltinType.ANY;
}
MetadataProvider metadata = (MetadataProvider) mp;
- Pair<String, String> argInfo = DatasetUtil.getDatasetInfo(metadata, typeArg);
- String dataverseName = argInfo.first;
- String typeName = argInfo.second;
- if (dataverseName == null) {
- throw new AlgebricksException("Unspecified dataverse!");
+ IAType outputType = metadata.findType(dataverseName, outputTypeName);
+ if (outputType == null) {
+ throw new AlgebricksException("Unknown type " + outputTypeName);
}
- IAType t2 = metadata.findType(dataverseName, typeName);
- if (t2 == null) {
- throw new AlgebricksException("Unknown type " + typeName);
- }
- return t2;
-
+ return outputType;
}
-
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
index 1cc0a94..dfe29c2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/FunctionRewriter.java
@@ -25,12 +25,15 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.FunctionDataSource;
+import org.apache.asterix.om.base.AOrderedList;
import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.constants.AsterixConstantValue;
+import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.exceptions.UnsupportedTypeException;
import org.apache.asterix.om.functions.IFunctionToDataSourceRewriter;
import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.utils.ConstantExpressionUtil;
import org.apache.asterix.optimizer.rules.UnnestToDataScanRule;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -40,8 +43,6 @@
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
@@ -96,17 +97,43 @@
protected String getString(SourceLocation loc, List<Mutable<ILogicalExpression>> args, int i)
throws AlgebricksException {
- ConstantExpression ce = (ConstantExpression) args.get(i).getValue();
- IAlgebricksConstantValue acv = ce.getValue();
- if (!(acv instanceof AsterixConstantValue)) {
+ IAObject iaObject = ConstantExpressionUtil.getConstantIaObject(args.get(i).getValue(), null);
+ if (iaObject == null) {
throw new CompilationException(EXPECTED_CONSTANT_VALUE, loc);
}
- AsterixConstantValue acv2 = (AsterixConstantValue) acv;
- final ATypeTag typeTag = acv2.getObject().getType().getTypeTag();
- if (typeTag != ATypeTag.STRING) {
- throw new UnsupportedTypeException(loc, functionId, typeTag);
+ ATypeTag tt = iaObject.getType().getTypeTag();
+ if (tt != ATypeTag.STRING) {
+ throw new UnsupportedTypeException(loc, functionId, tt);
}
- return ((AString) acv2.getObject()).getStringValue();
+ return ((AString) iaObject).getStringValue();
}
+ protected DataverseName getDataverseName(SourceLocation loc, List<Mutable<ILogicalExpression>> args, int i)
+ throws AlgebricksException {
+ IAObject iaObject = ConstantExpressionUtil.getConstantIaObject(args.get(i).getValue(), null);
+ if (iaObject == null) {
+ throw new CompilationException(EXPECTED_CONSTANT_VALUE, loc);
+ }
+ ATypeTag tt = iaObject.getType().getTypeTag();
+ switch (tt) {
+ case STRING:
+ AString str = (AString) iaObject;
+ return DataverseName.createSinglePartName(str.getStringValue());
+ case ARRAY:
+ AOrderedList list = ((AOrderedList) iaObject);
+ int ln = list.size();
+ List<String> parts = new ArrayList<>(ln);
+ for (int j = 0; j < ln; j++) {
+ IAObject item = list.getItem(j);
+ ATypeTag itt = item.getType().getTypeTag();
+ if (itt != ATypeTag.STRING) {
+ throw new UnsupportedTypeException(loc, functionId, itt);
+ }
+ parts.add(((AString) item).getStringValue());
+ }
+ return DataverseName.create(parts);
+ default:
+ throw new UnsupportedTypeException(loc, functionId, tt);
+ }
+ }
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
index 3b89d50..fb0a6d4 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/function/StorageComponentsRewriter.java
@@ -21,6 +21,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionConstants;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -44,7 +45,7 @@
public StorageComponentsDatasource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression f)
throws AlgebricksException {
SourceLocation loc = f.getSourceLocation();
- String dataverseName = getString(loc, f.getArguments(), 0);
+ DataverseName dataverseName = getDataverseName(loc, f.getArguments(), 0);
String datasetName = getString(loc, f.getArguments(), 1);
MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/AbstractUdfMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/AbstractUdfMessage.java
index 90bd2a6..1c212fe 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/AbstractUdfMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/AbstractUdfMessage.java
@@ -23,20 +23,21 @@
import org.apache.asterix.common.messaging.CcIdentifiedMessage;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
import org.apache.asterix.common.messaging.api.INcAddressedMessage;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public abstract class AbstractUdfMessage extends CcIdentifiedMessage implements INcAddressedMessage {
- protected final String dataverseName;
+ protected final DataverseName dataverseName;
protected final String libraryName;
protected static final Logger LOGGER = LogManager.getLogger();
- private static final long serialVersionUID = 2L;
+ private static final long serialVersionUID = 3L;
private final long reqId;
- public AbstractUdfMessage(String dataverseName, String libraryName, long reqId) {
+ public AbstractUdfMessage(DataverseName dataverseName, String libraryName, long reqId) {
this.dataverseName = dataverseName;
this.libraryName = libraryName;
this.reqId = reqId;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/DeleteUdfMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/DeleteUdfMessage.java
index efbc9c1..3fb58fa 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/DeleteUdfMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/DeleteUdfMessage.java
@@ -21,12 +21,13 @@
import org.apache.asterix.app.external.ExternalLibraryUtils;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
public class DeleteUdfMessage extends AbstractUdfMessage {
- private static final long serialVersionUID = 2L;
+ private static final long serialVersionUID = 3L;
- public DeleteUdfMessage(String dataverseName, String libraryName, long reqId) {
+ public DeleteUdfMessage(DataverseName dataverseName, String libraryName, long reqId) {
super(dataverseName, libraryName, reqId);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/LoadUdfMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/LoadUdfMessage.java
index 600603b..6dffb28 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/LoadUdfMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/LoadUdfMessage.java
@@ -21,13 +21,14 @@
import org.apache.asterix.app.external.ExternalLibraryUtils;
import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.hyracks.util.file.FileUtil;
public class LoadUdfMessage extends AbstractUdfMessage {
private static final long serialVersionUID = 2L;
- public LoadUdfMessage(String dataverseName, String libraryName, long reqId) {
+ public LoadUdfMessage(DataverseName dataverseName, String libraryName, long reqId) {
super(dataverseName, libraryName, reqId);
}
@@ -35,6 +36,6 @@
protected void handleAction(ILibraryManager mgr, boolean isMdNode, INcApplicationContext appCtx) throws Exception {
ExternalLibraryUtils.setUpExternaLibrary(mgr, isMdNode,
FileUtil.joinPath(appCtx.getServiceContext().getServerCtx().getBaseDir().getAbsolutePath(),
- "applications", dataverseName + "." + libraryName));
+ "applications", dataverseName.getCanonicalForm() + "." + libraryName)); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 8dd8d02..6351d0d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -77,6 +77,7 @@
import org.apache.asterix.common.exceptions.WarningCollector;
import org.apache.asterix.common.exceptions.WarningUtil;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.utils.JobUtils;
import org.apache.asterix.common.utils.JobUtils.ProgressState;
import org.apache.asterix.common.utils.StorageConstants;
@@ -297,13 +298,13 @@
sessionOutput.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
}
validateOperation(appCtx, activeDataverse, stmt);
- IStatementRewriter stmtRewriter = rewriterFactory.createStatementRewriter();
- rewriteStatement(stmt, stmtRewriter); // Rewrite the statement's AST.
MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse);
metadataProvider.getConfig().putAll(config);
metadataProvider.setWriterFactory(writerFactory);
metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
metadataProvider.setOutputFile(outputFile);
+ IStatementRewriter stmtRewriter = rewriterFactory.createStatementRewriter();
+ rewriteStatement(stmt, stmtRewriter, metadataProvider); // Rewrite the statement's AST.
switch (stmt.getKind()) {
case SET:
handleSetStatement(stmt, config);
@@ -457,7 +458,7 @@
throws Exception {
DataverseDecl dvd = (DataverseDecl) stmt;
SourceLocation sourceLoc = dvd.getSourceLocation();
- String dvName = dvd.getDataverseName().getValue();
+ DataverseName dvName = dvd.getDataverseName();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
@@ -479,7 +480,7 @@
protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt,
IRequestParameters requestParameters) throws Exception {
CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
- String dvName = stmtCreateDataverse.getDataverseName().getValue();
+ DataverseName dvName = stmtCreateDataverse.getDataverseName();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
@@ -496,7 +497,7 @@
@SuppressWarnings("squid:S00112")
protected boolean doCreateDataverseStatement(MetadataTransactionContext mdTxnCtx, MetadataProvider metadataProvider,
CreateDataverseStatement stmtCreateDataverse) throws Exception {
- String dvName = stmtCreateDataverse.getDataverseName().getValue();
+ DataverseName dvName = stmtCreateDataverse.getDataverseName();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
if (dv != null) {
if (stmtCreateDataverse.getIfNotExists()) {
@@ -555,21 +556,17 @@
MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
DatasetDecl dd = (DatasetDecl) stmt;
SourceLocation sourceLoc = dd.getSourceLocation();
- String dataverseName = getActiveDataverse(dd.getDataverse());
+ DataverseName dataverseName = getActiveDataverseName(dd.getDataverse());
String datasetName = dd.getName().getValue();
- String datasetFullyQualifiedName = dataverseName + "." + datasetName;
DatasetType dsType = dd.getDatasetType();
- String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
+ DataverseName itemTypeDataverseName = getActiveDataverseName(dd.getItemTypeDataverse());
String itemTypeName = dd.getItemTypeName().getValue();
- String itemTypeFullyQualifiedName = itemTypeDataverseName + "." + itemTypeName;
- String metaItemTypeDataverseName = null;
+ DataverseName metaItemTypeDataverseName = null;
String metaItemTypeName = null;
- String metaItemTypeFullyQualifiedName = null;
Identifier metaItemTypeId = dd.getMetaItemTypeName();
if (metaItemTypeId != null) {
metaItemTypeName = metaItemTypeId.getValue();
- metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
- metaItemTypeFullyQualifiedName = metaItemTypeDataverseName + "." + metaItemTypeName;
+ metaItemTypeDataverseName = getActiveDataverseName(dd.getMetaItemTypeDataverse());
}
Identifier ngNameId = dd.getNodegroupName();
String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
@@ -581,10 +578,9 @@
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.createDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- itemTypeDataverseName, itemTypeFullyQualifiedName, metaItemTypeDataverseName,
- metaItemTypeFullyQualifiedName, nodegroupName, compactionPolicy, datasetFullyQualifiedName,
- defaultCompactionPolicy);
+ MetadataLockUtil.createDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName,
+ itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, nodegroupName,
+ compactionPolicy, defaultCompactionPolicy);
Dataset dataset = null;
try {
IDatasetDetails datasetDetails;
@@ -756,14 +752,14 @@
for (IActiveEntityEventsListener listener : listeners) {
if (listener.isEntityUsingDataset(dataset) && listener.isActive()) {
throw new CompilationException(ErrorCode.COMPILATION_CANT_DROP_ACTIVE_DATASET, sourceLoc,
- dataset.getFullyQualifiedName(), listener.getEntityId().toString());
+ DatasetUtil.getFullyQualifiedDisplayName(dataset), listener.getEntityId().toString());
}
}
}
protected static String configureNodegroupForDataset(ICcApplicationContext appCtx, Map<String, String> hints,
- String dataverseName, String datasetName, MetadataProvider metadataProvider, SourceLocation sourceLoc)
- throws Exception {
+ DataverseName dataverseName, String datasetName, MetadataProvider metadataProvider,
+ SourceLocation sourceLoc) throws Exception {
IClusterStateManager csm = appCtx.getClusterStateManager();
Set<String> allNodes = csm.getParticipantNodes(true);
Set<String> selectedNodes = new LinkedHashSet<>();
@@ -793,26 +789,22 @@
IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
SourceLocation sourceLoc = stmtCreateIndex.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtCreateIndex.getDataverseName());
String datasetName = stmtCreateIndex.getDatasetName().getValue();
String indexName = stmtCreateIndex.getIndexName().getValue();
List<Integer> keySourceIndicators = stmtCreateIndex.getFieldSourceIndicators();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- String datasetFullyQualifiedName = dataverseName + "." + datasetName;
boolean isSecondaryPrimary = stmtCreateIndex.getFieldExprs().isEmpty();
- Dataset ds = null;
- Index index = null;
- MetadataLockUtil.createIndexBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- datasetFullyQualifiedName);
+ MetadataLockUtil.createIndexBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
- ds = metadataProvider.findDataset(dataverseName, datasetName);
+ Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
dataverseName);
}
- index = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+ Index index = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName, indexName);
if (index != null) {
if (stmtCreateIndex.getIfNotExists()) {
@@ -1213,12 +1205,11 @@
protected void handleCreateTypeStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
TypeDecl stmtCreateType = (TypeDecl) stmt;
SourceLocation sourceLoc = stmtCreateType.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtCreateType.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtCreateType.getDataverseName());
String typeName = stmtCreateType.getIdent().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.createTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + typeName);
+ MetadataLockUtil.createTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName, typeName);
try {
Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
@@ -1254,7 +1245,7 @@
IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
SourceLocation sourceLoc = stmtDelete.getSourceLocation();
- String dataverseName = stmtDelete.getDataverseName().getValue();
+ DataverseName dataverseName = stmtDelete.getDataverseName();
if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME + " dataverse can't be dropped");
@@ -1270,7 +1261,7 @@
protected boolean doDropDataverse(DataverseDropStatement stmtDelete, SourceLocation sourceLoc,
MetadataProvider metadataProvider, IHyracksClientConnection hcc) throws Exception {
- String dataverseName = stmtDelete.getDataverseName().getValue();
+ DataverseName dataverseName = stmtDelete.getDataverseName();
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
@@ -1304,7 +1295,7 @@
for (IActiveEntityEventsListener listener : activeListeners) {
EntityId activeEntityId = listener.getEntityId();
if (activeEntityId.getExtensionName().equals(Feed.EXTENSION_NAME)
- && activeEntityId.getDataverse().equals(dataverseName)) {
+ && activeEntityId.getDataverseName().equals(dataverseName)) {
if (listener.getState() != ActivityState.STOPPED) {
((ActiveEntityEventsListener) listener).stop(metadataProvider);
}
@@ -1427,10 +1418,9 @@
IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
DropDatasetStatement stmtDelete = (DropDatasetStatement) stmt;
SourceLocation sourceLoc = stmtDelete.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
String datasetName = stmtDelete.getDatasetName().getValue();
- MetadataLockUtil.dropDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName);
+ MetadataLockUtil.dropDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
doDropDataset(dataverseName, datasetName, metadataProvider, stmtDelete.getIfExists(), hcc, true, sourceLoc);
} finally {
@@ -1438,7 +1428,7 @@
}
}
- public void doDropDataset(String dataverseName, String datasetName, MetadataProvider metadataProvider,
+ public void doDropDataset(DataverseName dataverseName, String datasetName, MetadataProvider metadataProvider,
boolean ifExists, IHyracksClientConnection hcc, boolean dropCorrespondingNodeGroup,
SourceLocation sourceLoc) throws Exception {
MutableObject<ProgressState> progress = new MutableObject<>(ProgressState.NO_PROGRESS);
@@ -1505,15 +1495,14 @@
IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
SourceLocation sourceLoc = stmtIndexDrop.getSourceLocation();
String datasetName = stmtIndexDrop.getDatasetName().getValue();
- String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtIndexDrop.getDataverseName());
String indexName = stmtIndexDrop.getIndexName().getValue();
ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
List<JobSpecification> jobsToExecute = new ArrayList<>();
- String dsFullyQualifiedName = dataverseName + "." + datasetName;
- MetadataLockUtil.dropIndexBegin(lockManager, metadataProvider.getLocks(), dataverseName, dsFullyQualifiedName);
+ MetadataLockUtil.dropIndexBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
// For external index
boolean dropFilesIndex = false;
try {
@@ -1682,13 +1671,12 @@
protected void handleTypeDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
SourceLocation sourceLoc = stmtTypeDrop.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtTypeDrop.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtTypeDrop.getDataverseName());
String typeName = stmtTypeDrop.getTypeName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + typeName);
+ MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), dataverseName, typeName);
try {
Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (dt == null) {
@@ -1737,17 +1725,17 @@
CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
SourceLocation sourceLoc = cfs.getSourceLocation();
FunctionSignature signature = cfs.getFunctionSignature();
- String dataverse = getActiveDataverseName(signature.getNamespace());
- signature.setNamespace(dataverse);
+ DataverseName dataverseName = getActiveDataverseName(signature.getDataverseName());
+ signature.setDataverseName(dataverseName);
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.functionStatementBegin(lockManager, metadataProvider.getLocks(), dataverse,
- dataverse + "." + signature.getName());
+ MetadataLockUtil.createFunctionBegin(lockManager, metadataProvider.getLocks(), dataverseName,
+ signature.getName());
try {
- Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+ Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
- throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverse);
+ throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
}
//Check whether the function is use-able
@@ -1763,8 +1751,9 @@
apiFramework.reWriteQuery(declaredFunctions, metadataProvider, wrappedQuery, sessionOutput, false,
paramVars, warningCollector);
- List<List<List<String>>> dependencies = FunctionUtil.getFunctionDependencies(
- rewriterFactory.createQueryRewriter(), cfs.getFunctionBodyExpression(), metadataProvider);
+ List<List<org.apache.hyracks.algebricks.common.utils.Triple<DataverseName, String, String>>> dependencies =
+ FunctionUtil.getFunctionDependencies(rewriterFactory.createQueryRewriter(),
+ cfs.getFunctionBodyExpression(), metadataProvider);
Function function = new Function(signature, cfs.getParamList(), Function.RETURNTYPE_VOID,
cfs.getFunctionBody(), getFunctionLanguage(), FunctionKind.SCALAR.toString(), dependencies);
@@ -1792,7 +1781,7 @@
}
protected boolean isFunctionUsed(MetadataTransactionContext ctx, FunctionSignature signature,
- String currentDataverse) throws AlgebricksException {
+ DataverseName currentDataverse) throws AlgebricksException {
List<Dataverse> allDataverses = MetadataManager.INSTANCE.getDataverses(ctx);
for (Dataverse dataverse : allDataverses) {
if (dataverse.getDataverseName().equals(currentDataverse)) {
@@ -1816,11 +1805,11 @@
FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
SourceLocation sourceLoc = stmtDropFunction.getSourceLocation();
FunctionSignature signature = stmtDropFunction.getFunctionSignature();
- signature.setNamespace(getActiveDataverseName(signature.getNamespace()));
+ signature.setDataverseName(getActiveDataverseName(signature.getDataverseName()));
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.functionStatementBegin(lockManager, metadataProvider.getLocks(), signature.getNamespace(),
- signature.getNamespace() + "." + signature.getName());
+ MetadataLockUtil.dropFunctionBegin(lockManager, metadataProvider.getLocks(), signature.getDataverseName(),
+ signature.getName());
try {
Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
// If function == null && stmtDropFunction.getIfExists() == true, commit txn directly.
@@ -1845,13 +1834,12 @@
protected void handleLoadStatement(MetadataProvider metadataProvider, Statement stmt, IHyracksClientConnection hcc)
throws Exception {
LoadStatement loadStmt = (LoadStatement) stmt;
- String dataverseName = getActiveDataverse(loadStmt.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(loadStmt.getDataverseName());
String datasetName = loadStmt.getDatasetName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.modifyDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName);
+ MetadataLockUtil.modifyDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
CompiledLoadFromFileStatement cls =
new CompiledLoadFromFileStatement(dataverseName, loadStmt.getDatasetName().getValue(),
@@ -1880,12 +1868,12 @@
ResultMetadata outMetadata, Stats stats, boolean compileOnly, IRequestParameters requestParameters,
Map<String, IAObject> stmtParams, IStatementRewriter stmtRewriter) throws Exception {
InsertStatement stmtInsertUpsert = (InsertStatement) stmt;
- String dataverseName = getActiveDataverse(stmtInsertUpsert.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtInsertUpsert.getDataverseName());
final IMetadataLocker locker = new IMetadataLocker() {
@Override
public void lock() throws AlgebricksException {
- MetadataLockUtil.insertDeleteUpsertBegin(lockManager, metadataProvider.getLocks(),
- dataverseName + "." + stmtInsertUpsert.getDatasetName());
+ MetadataLockUtil.insertDeleteUpsertBegin(lockManager, metadataProvider.getLocks(), dataverseName,
+ stmtInsertUpsert.getDatasetName());
}
@Override
@@ -1942,16 +1930,16 @@
IHyracksClientConnection hcc, boolean compileOnly, Map<String, IAObject> stmtParams,
IStatementRewriter stmtRewriter) throws Exception {
DeleteStatement stmtDelete = (DeleteStatement) stmt;
- String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtDelete.getDataverseName());
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.insertDeleteUpsertBegin(lockManager, metadataProvider.getLocks(),
- dataverseName + "." + stmtDelete.getDatasetName());
+ MetadataLockUtil.insertDeleteUpsertBegin(lockManager, metadataProvider.getLocks(), dataverseName,
+ stmtDelete.getDatasetName());
try {
metadataProvider.setWriteTransaction(true);
CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
- stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
+ stmtDelete.getDatasetName(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
stmtDelete.getQuery());
clfrqs.setSourceLocation(stmt.getSourceLocation());
JobSpecification jobSpec =
@@ -2005,8 +1993,8 @@
metadataProvider, insertUpsert, sessionOutput, true, externalVars.keySet(), warningCollector);
InsertStatement rewrittenInsertUpsert = (InsertStatement) rewrittenResult.first;
- String dataverseName = getActiveDataverse(rewrittenInsertUpsert.getDataverseName());
- String datasetName = rewrittenInsertUpsert.getDatasetName().getValue();
+ DataverseName dataverseName = getActiveDataverseName(rewrittenInsertUpsert.getDataverseName());
+ String datasetName = rewrittenInsertUpsert.getDatasetName();
CompiledInsertStatement clfrqs;
switch (insertUpsert.getKind()) {
case INSERT:
@@ -2035,12 +2023,11 @@
protected void handleCreateFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
CreateFeedStatement cfs = (CreateFeedStatement) stmt;
SourceLocation sourceLoc = cfs.getSourceLocation();
- String dataverseName = getActiveDataverse(cfs.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(cfs.getDataverseName());
String feedName = cfs.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.createFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + feedName);
+ MetadataLockUtil.createFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, feedName);
try {
Feed feed =
MetadataManager.INSTANCE.getFeed(metadataProvider.getMetadataTxnContext(), dataverseName, feedName);
@@ -2067,35 +2054,32 @@
protected void handleCreateFeedPolicyStatement(MetadataProvider metadataProvider, Statement stmt)
throws AlgebricksException, HyracksDataException {
- String dataverse;
- String policy;
- FeedPolicyEntity newPolicy = null;
+ FeedPolicyEntity newPolicy;
MetadataTransactionContext mdTxnCtx = null;
CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
SourceLocation sourceLoc = cfps.getSourceLocation();
- dataverse = getActiveDataverse(null);
- policy = cfps.getPolicyName();
- MetadataLockUtil.createFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverse,
- dataverse + "." + policy);
+ DataverseName dataverseName = getActiveDataverseName(null);
+ String policyName = cfps.getPolicyName();
+ MetadataLockUtil.createFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverseName, policyName);
try {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- FeedPolicyEntity feedPolicy =
- MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
+ FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE
+ .getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverseName, policyName);
if (feedPolicy != null) {
if (cfps.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
} else {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
- "A policy with this name " + policy + " already exists.");
+ "A policy with this name " + policyName + " already exists.");
}
}
boolean extendingExisting = cfps.getSourcePolicyName() != null;
String description = cfps.getDescription() == null ? "" : cfps.getDescription();
if (extendingExisting) {
- FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE
- .getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
+ FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(
+ metadataProvider.getMetadataTxnContext(), dataverseName, cfps.getSourcePolicyName());
if (sourceFeedPolicy == null) {
sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
MetadataConstants.METADATA_DATAVERSE_NAME, cfps.getSourcePolicyName());
@@ -2106,7 +2090,7 @@
}
Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
policyProperties.putAll(cfps.getProperties());
- newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
+ newPolicy = new FeedPolicyEntity(dataverseName, policyName, description, policyProperties);
} else {
Properties prop = new Properties();
try {
@@ -2118,7 +2102,7 @@
}
Map<String, String> policyProperties = new HashMap<>();
prop.forEach((key, value) -> policyProperties.put((String) key, (String) value));
- newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
+ newPolicy = new FeedPolicyEntity(dataverseName, policyName, description, policyProperties);
}
MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2134,12 +2118,11 @@
IHyracksClientConnection hcc) throws Exception {
FeedDropStatement stmtFeedDrop = (FeedDropStatement) stmt;
SourceLocation sourceLoc = stmtFeedDrop.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtFeedDrop.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtFeedDrop.getDataverseName());
String feedName = stmtFeedDrop.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.dropFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + feedName);
+ MetadataLockUtil.dropFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, feedName);
try {
Feed feed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
if (feed == null) {
@@ -2175,7 +2158,7 @@
listener.unregister();
}
JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
- MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
+ MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverseName(), feedId.getEntityName()));
runJob(hcc, spec);
MetadataManager.INSTANCE.dropFeed(mdTxnCtx, feed.getDataverseName(), feed.getFeedName());
if (LOGGER.isInfoEnabled()) {
@@ -2188,10 +2171,9 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
FeedPolicyDropStatement stmtFeedPolicyDrop = (FeedPolicyDropStatement) stmt;
SourceLocation sourceLoc = stmtFeedPolicyDrop.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtFeedPolicyDrop.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtFeedPolicyDrop.getDataverseName());
String policyName = stmtFeedPolicyDrop.getPolicyName().getValue();
- MetadataLockUtil.dropFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + policyName);
+ MetadataLockUtil.dropFeedPolicyBegin(lockManager, metadataProvider.getLocks(), dataverseName, policyName);
try {
FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
if (feedPolicy == null) {
@@ -2216,12 +2198,11 @@
IHyracksClientConnection hcc) throws Exception {
StartFeedStatement sfs = (StartFeedStatement) stmt;
SourceLocation sourceLoc = sfs.getSourceLocation();
- String dataverseName = getActiveDataverse(sfs.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(sfs.getDataverseName());
String feedName = sfs.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean committed = false;
- MetadataLockUtil.startFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + feedName);
+ MetadataLockUtil.startFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, feedName);
try {
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// Runtime handler
@@ -2236,8 +2217,8 @@
}
for (FeedConnection feedConnection : feedConnections) {
// what if the dataset is in a different dataverse
- String fqName = feedConnection.getDataverseName() + "." + feedConnection.getDatasetName();
- lockManager.acquireDatasetReadLock(metadataProvider.getLocks(), fqName);
+ lockManager.acquireDatasetReadLock(metadataProvider.getLocks(), feedConnection.getDataverseName(),
+ feedConnection.getDatasetName());
}
ActiveNotificationHandler activeEventHandler =
(ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
@@ -2270,7 +2251,7 @@
private void handleStopFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
StopFeedStatement sfst = (StopFeedStatement) stmt;
SourceLocation sourceLoc = sfst.getSourceLocation();
- String dataverseName = getActiveDataverse(sfst.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(sfst.getDataverseName());
String feedName = sfst.getFeedName().getValue();
EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
ActiveNotificationHandler activeEventHandler =
@@ -2281,8 +2262,7 @@
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
"Feed " + feedName + " is not started.");
}
- MetadataLockUtil.stopFeedBegin(lockManager, metadataProvider.getLocks(), entityId.getDataverse(),
- entityId.getEntityName());
+ MetadataLockUtil.stopFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, feedName);
try {
listener.stop(metadataProvider);
} finally {
@@ -2294,7 +2274,7 @@
FeedConnection fc;
ConnectFeedStatement cfs = (ConnectFeedStatement) stmt;
SourceLocation sourceLoc = cfs.getSourceLocation();
- String dataverseName = getActiveDataverse(cfs.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(cfs.getDataverseName());
String feedName = cfs.getFeedName();
String datasetName = cfs.getDatasetName().getValue();
String policyName = cfs.getPolicy();
@@ -2306,8 +2286,8 @@
ActiveNotificationHandler activeEventHandler =
(ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
// Transaction handling
- MetadataLockUtil.connectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName, dataverseName + "." + feedName);
+ MetadataLockUtil.connectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName,
+ feedName);
try {
// validation
Dataset dataset = FeedMetadataUtil.validateIfDatasetExists(metadataProvider, dataverseName, datasetName);
@@ -2352,13 +2332,13 @@
protected void handleDisconnectFeedStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
DisconnectFeedStatement cfs = (DisconnectFeedStatement) stmt;
SourceLocation sourceLoc = cfs.getSourceLocation();
- String dataverseName = getActiveDataverse(cfs.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(cfs.getDataverseName());
String datasetName = cfs.getDatasetName().getValue();
String feedName = cfs.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- MetadataLockUtil.disconnectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName, dataverseName + "." + cfs.getFeedName());
+ MetadataLockUtil.disconnectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName,
+ feedName);
try {
ActiveNotificationHandler activeEventHandler =
(ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
@@ -2399,14 +2379,13 @@
IHyracksClientConnection hcc) throws Exception {
CompactStatement compactStatement = (CompactStatement) stmt;
SourceLocation sourceLoc = compactStatement.getSourceLocation();
- String dataverseName = getActiveDataverse(compactStatement.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(compactStatement.getDataverseName());
String datasetName = compactStatement.getDatasetName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
List<JobSpecification> jobsToExecute = new ArrayList<>();
- MetadataLockUtil.compactBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName);
+ MetadataLockUtil.compactBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
if (ds == null) {
@@ -2703,7 +2682,7 @@
IHyracksClientConnection hcc) throws Exception {
RefreshExternalDatasetStatement stmtRefresh = (RefreshExternalDatasetStatement) stmt;
SourceLocation sourceLoc = stmtRefresh.getSourceLocation();
- String dataverseName = getActiveDataverse(stmtRefresh.getDataverseName());
+ DataverseName dataverseName = getActiveDataverseName(stmtRefresh.getDataverseName());
String datasetName = stmtRefresh.getDatasetName().getValue();
TransactionState transactionState = TransactionState.COMMIT;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -2719,8 +2698,7 @@
Dataset transactionDataset = null;
boolean lockAquired = false;
boolean success = false;
- MetadataLockUtil.refreshDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName,
- dataverseName + "." + datasetName);
+ MetadataLockUtil.refreshDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName, datasetName);
try {
ds = metadataProvider.findDataset(dataverseName, datasetName);
// Dataset exists ?
@@ -2925,8 +2903,8 @@
}
@Override
- public String getActiveDataverseName(String dataverse) {
- return (dataverse != null && !dataverse.isEmpty()) ? dataverse : activeDataverse.getDataverseName();
+ public DataverseName getActiveDataverseName(DataverseName dataverseName) {
+ return dataverseName != null ? dataverseName : activeDataverse.getDataverseName();
}
@Override
@@ -2939,10 +2917,6 @@
return responsePrinter;
}
- public String getActiveDataverse(Identifier dataverse) {
- return getActiveDataverseName(dataverse != null ? dataverse.getValue() : null);
- }
-
@Override
public void getWarnings(Collection<? super Warning> outWarnings, long maxWarnings) {
warningCollector.getWarnings(outWarnings, maxWarnings);
@@ -2974,8 +2948,9 @@
}
}
- protected void rewriteStatement(Statement stmt, IStatementRewriter rewriter) throws CompilationException {
- rewriter.rewrite(stmt);
+ protected void rewriteStatement(Statement stmt, IStatementRewriter rewriter, MetadataProvider metadataProvider)
+ throws CompilationException {
+ rewriter.rewrite(stmt, metadataProvider);
}
private void ensureNonPrimaryIndexDrop(Index index, SourceLocation sourceLoc) throws AlgebricksException {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index b74f4c6..447fdff 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -39,6 +39,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.common.utils.StoragePathUtil;
import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
@@ -186,6 +187,10 @@
argExprs.add(new LiteralExpr(new StringLiteral((String) arg)));
} else if (arg instanceof Expression) {
argExprs.add((Expression) arg);
+ } else if (arg instanceof DataverseName) {
+ argExprs.add(new LiteralExpr(new StringLiteral(((DataverseName) arg).getCanonicalForm())));
+ } else {
+ throw new IllegalArgumentException();
}
}
return argExprs;
@@ -240,12 +245,12 @@
Query feedConnQuery = makeConnectionQuery(feedConn);
CompiledStatements.ICompiledDmlStatement clfrqs;
if (insertFeed) {
- InsertStatement stmtUpsert = new InsertStatement(new Identifier(feedConn.getDataverseName()),
+ InsertStatement stmtUpsert = new InsertStatement(feedConn.getDataverseName(),
new Identifier(feedConn.getDatasetName()), feedConnQuery, -1, null, null);
clfrqs = new CompiledStatements.CompiledInsertStatement(feedConn.getDataverseName(),
feedConn.getDatasetName(), feedConnQuery, stmtUpsert.getVarCounter(), null, null);
} else {
- UpsertStatement stmtUpsert = new UpsertStatement(new Identifier(feedConn.getDataverseName()),
+ UpsertStatement stmtUpsert = new UpsertStatement(feedConn.getDataverseName(),
new Identifier(feedConn.getDatasetName()), feedConnQuery, -1, null, null);
clfrqs = new CompiledStatements.CompiledUpsertStatement(feedConn.getDataverseName(),
feedConn.getDatasetName(), feedConnQuery, stmtUpsert.getVarCounter(), null, null);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
index c37d8cc..ff99327 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
@@ -20,6 +20,7 @@
package org.apache.asterix.utils;
import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.common.utils.JobUtils;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -44,7 +45,7 @@
}
public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider,
- String dataverseName, String datasetName) throws Exception {
+ DataverseName dataverseName, String datasetName) throws Exception {
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
flushDataset(hcc, metadataProvider, dataset);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index 483987c..81f749e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@ -34,6 +34,7 @@
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ExceptionUtils;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.common.utils.JobUtils;
import org.apache.asterix.dataflow.data.nontagged.MissingWriterFactory;
@@ -90,7 +91,7 @@
* the reusable hyracks connection.
* @throws Exception
*/
- public static void rebalance(String dataverseName, String datasetName, Set<String> targetNcNames,
+ public static void rebalance(DataverseName dataverseName, String datasetName, Set<String> targetNcNames,
MetadataProvider metadataProvider, IHyracksClientConnection hcc,
IDatasetRebalanceCallback datasetRebalanceCallback) throws Exception {
Dataset sourceDataset;
@@ -241,7 +242,8 @@
ActiveNotificationHandler activeNotificationHandler =
(ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
- lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), DatasetUtil.getFullyQualifiedName(source));
+ lockManager.upgradeDatasetLockToWrite(metadataProvider.getLocks(), source.getDataverseName(),
+ source.getDatasetName());
LOGGER.info("Updating dataset {} node group from {} to {}", source.getDatasetName(), source.getNodeGroupName(),
target.getNodeGroupName());
try {
@@ -256,8 +258,8 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
LOGGER.info("dataset {} node group updated to {}", target.getDatasetName(), target.getNodeGroupName());
} finally {
- lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(target));
+ lockManager.downgradeDatasetLockToExclusiveModify(metadataProvider.getLocks(), target.getDataverseName(),
+ target.getDatasetName());
}
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index 06d92b7..5c6f0b6 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -24,6 +24,7 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -31,10 +32,12 @@
import org.apache.asterix.api.http.server.ConnectorApiServlet;
import org.apache.asterix.api.http.server.ServletConstants;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
@@ -97,7 +100,7 @@
// Sets up mock returns.
when(mockRequest.getHttpRequest()).thenReturn(mockHttpRequest);
when(mockHttpRequest.method()).thenReturn(HttpMethod.GET);
- when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
+ when(mockRequest.getParameterValues("dataverseName")).thenReturn(Collections.singletonList("Metadata"));
when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
when(mockResponse.writer()).thenReturn(outputWriter);
when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
@@ -117,7 +120,8 @@
String primaryKey = actualResponse.get("keys").asText();
Assert.assertEquals("DataverseName,DatasetName", primaryKey);
ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON(actualResponse.get("type"));
- Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
+ Assert.assertEquals(getMetadataRecordType(MetadataConstants.METADATA_DATAVERSE_NAME,
+ MetadataConstants.DATASET_DATASET_NAME), recordType);
// Checks the correctness of results.
ArrayNode splits = (ArrayNode) actualResponse.get("splits");
@@ -174,7 +178,7 @@
Assert.assertEquals(actualResponse.toString(), expectedResponse.toString());
}
- private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
+ private ARecordType getMetadataRecordType(DataverseName dataverseName, String datasetName) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
// Retrieves file splits of the dataset.
MetadataProvider metadataProvider = new MetadataProvider(
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
index d453824..b184053 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
@@ -30,6 +30,7 @@
import org.apache.asterix.app.active.ActiveNotificationHandler;
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.utils.Servlets;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -153,7 +154,7 @@
* @param targetNodes
* @throws Exception
*/
- public static void rebalanceDataset(AsterixHyracksIntegrationUtil integrationUtil, String dataverseName,
+ public static void rebalanceDataset(AsterixHyracksIntegrationUtil integrationUtil, DataverseName dataverseName,
String datasetName, String[] targetNodes) throws Exception {
ICcApplicationContext ccAppCtx =
(ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
@@ -164,8 +165,8 @@
activeNotificationHandler.suspend(metadataProvider);
try {
IMetadataLockManager lockManager = ccAppCtx.getMetadataLockManager();
- lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
- dataverseName + '.' + datasetName);
+ lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(), dataverseName,
+ datasetName);
RebalanceUtil.rebalance(dataverseName, datasetName, new LinkedHashSet<>(Arrays.asList(targetNodes)),
metadataProvider, ccAppCtx.getHcc(), NoOpDatasetRebalanceCallback.INSTANCE);
} finally {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
index 64520a4..bc38254 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
@@ -40,8 +40,10 @@
import org.apache.asterix.common.config.ActiveProperties;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.watch.WaitForStateSubscriber;
import org.apache.asterix.file.StorageComponentProvider;
+import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Feed;
@@ -76,7 +78,7 @@
static TestUserActor[] users;
static String[] nodes = { "node1", "node2" };
static ActiveNotificationHandler handler;
- static String dataverseName = "Default";
+ static DataverseName dataverseName = MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME;
static String entityName = "entityName";
static EntityId entityId = new EntityId(Feed.EXTENSION_NAME, dataverseName, entityName);
static Dataset firstDataset;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
index 80dde8a..bc40304 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
@@ -38,6 +38,7 @@
import org.apache.asterix.app.nc.NCAppRuntimeContext;
import org.apache.asterix.app.result.ResponsePrinter;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.watch.WaitForStateSubscriber;
import org.apache.asterix.external.operators.FeedIntakeOperatorNodePushable;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -71,7 +72,8 @@
@Test
public void refreshStatsTest() throws Exception {
// Entities to be used
- EntityId entityId = new EntityId("MockExtension", "MockDataverse", "MockEntity");
+ EntityId entityId =
+ new EntityId("MockExtension", DataverseName.createSinglePartName("MockDataverse"), "MockEntity");
ActiveRuntimeId activeRuntimeId =
new ActiveRuntimeId(entityId, FeedIntakeOperatorNodePushable.class.getSimpleName(), 0);
List<Dataset> datasetList = new ArrayList<>();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestEventsListener.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestEventsListener.java
index 2143404..f87bfe2 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestEventsListener.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestEventsListener.java
@@ -31,6 +31,7 @@
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.metadata.LockList;
+import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.translator.IStatementExecutor;
@@ -112,8 +113,8 @@
protected JobId compileAndStartJob(MetadataProvider metadataProvider) throws HyracksDataException {
step(onStart);
try {
- metadataProvider.getApplicationContext().getMetadataLockManager()
- .acquireDatasetReadLock(metadataProvider.getLocks(), "Default.type");
+ metadataProvider.getApplicationContext().getMetadataLockManager().acquireDatasetReadLock(
+ metadataProvider.getLocks(), MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, "type");
} catch (AlgebricksException e) {
throw HyracksDataException.create(e);
}
@@ -207,8 +208,8 @@
try {
IMetadataLockManager lockManager = metadataProvider.getApplicationContext().getMetadataLockManager();
LockList locks = metadataProvider.getLocks();
- lockManager.acquireDataverseReadLock(locks, entityId.getDataverse());
- lockManager.acquireActiveEntityWriteLock(locks, entityId.getDataverse() + '.' + entityId.getEntityName());
+ lockManager.acquireDataverseReadLock(locks, entityId.getDataverseName());
+ lockManager.acquireActiveEntityWriteLock(locks, entityId.getDataverseName(), entityId.getEntityName());
// persist entity
} catch (Throwable th) {
// This failure puts the system in a bad state.
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestUserActor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestUserActor.java
index 5f715af..36f704c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestUserActor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/TestUserActor.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.api.IMetadataLockManager;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.api.IActiveEntityController;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
@@ -47,14 +48,14 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName dataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
try {
- lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName + '.' + entityName);
+ lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName, entityName);
List<Dataset> datasets = actionListener.getDatasets();
for (Dataset dataset : datasets) {
- MetadataLockUtil.modifyDatasetBegin(lockManager, mdProvider.getLocks(), dataverseName,
- DatasetUtil.getFullyQualifiedName(dataset));
+ MetadataLockUtil.modifyDatasetBegin(lockManager, mdProvider.getLocks(),
+ dataset.getDataverseName(), dataset.getDatasetName());
}
actionListener.start(mdProvider);
} finally {
@@ -70,14 +71,14 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName dataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
try {
- lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName + '.' + entityName);
+ lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName, entityName);
List<Dataset> datasets = actionListener.getDatasets();
for (Dataset dataset : datasets) {
- MetadataLockUtil.modifyDatasetBegin(lockManager, mdProvider.getLocks(), dataverseName,
- DatasetUtil.getFullyQualifiedName(dataset));
+ MetadataLockUtil.modifyDatasetBegin(lockManager, mdProvider.getLocks(),
+ dataset.getDataverseName(), dataset.getDatasetName());
}
actionListener.stop(mdProvider);
} finally {
@@ -93,14 +94,14 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName dataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
List<Dataset> datasets = actionListener.getDatasets();
try {
- lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName + '.' + entityName);
+ lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName, entityName);
for (Dataset dataset : datasets) {
lockManager.acquireDatasetExclusiveModificationLock(mdProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ dataset.getDataverseName(), dataset.getDatasetName());
}
actionListener.suspend(mdProvider);
} catch (Exception e) {
@@ -118,16 +119,16 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName dataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
try {
- lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName + '.' + entityName);
+ lockManager.acquireActiveEntityWriteLock(mdProvider.getLocks(), dataverseName, entityName);
List<Dataset> datasets = actionListener.getDatasets();
for (Dataset dataset : datasets) {
- lockManager.upgradeDatasetLockToWrite(mdProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ lockManager.upgradeDatasetLockToWrite(mdProvider.getLocks(), dataset.getDataverseName(),
+ dataset.getDatasetName());
lockManager.downgradeDatasetLockToExclusiveModify(mdProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ dataset.getDataverseName(), dataset.getDatasetName());
}
actionListener.resume(mdProvider);
} finally {
@@ -143,13 +144,12 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String entityDataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName entityDataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
try {
- lockManager.acquireActiveEntityReadLock(mdProvider.getLocks(),
- entityDataverseName + '.' + entityName);
- lockManager.acquireDatasetWriteLock(mdProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ lockManager.acquireActiveEntityReadLock(mdProvider.getLocks(), entityDataverseName, entityName);
+ lockManager.acquireDatasetWriteLock(mdProvider.getLocks(), dataset.getDataverseName(),
+ dataset.getDatasetName());
List<Dataset> datasets = clusterController.getAllDatasets();
if (datasets.contains(dataset)) {
throw new HyracksDataException("Dataset " + dataset + " already exists");
@@ -169,13 +169,12 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String entityDataverseName = actionListener.getEntityId().getDataverse();
+ DataverseName entityDataverseName = actionListener.getEntityId().getDataverseName();
String entityName = actionListener.getEntityId().getEntityName();
try {
- lockManager.acquireActiveEntityReadLock(mdProvider.getLocks(),
- entityDataverseName + '.' + entityName); // we have to first read lock all active entities before deleting a dataset
- lockManager.acquireDatasetWriteLock(mdProvider.getLocks(),
- DatasetUtil.getFullyQualifiedName(dataset));
+ lockManager.acquireActiveEntityReadLock(mdProvider.getLocks(), entityDataverseName, entityName); // we have to first read lock all active entities before deleting a dataset
+ lockManager.acquireDatasetWriteLock(mdProvider.getLocks(), dataset.getDataverseName(),
+ dataset.getDatasetName());
List<Dataset> datasets = clusterController.getAllDatasets();
if (!datasets.contains(dataset)) {
throw new HyracksDataException("Dataset " + dataset + " does not exist");
@@ -195,15 +194,14 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = dataset.getDataverseName();
- String datasetFullyQualifiedName = dataverseName + '.' + dataset.getDatasetName();
- String indexFullyQualifiedName = datasetFullyQualifiedName + ".index";
+ DataverseName dataverseName = dataset.getDataverseName();
+ String datasetName = dataset.getDatasetName();
try {
- MetadataLockUtil.createIndexBegin(lockManager, mdProvider.getLocks(), dataverseName,
- datasetFullyQualifiedName);
+ MetadataLockUtil.createIndexBegin(lockManager, mdProvider.getLocks(), dataverseName, datasetName);
if (actionListener.isActive()) {
throw new RuntimeDataException(ErrorCode.CANNOT_ADD_INDEX_TO_DATASET_CONNECTED_TO_ACTIVE_ENTITY,
- indexFullyQualifiedName, actionListener.getEntityId(), actionListener.getState());
+ DatasetUtil.getFullyQualifiedDisplayName(dataverseName, datasetName) + ".index",
+ actionListener.getEntityId(), actionListener.getState());
}
} finally {
mdProvider.getLocks().reset();
@@ -218,16 +216,15 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = dataset.getDataverseName();
- String datasetFullyQualifiedName = dataverseName + '.' + dataset.getDatasetName();
+ DataverseName dataverseName = dataset.getDataverseName();
+ String datasetName = dataset.getDatasetName();
try {
- MetadataLockUtil.dropIndexBegin(lockManager, mdProvider.getLocks(), dataverseName,
- datasetFullyQualifiedName);
+ MetadataLockUtil.dropIndexBegin(lockManager, mdProvider.getLocks(), dataverseName, datasetName);
if (actionListener.isActive()) {
throw new RuntimeDataException(
ErrorCode.CANNOT_REMOVE_INDEX_FROM_DATASET_CONNECTED_TO_ACTIVE_ENTITY,
- datasetFullyQualifiedName + ".index", actionListener.getEntityId(),
- actionListener.getState());
+ DatasetUtil.getFullyQualifiedDisplayName(dataverseName, datasetName) + ".index",
+ actionListener.getEntityId(), actionListener.getState());
}
} finally {
mdProvider.getLocks().reset();
@@ -242,11 +239,11 @@
Action action = new Action() {
@Override
protected void doExecute(MetadataProvider mdProvider) throws Exception {
- String dataverseName = dataset.getDataverseName();
- String datasetFullyQualifiedName = dataverseName + '.' + dataset.getDatasetName();
+ DataverseName dataverseName = dataset.getDataverseName();
+ String datasetName = dataset.getDatasetName();
try {
lockManager.acquireDataverseReadLock(mdProvider.getLocks(), dataverseName);
- lockManager.acquireDatasetReadLock(mdProvider.getLocks(), datasetFullyQualifiedName);
+ lockManager.acquireDatasetReadLock(mdProvider.getLocks(), dataverseName, datasetName);
if (!semaphore.tryAcquire()) {
semaphore.acquire();
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index 7a63685..7c1b1f3 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -70,8 +70,11 @@
import org.apache.asterix.app.external.IExternalUDFLibrarian;
import org.apache.asterix.common.api.Duration;
import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.utils.Servlets;
import org.apache.asterix.lang.sqlpp.util.SqlppStatementUtil;
+import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.asterix.runtime.evaluators.common.NumberUtils;
import org.apache.asterix.test.server.ITestServer;
import org.apache.asterix.test.server.TestServerProvider;
@@ -1997,7 +2000,7 @@
public void cleanup(String testCase, List<String> badtestcases) throws Exception {
try {
- ArrayList<String> toBeDropped = new ArrayList<>();
+ List<DataverseName> toBeDropped = new ArrayList<>();
InputStream resultStream = executeQueryService(
"select dv.DataverseName from Metadata.`Dataverse` as dv order by dv.DataverseName;",
getEndpoint(Servlets.QUERY_SERVICE), OutputFormat.CLEAN_JSON);
@@ -2005,9 +2008,10 @@
for (int i = 0; i < result.size(); i++) {
JsonNode json = result.get(i);
if (json != null) {
- String dvName = json.get("DataverseName").asText();
- if (!dvName.equals("Metadata") && !dvName.equals("Default")) {
- toBeDropped.add(SqlppStatementUtil.enclose(dvName));
+ DataverseName dvName = DataverseName.createFromCanonicalForm(json.get("DataverseName").asText());
+ if (!dvName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
+ && !dvName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
+ toBeDropped.add(dvName);
}
}
}
@@ -2015,10 +2019,10 @@
badtestcases.add(testCase);
LOGGER.info("Last test left some garbage. Dropping dataverses: " + StringUtils.join(toBeDropped, ','));
StringBuilder dropStatement = new StringBuilder();
- for (String dv : toBeDropped) {
+ for (DataverseName dv : toBeDropped) {
dropStatement.setLength(0);
dropStatement.append("drop dataverse ");
- dropStatement.append(dv);
+ SqlppStatementUtil.encloseDataverseName(dropStatement, dv);
dropStatement.append(";\n");
resultStream = executeQueryService(dropStatement.toString(), getEndpoint(Servlets.QUERY_SERVICE),
OutputFormat.CLEAN_JSON, UTF_8);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
index 54f44b1..b43c445 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/CheckpointInSecondaryIndexTest.java
@@ -36,6 +36,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.storage.IIndexCheckpointManager;
import org.apache.asterix.common.storage.IIndexCheckpointManagerProvider;
import org.apache.asterix.common.storage.IndexCheckpoint;
@@ -107,7 +108,7 @@
private static final List<Integer> KEY_INDICATORS_LIST = Collections.singletonList(Index.RECORD_INDICATOR);
private static final int RECORDS_PER_COMPONENT = 500;
private static final int DATASET_ID = 101;
- private static final String DATAVERSE_NAME = "TestDV";
+ private static final DataverseName DATAVERSE_NAME = DataverseName.createSinglePartName("TestDV");
private static final String DATASET_NAME = "TestDS";
private static final String INDEX_NAME = "TestIdx";
private static final String DATA_TYPE_NAME = "DUMMY";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
index a1b251f..1e1df54 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
@@ -35,6 +35,7 @@
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.ITransactionContext;
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.TransactionOptions;
@@ -98,7 +99,7 @@
private static final int TOTAL_NUM_OF_RECORDS = 5000;
private static final int RECORDS_PER_COMPONENT = 500;
private static final int DATASET_ID = 101;
- private static final String DATAVERSE_NAME = "TestDV";
+ private static final DataverseName DATAVERSE_NAME = DataverseName.createSinglePartName("TestDV");
private static final String DATASET_NAME = "TestDS";
private static final String INDEX_NAME = "TestIdx";
private static final String DATA_TYPE_NAME = "DUMMY";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
index 37b40bb..28da85c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
@@ -31,6 +31,7 @@
import org.apache.asterix.app.nc.NCAppRuntimeContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.ITransactionContext;
import org.apache.asterix.common.transactions.ITransactionManager;
import org.apache.asterix.common.transactions.TransactionOptions;
@@ -87,7 +88,7 @@
private static final int TOTAL_NUM_OF_RECORDS = 2000;
private static final int RECORDS_PER_COMPONENT = 1000;
private static final int DATASET_ID = 101;
- private static final String DATAVERSE_NAME = "TestDV";
+ private static final DataverseName DATAVERSE_NAME = DataverseName.createSinglePartName("TestDV");
private static final String DATASET_NAME = "TestDS";
private static final String DATA_TYPE_NAME = "DUMMY";
private static final String NODE_GROUP_NAME = "DEFAULT";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/StorageTestUtils.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/StorageTestUtils.java
index 99aee07..e41c193 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/StorageTestUtils.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/StorageTestUtils.java
@@ -42,6 +42,7 @@
import org.apache.asterix.common.context.PrimaryIndexOperationTracker;
import org.apache.asterix.common.dataflow.LSMInsertDeleteOperatorNodePushable;
import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
@@ -84,7 +85,7 @@
public static final int TOTAL_NUM_OF_RECORDS = 10000;
public static final int RECORDS_PER_COMPONENT = 1000;
public static final int DATASET_ID = 101;
- public static final String DATAVERSE_NAME = "TestDV";
+ public static final DataverseName DATAVERSE_NAME = DataverseName.createSinglePartName("TestDV");
public static final String DATASET_NAME = "TestDS";
public static final String DATA_TYPE_NAME = "DUMMY";
public static final String NODE_GROUP_NAME = "DEFAULT";
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
index bcf68b5..a6a53cb 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/TestDataset.java
@@ -21,6 +21,7 @@
import java.util.Map;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.metadata.IDatasetDetails;
import org.apache.asterix.metadata.declared.MetadataProvider;
@@ -45,10 +46,10 @@
private static final long serialVersionUID = 1L;
- public TestDataset(String dataverseName, String datasetName, String recordTypeDataverseName, String recordTypeName,
- String nodeGroupName, String compactionPolicy, Map<String, String> compactionPolicyProperties,
- IDatasetDetails datasetDetails, Map<String, String> hints, DatasetType datasetType, int datasetId,
- int pendingOp) {
+ public TestDataset(DataverseName dataverseName, String datasetName, DataverseName recordTypeDataverseName,
+ String recordTypeName, String nodeGroupName, String compactionPolicy,
+ Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails, Map<String, String> hints,
+ DatasetType datasetType, int datasetId, int pendingOp) {
super(dataverseName, datasetName, recordTypeDataverseName, recordTypeName, nodeGroupName, compactionPolicy,
compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp);
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
index 970cbc0..4826a99 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.test.sqlpp;
+import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -32,6 +33,7 @@
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.IParser;
import org.apache.asterix.lang.common.base.IParserFactory;
import org.apache.asterix.lang.common.base.IQueryRewriter;
@@ -46,6 +48,7 @@
import org.apache.asterix.lang.sqlpp.rewrites.SqlppRewriterFactory;
import org.apache.asterix.lang.sqlpp.util.SqlppAstPrintUtil;
import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
+import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.test.common.ComparisonException;
@@ -127,7 +130,7 @@
try {
List<Statement> statements = parser.parse();
List<FunctionDecl> functions = getDeclaredFunctions(statements);
- String dvName = getDefaultDataverse(statements);
+ DataverseName dvName = getDefaultDataverse(statements);
MetadataProvider metadataProvider = mock(MetadataProvider.class);
@SuppressWarnings("unchecked")
@@ -135,13 +138,13 @@
when(metadataProvider.getDefaultDataverseName()).thenReturn(dvName);
when(metadataProvider.getConfig()).thenReturn(config);
when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true");
- when(metadataProvider.findDataset(anyString(), anyString())).thenAnswer(new Answer<Dataset>() {
+ when(metadataProvider.findDataset(any(DataverseName.class), anyString())).thenAnswer(new Answer<Dataset>() {
@Override
public Dataset answer(InvocationOnMock invocation) {
Object[] args = invocation.getArguments();
final Dataset mockDataset = mock(Dataset.class);
- String fullyQualifiedName = args[0] != null ? args[0] + "." + args[1] : (String) args[1];
- when(mockDataset.getFullyQualifiedName()).thenReturn(fullyQualifiedName);
+ when(mockDataset.getDataverseName()).thenReturn((DataverseName) args[0]);
+ when(mockDataset.getDatasetName()).thenReturn((String) args[1]);
return mockDataset;
}
});
@@ -184,14 +187,14 @@
}
// Gets the default dataverse for the input statements.
- private String getDefaultDataverse(List<Statement> statements) {
+ private DataverseName getDefaultDataverse(List<Statement> statements) {
for (Statement st : statements) {
if (st.getKind() == Statement.Kind.DATAVERSE_DECL) {
DataverseDecl dv = (DataverseDecl) st;
- return dv.getDataverseName().getValue();
+ return dv.getDataverseName();
}
}
- return null;
+ return MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME;
}
// Rewrite queries.
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
index 31a6004..aea32f57 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
@@ -29,6 +29,7 @@
import org.apache.asterix.app.bootstrap.TestNodeController;
import org.apache.asterix.common.config.DatasetConfig;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -70,7 +71,7 @@
private static final int[] KEY_INDEXES = { 0 };
private static final List<Integer> KEY_INDICATORS_LIST = Arrays.asList(new Integer[] { Index.RECORD_INDICATOR });
private static final int DATASET_ID = 101;
- private static final String DATAVERSE_NAME = "TestDV";
+ private static final DataverseName DATAVERSE_NAME = DataverseName.createSinglePartName("TestDV");
private static final String DATASET_NAME = "TestDS";
private static final String DATA_TYPE_NAME = "DUMMY";
private static final String NODE_GROUP_NAME = "DEFAULT";
@@ -148,7 +149,7 @@
.getClusterControllerService().getApplicationContext();
MetadataProvider metadataProver = new MetadataProvider(appCtx, null);
metadataProver.setMetadataTxnContext(mdTxn);
- final String defaultDv = MetadataBuiltinEntities.DEFAULT_DATAVERSE.getDataverseName();
+ final DataverseName defaultDv = MetadataBuiltinEntities.DEFAULT_DATAVERSE.getDataverseName();
final Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxn, defaultDv, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxn);
FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(), dataset,
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-01.sqlpp
new file mode 100644
index 0000000..bc811a2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-01.sqlpp
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: index in a dataverse with a multipart name
+ */
+
+drop dataverse x.y if exists;
+create dataverse x.y;
+
+use x.y;
+
+create type Emp as
+ closed {
+ id : bigint,
+ fname : string,
+ lname : string,
+ age : bigint,
+ dept : string
+};
+
+create dataset employee(Emp) primary key id;
+
+create index idx_employee_f_l_name on employee (fname,lname) type btree;
+
+select element l
+from employee as l
+where ((l.fname = 'Julio') and (l.lname = 'Isa'))
+;
+
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-02.sqlpp
new file mode 100644
index 0000000..cf8da32
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multipart-dataverse/index/index-02.sqlpp
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: index in a dataverse with a multipart name
+ */
+
+drop dataverse x.y if exists;
+create dataverse x.y;
+
+create type x.y.Emp as
+ closed {
+ id : bigint,
+ fname : string,
+ lname : string,
+ age : bigint,
+ dept : string
+};
+
+create dataset x.y.employee(Emp) primary key id;
+
+create index idx_employee_f_l_name on x.y.employee (fname,lname) type btree;
+
+select element l
+from x.y.employee as l
+where ((l.fname = 'Julio') and (l.lname = 'Isa'))
+;
+
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_01.sqlpp
index ab8d14a..37fc73e 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join-neg_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 = y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_02.sqlpp
index bd7d7b2..31126b6 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join-neg_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join-neg_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 = y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_01.sqlpp
index 5313576..3822746 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 /*+ indexnl */ = y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_02.sqlpp
index a3740b6..fb1bf93 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-equi-join_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 /*+ indexnl */ = y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_01.sqlpp
index 6c1a30a..64433e9 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-ge-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 /*+ indexnl */ >= y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_02.sqlpp
index 954b3a6..30c5406 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-ge-join_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-ge-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 /*+ indexnl */ <= y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_01.sqlpp
index 8763a0d..67e8208 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-gt-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 /*+ indexnl */ > y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_02.sqlpp
index 748bfb0..84a84c0 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-gt-join_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-gt-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 /*+ indexnl */ < y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_01.sqlpp
index 0e99c88..a80c2ca 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-le-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 /*+ indexnl */ <= y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_02.sqlpp
index 2eb1dde..66ac893 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-le-join_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-le-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 /*+ indexnl */ >= y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_01.sqlpp
index f717a64..3c719d8 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_01.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-lt-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key1 /*+ indexnl */ < y.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_02.sqlpp
index e574877..d32d8a1 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/btree-index-join/primary-lt-join_02.sqlpp
@@ -40,7 +40,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-lt-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.key2 /*+ indexnl */ > y.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join-neg_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join-neg_01.sqlpp
index 3debb01..427eaef 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join-neg_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join-neg_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join-neg_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 = y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_01.sqlpp
index 7ed0a41..5969055 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 /*+ indexnl */ = y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_02.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_02.sqlpp
index 421b65d..8bc2165 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_02.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-equi-join_02.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-equi-join_02.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key2 /*+ indexnl */ = y.nested.key1)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-ge-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-ge-join_01.sqlpp
index 434c47d..4e13cc4 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-ge-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-ge-join_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-ge-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 /*+ indexnl */ >= y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-gt-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-gt-join_01.sqlpp
index 45827bf..c006a0d 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-gt-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-gt-join_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-gt-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 /*+ indexnl */ > y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-le-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-le-join_01.sqlpp
index a2029f3..c36ed5b 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-le-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-le-join_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-le-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 /*+ indexnl */ <= y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-lt-join_01.sqlpp b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-lt-join_01.sqlpp
index 20e535f..cb4241d 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-lt-join_01.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nested-index/btree-index-join/primary-lt-join_01.sqlpp
@@ -45,7 +45,7 @@
write output to asterix_nc1:"rttest/btree-index-join_primary-lt-join_01.adm";
select element x
-from `test1.DsOne` as x,
- `test1.DsTwo` as y
+from test1.DsOne as x,
+ test1.DsTwo as y
where (x.nested.key1 /*+ indexnl */ < y.nested.key2)
;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-01.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-01.plan
new file mode 100644
index 0000000..394213e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-01.plan
@@ -0,0 +1,15 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-02.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-02.plan
new file mode 100644
index 0000000..394213e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multipart-dataverse/index/index-02.plan
@@ -0,0 +1,15 @@
+-- DISTRIBUTE_RESULT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results_parser_sqlpp/query-issue550.ast b/asterixdb/asterix-app/src/test/resources/optimizerts/results_parser_sqlpp/query-issue550.ast
index 042bd3d..d1ebcac 100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/results_parser_sqlpp/query-issue550.ast
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results_parser_sqlpp/query-issue550.ast
@@ -162,7 +162,7 @@
Variable [ Name=$srec ]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [s]
+ LiteralExpr [STRING] [Default.s]
]
AS Variable [ Name=$srec ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/1.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/1.ast
index f26febf..9dcc461 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/1.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/1.ast
@@ -12,7 +12,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$user ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/2.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/2.ast
index a605028..9b6b992 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/2.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/2.ast
@@ -19,7 +19,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Event]
+ LiteralExpr [STRING] [Default.Event]
]
AS Variable [ Name=$event ]
,
@@ -105,7 +105,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [es]
+ LiteralExpr [STRING] [Default.es]
]
AS Variable [ Name=$e ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/4.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/4.ast
index 9599255..48ad773 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/4.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/4.ast
@@ -9,7 +9,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [SIGroup]
+ LiteralExpr [STRING] [Default.SIGroup]
]
AS Variable [ Name=$sig ]
]
@@ -29,7 +29,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [SIGroup]
+ LiteralExpr [STRING] [Default.SIGroup]
]
AS Variable [ Name=$similar_sig ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/5.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/5.ast
index 9eb7cf8..b629e2e 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/5.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/5.ast
@@ -9,7 +9,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Event]
+ LiteralExpr [STRING] [Default.Event]
]
AS Variable [ Name=$event ]
]
@@ -29,7 +29,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Events]
+ LiteralExpr [STRING] [Default.Events]
]
AS Variable [ Name=$collocated_event ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/6.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/6.ast
index 854c7d1..b784f99 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/6.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/6.ast
@@ -17,7 +17,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Users]
+ LiteralExpr [STRING] [Default.Users]
]
AS Variable [ Name=$user ]
]
@@ -42,7 +42,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Users]
+ LiteralExpr [STRING] [Default.Users]
]
AS Variable [ Name=$similar_user ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/ANYInFieldAccessor.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/ANYInFieldAccessor.ast
index 3e3575d..1ea056d 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/ANYInFieldAccessor.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/ANYInFieldAccessor.ast
@@ -20,12 +20,12 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$user ]
,
FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Movie]
+ LiteralExpr [STRING] [Default.Movie]
]
AS Variable [ Name=$mv ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/LetFor.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/LetFor.ast
index 4623444..275455a 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/LetFor.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/LetFor.ast
@@ -6,7 +6,7 @@
Variable [ Name=$u ]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$u ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias.ast
index b27701c..7d76691 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias.ast
@@ -4,7 +4,7 @@
root
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [tbl_name]
+ LiteralExpr [STRING] [Default.tbl_name]
]
AS Variable [ Name=$t ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias2.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias2.ast
index 563b883..cd6ce26 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias2.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias2.ast
@@ -35,7 +35,7 @@
root
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [tbl_name]
+ LiteralExpr [STRING] [Default.tbl_name]
]
AS Variable [ Name=$root ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias3.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias3.ast
index 65cedd3..f72802f 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias3.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/columnalias3.ast
@@ -9,7 +9,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [tbl_name]
+ LiteralExpr [STRING] [Default.tbl_name]
]
AS Variable [ Name=$t ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/functionDecl1.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/functionDecl1.ast
index 2443f90..60fc518 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/functionDecl1.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/functionDecl1.ast
@@ -107,7 +107,7 @@
Variable [ Name=$e ]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Events]
+ LiteralExpr [STRING] [Default.Events]
]
AS Variable [ Name=$e ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR.ast
index aa25b42..3f27a49 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR.ast
@@ -19,7 +19,7 @@
Variable [ Name=$u ]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$u ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR2.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR2.ast
index 463ddc4..3434965 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR2.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR2.ast
@@ -35,7 +35,7 @@
Variable [ Name=$tmp ]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [number]
+ LiteralExpr [STRING] [Default.number]
]
AS Variable [ Name=$tmp ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR3.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR3.ast
index f4d8694..64b580f 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR3.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFLWOGR3.ast
@@ -19,7 +19,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Event]
+ LiteralExpr [STRING] [Default.Event]
]
AS Variable [ Name=$event ]
,
@@ -105,7 +105,7 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [es]
+ LiteralExpr [STRING] [Default.es]
]
AS Variable [ Name=$e ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFor.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFor.ast
index 52a9319..1fe5d95 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFor.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/nestedFor.ast
@@ -20,12 +20,12 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$user ]
,
FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Movie]
+ LiteralExpr [STRING] [Default.Movie]
]
AS Variable [ Name=$mv ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/numberInFieldAccessor.ast b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/numberInFieldAccessor.ast
index 605113f..bc58ab8 100644
--- a/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/numberInFieldAccessor.ast
+++ b/asterixdb/asterix-app/src/test/resources/parserts/results_parser_sqlpp/numberInFieldAccessor.ast
@@ -20,12 +20,12 @@
]
]
FROM [ FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [User]
+ LiteralExpr [STRING] [Default.User]
]
AS Variable [ Name=$user ]
,
FunctionCall asterix.dataset@1[
- LiteralExpr [STRING] [Movie]
+ LiteralExpr [STRING] [Default.Movie]
]
AS Variable [ Name=$mv ]
]
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.1.ddl.sqlpp
new file mode 100644
index 0000000..910b6df
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.1.ddl.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: index in a dataverse with a multipart name
+ */
+
+drop dataverse x.y if exists;
+create dataverse x.y;
+
+use x.y;
+
+create type Emp as
+ closed {
+ id : bigint,
+ fname : string,
+ lname : string,
+ age : bigint,
+ dept : string
+};
+
+create dataset employee(Emp) primary key id;
+
+create index idx_employee_f_l_name on employee (fname,lname) type btree;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.2.update.sqlpp
new file mode 100644
index 0000000..72db572
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.2.update.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: index in a dataverse with a multipart name
+ */
+
+use x.y;
+
+load dataset employee using localfs ((`path`=`asterix_nc1://data/names.adm`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.3.query.sqlpp
new file mode 100644
index 0000000..ee3a680
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/index_1/index_1.3.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: index in a dataverse with a multipart name
+ */
+
+use x.y;
+
+select element l
+from employee as l
+where ((l.fname = 'Julio') and (l.lname = 'Isa'))
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.1.ddl.sqlpp
new file mode 100644
index 0000000..1089bc3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.1.ddl.sqlpp
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+drop dataverse sales.east if exists;
+create dataverse sales.east;
+
+create type sales.east.orderType as {
+ oid: bigint
+};
+
+create dataset sales.east.orders(sales.east.orderType) primary key oid;
+
+drop dataverse sales.west if exists;
+create dataverse sales.west;
+
+create type sales.west.orderType as {
+ oid: bigint
+};
+
+create dataset sales.west.orders(sales.west.orderType) primary key oid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.10.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.10.query.sqlpp
new file mode 100644
index 0000000..ca18151
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.10.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Single name resolution rule with a nested field */
+
+select x.y.z
+from sales.east.orders
+order by oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.11.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.11.query.sqlpp
new file mode 100644
index 0000000..c2143ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.11.query.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Join */
+
+select east.east, west.west
+from sales.east.orders east, sales.west.orders west
+where east.oid = west.oid
+order by east.oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.2.update.sqlpp
new file mode 100644
index 0000000..09104e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+use sales.east;
+
+insert into orders ({'oid' : 100, 'pid': 0, 'amount': 1, 'east': [11, 111], 'x': { 'y': {'z': 11 } } });
+insert into orders ({'oid' : 101, 'pid': 1, 'amount': 2, 'east': [22, 222], 'x': { 'y': {'z': 22 } } });
+insert into orders ({'oid' : 102, 'pid': 2, 'amount': 3, 'east': [33, 333], 'x': { 'y': {'z': 33 } } });
+insert into orders ({'oid' : 103, 'pid': 1, 'amount': 4, 'east': [44, 444], 'x': { 'y': {'z': 44 } } });
+insert into orders ({'oid' : 104, 'pid': 0, 'amount': 6, 'east': [66, 666], 'x': { 'y': {'z': 66 } } });
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.3.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.3.update.sqlpp
new file mode 100644
index 0000000..77f6f30
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.3.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+use sales.west;
+
+insert into orders ({'oid' : 100, 'pid': 0, 'amount': 1, 'west': [1111, 11111], 'x': { 'y': {'z': 1111 } } });
+insert into orders ({'oid' : 101, 'pid': 1, 'amount': 2, 'west': [2222, 22222], 'x': { 'y': {'z': 2222 } } });
+insert into orders ({'oid' : 102, 'pid': 2, 'amount': 3, 'west': [3333, 33333], 'x': { 'y': {'z': 3333 } } });
+insert into orders ({'oid' : 103, 'pid': 1, 'amount': 4, 'west': [4444, 44444], 'x': { 'y': {'z': 4444 } } });
+insert into orders ({'oid' : 104, 'pid': 0, 'amount': 6, 'west': [6666, 66666], 'x': { 'y': {'z': 6666 } } });
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.4.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.4.query.sqlpp
new file mode 100644
index 0000000..6e589a6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.4.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+use sales.east;
+
+select value o
+from orders o
+order by oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.5.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.5.query.sqlpp
new file mode 100644
index 0000000..e3adbad
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.5.query.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Use backticks */
+
+use `sales`.`east`;
+
+select value o
+from orders o
+order by oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.6.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.6.query.sqlpp
new file mode 100644
index 0000000..0905b35
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.6.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* FROM fully qualified dataset name */
+
+select value o
+from sales.east.orders o
+order by oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.7.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.7.query.sqlpp
new file mode 100644
index 0000000..7103abc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.7.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Local variables first in FROM clause */
+
+select value v
+from sales.east.orders as sales, sales.east as v
+order by v
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.8.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.8.query.sqlpp
new file mode 100644
index 0000000..97e6263
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.8.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Local variables first in SELECT clause */
+
+select sales.east[0] v
+from sales.east.orders as sales
+order by v
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.9.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.9.query.sqlpp
new file mode 100644
index 0000000..d6d1320
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/resolution_1/resolution_1.9.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: name resolution for a dataverse with a multipart name
+ */
+
+/* Test auto-generated variable name */
+
+select orders
+from sales.east.orders
+order by oid
+limit 2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.1.ddl.sqlpp
new file mode 100644
index 0000000..471cd01
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.1.ddl.sqlpp
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: special characters in multipart dataverse name
+ */
+
+drop dataverse `a.b`.`c@d` if exists;
+create dataverse `a.b`.`c@d`;
+
+use `a.b`.`c@d`;
+
+create type Emp as
+ closed {
+ id : bigint,
+ fname : string,
+ lname : string,
+ age : bigint,
+ dept : string
+};
+
+create dataset employee(Emp) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.2.update.sqlpp
new file mode 100644
index 0000000..73a003d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.2.update.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: special characters in multipart dataverse name
+ */
+
+load dataset `a.b`.`c@d`.employee using localfs ((`path`=`asterix_nc1://data/names.adm`),(`format`=`delimited-text`),(`delimiter`=`|`));
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.3.query.sqlpp
new file mode 100644
index 0000000..84b899d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.3.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: special characters in multipart dataverse name
+ */
+
+use `a.b`.`c@d`;
+
+select element l
+from employee as l
+where ((l.fname = 'Julio') and (l.lname = 'Isa'))
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.4.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.4.query.sqlpp
new file mode 100644
index 0000000..86b6f68
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/special_chars_1/special_chars_1.4.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: special characters in multipart dataverse name
+ */
+
+select element l
+from `a.b`.`c@d`.employee as l
+where ((l.fname = 'Julio') and (l.lname = 'Isa'))
+;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.1.ddl.sqlpp
new file mode 100644
index 0000000..6b01a11
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.1.ddl.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: user-defined function in a dataverse with a multipart name
+ */
+
+drop dataverse x.y if exists;
+create dataverse x.y;
+
+create function x.y.echo(v) {
+ v
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.2.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.2.query.sqlpp
new file mode 100644
index 0000000..76263f7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.2.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: user-defined function in a dataverse with a multipart name
+ */
+
+select value x.y.echo(r)
+from range(1,2) r
+order by r
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.3.query.sqlpp
new file mode 100644
index 0000000..1e5bb2e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/multipart-dataverse/udf_1/udf_1.3.query.sqlpp
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description: user-defined function in a dataverse with a multipart name
+ */
+
+use x.y;
+
+select value echo(r)
+from range(1,2) r
+order by r
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/check-dependencies-1/check-dependencies-1.3.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/check-dependencies-1/check-dependencies-1.3.ddl.sqlpp
index 215118b..85e966a 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/check-dependencies-1/check-dependencies-1.3.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/check-dependencies-1/check-dependencies-1.3.ddl.sqlpp
@@ -26,9 +26,7 @@
drop function C.f3@2;
drop function C.f2@2;
-drop dataset B.TweetMessages2;
-
-drop dataverse C;
drop dataverse B;
+drop dataverse C;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/index_1/index_1.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/index_1/index_1.3.adm
new file mode 100644
index 0000000..99238f8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/index_1/index_1.3.adm
@@ -0,0 +1 @@
+{ "id": 881, "fname": "Julio", "lname": "Isa", "age": 38, "dept": "Sales" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.10.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.10.adm
new file mode 100644
index 0000000..cdd804a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.10.adm
@@ -0,0 +1,2 @@
+{ "z": 11 }
+{ "z": 22 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.11.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.11.adm
new file mode 100644
index 0000000..3f84b21
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.11.adm
@@ -0,0 +1,2 @@
+{ "east": [ 11, 111 ], "west": [ 1111, 11111 ] }
+{ "east": [ 22, 222 ], "west": [ 2222, 22222 ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.4.adm
new file mode 100644
index 0000000..438a875
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.4.adm
@@ -0,0 +1,2 @@
+{ "oid": 100, "pid": 0, "amount": 1, "east": [ 11, 111 ], "x": { "y": { "z": 11 } } }
+{ "oid": 101, "pid": 1, "amount": 2, "east": [ 22, 222 ], "x": { "y": { "z": 22 } } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.5.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.5.adm
new file mode 100644
index 0000000..438a875
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.5.adm
@@ -0,0 +1,2 @@
+{ "oid": 100, "pid": 0, "amount": 1, "east": [ 11, 111 ], "x": { "y": { "z": 11 } } }
+{ "oid": 101, "pid": 1, "amount": 2, "east": [ 22, 222 ], "x": { "y": { "z": 22 } } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.6.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.6.adm
new file mode 100644
index 0000000..438a875
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.6.adm
@@ -0,0 +1,2 @@
+{ "oid": 100, "pid": 0, "amount": 1, "east": [ 11, 111 ], "x": { "y": { "z": 11 } } }
+{ "oid": 101, "pid": 1, "amount": 2, "east": [ 22, 222 ], "x": { "y": { "z": 22 } } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.7.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.7.adm
new file mode 100644
index 0000000..d2f3f89
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.7.adm
@@ -0,0 +1,10 @@
+11
+22
+33
+44
+66
+111
+222
+333
+444
+666
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.8.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.8.adm
new file mode 100644
index 0000000..85a0da8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.8.adm
@@ -0,0 +1,5 @@
+{ "v": 11 }
+{ "v": 22 }
+{ "v": 33 }
+{ "v": 44 }
+{ "v": 66 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.9.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.9.adm
new file mode 100644
index 0000000..9dd18c2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/resolution_1/resolution_1.9.adm
@@ -0,0 +1,2 @@
+{ "orders": { "oid": 100, "pid": 0, "amount": 1, "east": [ 11, 111 ], "x": { "y": { "z": 11 } } } }
+{ "orders": { "oid": 101, "pid": 1, "amount": 2, "east": [ 22, 222 ], "x": { "y": { "z": 22 } } } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.3.adm
new file mode 100644
index 0000000..99238f8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.3.adm
@@ -0,0 +1 @@
+{ "id": 881, "fname": "Julio", "lname": "Isa", "age": 38, "dept": "Sales" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.4.adm
new file mode 100644
index 0000000..99238f8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/special_chars_1/special_chars_1.4.adm
@@ -0,0 +1 @@
+{ "id": 881, "fname": "Julio", "lname": "Isa", "age": 38, "dept": "Sales" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.2.adm
new file mode 100644
index 0000000..7a754f4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.2.adm
@@ -0,0 +1,2 @@
+1
+2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.3.adm
new file mode 100644
index 0000000..7a754f4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/multipart-dataverse/udf_1/udf_1.3.adm
@@ -0,0 +1,2 @@
+1
+2
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/check-dependencies-1/check-dependencies-1.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/check-dependencies-1/check-dependencies-1.1.adm
index 50f19bb..50ba196 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/check-dependencies-1/check-dependencies-1.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/user-defined-functions/check-dependencies-1/check-dependencies-1.1.adm
@@ -1,5 +1,5 @@
{ "DataverseName": "B", "Name": "f0", "Dependencies": [ [ ], [ ] ] }
-{ "DataverseName": "B", "Name": "f5", "Dependencies": [ [ [ "C", "TweetMessages" ], [ "B", "TweetMessages2" ] ], [ [ "C", "f1", "2" ], [ "B", "f0", "2" ] ] ] }
+{ "DataverseName": "B", "Name": "f5", "Dependencies": [ [ [ "B", "TweetMessages2" ], [ "C", "TweetMessages" ] ], [ [ "C", "f1", "2" ], [ "B", "f0", "2" ] ] ] }
{ "DataverseName": "C", "Name": "f1", "Dependencies": [ [ ], [ ] ] }
{ "DataverseName": "C", "Name": "f2", "Dependencies": [ [ [ "C", "TweetMessages" ] ], [ [ "C", "f1", "2" ], [ "B", "f0", "2" ] ] ] }
{ "DataverseName": "C", "Name": "f3", "Dependencies": [ [ ], [ [ "C", "f2", "2" ] ] ] }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index f16b187..3fcce1c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -793,7 +793,7 @@
<output-dir compare="Text">partition-by-nonexistent-field</output-dir>
<expected-error>Field "id" is not found</expected-error>
<expected-error>Cannot find dataset</expected-error>
- <expected-error>Could not find dataset</expected-error>
+ <expected-error>Cannot find dataset</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="misc">
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index d301f29..3a0d50b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -45,7 +45,6 @@
<test-case FilePath="api">
<compilation-unit name="request-param-validation">
<output-dir compare="Text">request-param-validation</output-dir>
- <source-location>false</source-location>
<expected-error>Invalid value for parameter "format": foo</expected-error>
<expected-error>Invalid value for parameter "pretty": bar</expected-error>
<expected-error>Invalid value for parameter "plan-format": blah</expected-error>
@@ -71,6 +70,7 @@
<expected-error>Invalid value for parameter "profile": true</expected-error>
<expected-error>Invalid value for parameter "profile": foo</expected-error>
<expected-error>Invalid value for parameter "profile": foo</expected-error>
+ <source-location>false</source-location>
</compilation-unit>
</test-case>
<test-case FilePath="api">
@@ -6452,6 +6452,28 @@
</compilation-unit>
</test-case>
</test-group>
+ <test-group name="multipart-dataverse">
+ <test-case FilePath="multipart-dataverse">
+ <compilation-unit name="index_1">
+ <output-dir compare="Text">index_1</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="multipart-dataverse">
+ <compilation-unit name="resolution_1">
+ <output-dir compare="Text">resolution_1</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="multipart-dataverse">
+ <compilation-unit name="special_chars_1">
+ <output-dir compare="Text">special_chars_1</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="multipart-dataverse">
+ <compilation-unit name="udf_1">
+ <output-dir compare="Text">udf_1</output-dir>
+ </compilation-unit>
+ </test-case>
+ </test-group>
<test-group name="index">
<test-group name="index/validations">
<test-case FilePath="index/validations">
diff --git a/asterixdb/asterix-common/pom.xml b/asterixdb/asterix-common/pom.xml
index 760e027..2d354a7 100644
--- a/asterixdb/asterix-common/pom.xml
+++ b/asterixdb/asterix-common/pom.xml
@@ -184,6 +184,11 @@
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-collections4</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>org.apache.hyracks</groupId>
<artifactId>algebricks-common</artifactId>
</dependency>
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IMetadataLockManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IMetadataLockManager.java
index bed9869..1b92394 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IMetadataLockManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IMetadataLockManager.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.common.api;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.LockList;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -33,7 +34,7 @@
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDataverseReadLock(LockList locks, String dataverseName) throws AlgebricksException;
+ void acquireDataverseReadLock(LockList locks, DataverseName dataverseName) throws AlgebricksException;
/**
* Acquire write lock on the dataverse
@@ -45,55 +46,67 @@
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDataverseWriteLock(LockList locks, String dataverseName) throws AlgebricksException;
+ void acquireDataverseWriteLock(LockList locks, DataverseName dataverseName) throws AlgebricksException;
/**
* Acquire read lock on the dataset (for queries)
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDatasetReadLock(LockList locks, String datasetFullyQualifiedName) throws AlgebricksException;
+ void acquireDatasetReadLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException;
/**
* Acquire write lock on the dataset (for dataset create, dataset drop, and index drop)
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDatasetWriteLock(LockList locks, String datasetFullyQualifiedName) throws AlgebricksException;
+ void acquireDatasetWriteLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException;
/**
* Acquire modify lock on the dataset (for inserts, upserts, deletes) Mutually exclusive with create index lock
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDatasetModifyLock(LockList locks, String datasetFullyQualifiedName) throws AlgebricksException;
+ void acquireDatasetModifyLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException;
/**
* Acquire create index lock on the dataset (for index creation) Mutually exclusive with modify lock
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDatasetCreateIndexLock(LockList locks, String datasetFullyQualifiedName) throws AlgebricksException;
+ void acquireDatasetCreateIndexLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException;
/**
* Acquire exclusive modify lock on the dataset. only a single thread can acquire this lock and it is mutually
@@ -101,12 +114,14 @@
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDatasetExclusiveModificationLock(LockList locks, String datasetFullyQualifiedName)
+ void acquireDatasetExclusiveModificationLock(LockList locks, DataverseName dataverseName, String datasetName)
throws AlgebricksException;
/**
@@ -114,24 +129,30 @@
*
* @param locks
* the lock list to add the new lock to
- * @param functionFullyQualifiedName
- * the fully qualified name of the function
+ * @param dataverseName
+ * the dataverse name
+ * @param functionName
+ * the name of the function in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireFunctionReadLock(LockList locks, String functionFullyQualifiedName) throws AlgebricksException;
+ void acquireFunctionReadLock(LockList locks, DataverseName dataverseName, String functionName)
+ throws AlgebricksException;
/**
* Acquire write lock on the function
*
* @param locks
* the lock list to add the new lock to
- * @param functionFullyQualifiedName
- * the fully qualified name of the function
+ * @param dataverseName
+ * the dataverse name
+ * @param functionName
+ * the name of the function in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireFunctionWriteLock(LockList locks, String functionFullyQualifiedName) throws AlgebricksException;
+ void acquireFunctionWriteLock(LockList locks, DataverseName dataverseName, String functionName)
+ throws AlgebricksException;
/**
* Acquire read lock on the node group
@@ -162,96 +183,114 @@
*
* @param locks
* the lock list to add the new lock to
- * @param entityFullyQualifiedName
- * the fully qualified name of the active entity
+ * @param dataverseName
+ * the dataverse name
+ * @param entityName
+ * the name of the active entity in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireActiveEntityReadLock(LockList locks, String entityFullyQualifiedName) throws AlgebricksException;
+ void acquireActiveEntityReadLock(LockList locks, DataverseName dataverseName, String entityName)
+ throws AlgebricksException;
/**
* Acquire write lock on the active entity
*
* @param locks
* the lock list to add the new lock to
- * @param entityFullyQualifiedName
- * the fully qualified name of the active entity
+ * @param dataverseName
+ * the dataverse name
+ * @param entityName
+ * the name of the active entity in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireActiveEntityWriteLock(LockList locks, String entityFullyQualifiedName) throws AlgebricksException;
+ void acquireActiveEntityWriteLock(LockList locks, DataverseName dataverseName, String entityName)
+ throws AlgebricksException;
/**
* Acquire read lock on the feed policy
*
* @param locks
* the lock list to add the new lock to
- * @param feedPolicyFullyQualifiedName
- * the fully qualified name of the feed policy
+ * @param dataverseName
+ * the dataverse name
+ * @param feedPolicyName
+ * the name of the feed policy in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireFeedPolicyWriteLock(LockList locks, String feedPolicyFullyQualifiedName) throws AlgebricksException;
+ void acquireFeedPolicyWriteLock(LockList locks, DataverseName dataverseName, String feedPolicyName)
+ throws AlgebricksException;
/**
* Acquire write lock on the feed policy
*
* @param locks
* the lock list to add the new lock to
- * @param feedPolicyFullyQualifiedName
- * the fully qualified name of the feed policy
+ * @param dataverseName
+ * the dataverse name
+ * @param feedPolicyName
+ * the name of the feed policy in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireFeedPolicyReadLock(LockList locks, String feedPolicyFullyQualifiedName) throws AlgebricksException;
+ void acquireFeedPolicyReadLock(LockList locks, DataverseName dataverseName, String feedPolicyName)
+ throws AlgebricksException;
/**
* Acquire read lock on the merge policy
*
* @param locks
* the lock list to add the new lock to
- * @param mergePolicyFullyQualifiedName
- * the fully qualified name of the merge policy
+ * @param mergePolicyName
+     *            the name of the merge policy
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireMergePolicyReadLock(LockList locks, String mergePolicyFullyQualifiedName) throws AlgebricksException;
+ void acquireMergePolicyReadLock(LockList locks, String mergePolicyName) throws AlgebricksException;
/**
* Acquire write lock on the merge policy
*
* @param locks
* the lock list to add the new lock to
- * @param mergePolicyFullyQualifiedName
- * the fully qualified name of the merge policy
+ * @param mergePolicyName
+     *            the name of the merge policy
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireMergePolicyWriteLock(LockList locks, String mergePolicyFullyQualifiedName) throws AlgebricksException;
+ void acquireMergePolicyWriteLock(LockList locks, String mergePolicyName) throws AlgebricksException;
/**
* Acquire read lock on the data type
*
* @param locks
* the lock list to add the new lock to
- * @param datatypeFullyQualifiedName
- * the fully qualified name of the data type
+ * @param dataverseName
+ * the dataverse name
+ * @param datatypeName
+ * the name of the data type in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDataTypeReadLock(LockList locks, String datatypeFullyQualifiedName) throws AlgebricksException;
+ void acquireDataTypeReadLock(LockList locks, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException;
/**
* Acquire write lock on the data type
*
* @param locks
* the lock list to add the new lock to
- * @param datatypeFullyQualifiedName
- * the fully qualified name of the data type
+ * @param dataverseName
+ * the dataverse name
+ * @param datatypeName
+ * the name of the data type in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireDataTypeWriteLock(LockList locks, String datatypeFullyQualifiedName) throws AlgebricksException;
+ void acquireDataTypeWriteLock(LockList locks, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException;
/**
* Acquire read lock on the extension entity
@@ -260,13 +299,15 @@
* the lock list to add the new lock to
* @param extension
* the extension key
- * @param extensionEntityFullyQualifiedName
- * the fully qualified name of the extension entity
+ * @param dataverseName
+ * the dataverse name
+ * @param extensionEntityName
+ * the name of the extension entity in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireExtensionReadLock(LockList locks, String extension, String extensionEntityFullyQualifiedName)
- throws AlgebricksException;
+ void acquireExtensionEntityReadLock(LockList locks, String extension, DataverseName dataverseName,
+ String extensionEntityName) throws AlgebricksException;
/**
* Acquire write lock on the extension entity
@@ -275,36 +316,43 @@
* the lock list to add the new lock to
* @param extension
* the extension key
- * @param extensionEntityFullyQualifiedName
- * the fully qualified name of the extension entity
+ * @param dataverseName
+ * the dataverse name
+ * @param extensionEntityName
+ * the name of the extension entity in the given dataverse
* @throws AlgebricksException
* if lock couldn't be acquired
*/
- void acquireExtensionWriteLock(LockList locks, String extension, String extensionEntityFullyQualifiedName)
- throws AlgebricksException;
+ void acquireExtensionEntityWriteLock(LockList locks, String extension, DataverseName dataverseName,
+ String extensionEntityName) throws AlgebricksException;
/**
* Upgrade a previously acquired exclusive modification lock on the dataset to a write lock
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be upgraded
*/
- void upgradeDatasetLockToWrite(LockList locks, String datasetFullyQualifiedName) throws AlgebricksException;
+ void upgradeDatasetLockToWrite(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException;
/**
* Downgrade an upgraded dataset write lock to an exclusive modification lock
*
* @param locks
* the lock list to add the new lock to
- * @param datasetFullyQualifiedName
- * the fully qualified name of the dataset
+ * @param dataverseName
+ * the dataverse name
+ * @param datasetName
+ * the name of the dataset in the given dataverse
* @throws AlgebricksException
* if lock couldn't be downgraded
*/
- void downgradeDatasetLockToExclusiveModify(LockList locks, String datasetFullyQualifiedName)
+ void downgradeDatasetLockToExclusiveModify(LockList locks, DataverseName dataverseName, String datasetName)
throws AlgebricksException;
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionConstants.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionConstants.java
index 9e7859c..7343a11 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionConstants.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionConstants.java
@@ -18,8 +18,18 @@
*/
package org.apache.asterix.common.functions;
-public interface FunctionConstants {
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+
+public final class FunctionConstants {
public static final String ASTERIX_NS = "asterix";
+ public static final DataverseName ASTERIX_DV = DataverseName.createBuiltinDataverseName(ASTERIX_NS);
+
+ public static final DataverseName ALGEBRICKS_DV =
+ DataverseName.createBuiltinDataverseName(AlgebricksBuiltinFunctions.ALGEBRICKS_NS);
+
+ private FunctionConstants() {
+ }
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
index d7b054d..9bbf2e7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
@@ -19,21 +19,28 @@
package org.apache.asterix.common.functions;
import java.io.Serializable;
+import java.util.Objects;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
public class FunctionSignature implements Serializable {
- private static final long serialVersionUID = 1L;
- private String namespace;
+
+ private static final long serialVersionUID = 2L;
+
+ private DataverseName dataverseName;
+
private String name;
+
private int arity;
public FunctionSignature(FunctionIdentifier fi) {
- this(fi.getNamespace(), fi.getName(), fi.getArity());
+ this(getDataverseName(fi), fi.getName(), fi.getArity());
}
- public FunctionSignature(String namespace, String name, int arity) {
- this.namespace = namespace;
+ public FunctionSignature(DataverseName dataverseName, String name, int arity) {
+ this.dataverseName = dataverseName;
this.name = name;
this.arity = arity;
}
@@ -42,27 +49,36 @@
public boolean equals(Object o) {
if (!(o instanceof FunctionSignature)) {
return false;
- } else {
- FunctionSignature f = ((FunctionSignature) o);
- return ((namespace != null && namespace.equals(f.getNamespace())
- || (namespace == null && f.getNamespace() == null))) && name.equals(f.getName())
- && (arity == f.getArity() || arity == FunctionIdentifier.VARARGS
- || f.getArity() == FunctionIdentifier.VARARGS);
}
+ FunctionSignature f = ((FunctionSignature) o);
+ return Objects.equals(dataverseName, f.dataverseName) && name.equals(f.name)
+ && (arity == f.arity || arity == FunctionIdentifier.VARARGS || f.arity == FunctionIdentifier.VARARGS);
}
@Override
public String toString() {
- return namespace + "." + name + "@" + arity;
+ return toString(true);
+ }
+
+ public String toString(boolean includeArity) {
+ String namespaceCanonicalForm = dataverseName != null ? dataverseName.getCanonicalForm() : null;
+ int len = (namespaceCanonicalForm != null ? namespaceCanonicalForm.length() : 4) + 1 + name.length()
+ + (includeArity ? 3 : 0);
+ StringBuilder sb = new StringBuilder(len);
+ sb.append(namespaceCanonicalForm).append('.').append(name);
+ if (includeArity) {
+ sb.append('@').append(arity);
+ }
+ return sb.toString();
}
@Override
public int hashCode() {
- return (namespace + "." + name).hashCode();
+ return Objects.hash(dataverseName, name);
}
- public String getNamespace() {
- return namespace;
+ public DataverseName getDataverseName() {
+ return dataverseName;
}
public String getName() {
@@ -73,8 +89,8 @@
return arity;
}
- public void setNamespace(String namespace) {
- this.namespace = namespace;
+ public void setDataverseName(DataverseName dataverseName) {
+ this.dataverseName = dataverseName;
}
public void setName(String name) {
@@ -85,4 +101,28 @@
this.arity = arity;
}
+ public FunctionIdentifier createFunctionIdentifier() {
+ return createFunctionIdentifier(dataverseName, name, arity);
+ }
+
+ public static FunctionIdentifier createFunctionIdentifier(DataverseName dataverseName, String functionName,
+ int arity) {
+ return new FunctionIdentifier(dataverseName.getCanonicalForm(), functionName, arity);
+ }
+
+ public static FunctionIdentifier createFunctionIdentifier(DataverseName dataverseName, String functionName) {
+ return new FunctionIdentifier(dataverseName.getCanonicalForm(), functionName);
+ }
+
+ public static DataverseName getDataverseName(FunctionIdentifier fi) {
+ String dataverseCanonicalName = fi.getNamespace();
+ switch (dataverseCanonicalName) {
+ case FunctionConstants.ASTERIX_NS:
+ return FunctionConstants.ASTERIX_DV;
+ case AlgebricksBuiltinFunctions.ALGEBRICKS_NS:
+ return FunctionConstants.ALGEBRICKS_DV;
+ default:
+ return DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+ }
+ }
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/library/ILibraryManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/library/ILibraryManager.java
index c1598d9..d7f4309 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/library/ILibraryManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/library/ILibraryManager.java
@@ -22,6 +22,7 @@
import java.net.URLClassLoader;
import java.util.List;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,10 +31,11 @@
/**
* Registers the library class loader with the external library manager.
* <code>dataverseName</code> and <code>libraryName</code> uniquely identifies a class loader.
+ * @param dataverseName
* @param libraryName
* @param classLoader
*/
- void registerLibraryClassLoader(String dataverseName, String libraryName, URLClassLoader classLoader)
+ void registerLibraryClassLoader(DataverseName dataverseName, String libraryName, URLClassLoader classLoader)
throws HyracksDataException;
/**
@@ -43,11 +45,10 @@
/**
* De-registers a library class loader.
- *
- * @param dataverseName
+ * @param dataverseName
* @param libraryName
*/
- void deregisterLibraryClassLoader(String dataverseName, String libraryName);
+ void deregisterLibraryClassLoader(DataverseName dataverseName, String libraryName);
/**
* Finds a class loader for a given pair of dataverse name and library name.
@@ -56,7 +57,7 @@
* @param libraryName
* @return the library class loader associated with the dataverse and library.
*/
- ClassLoader getLibraryClassLoader(String dataverseName, String libraryName);
+ ClassLoader getLibraryClassLoader(DataverseName dataverseName, String libraryName);
/**
* Add function parameters to library manager if it exists.
@@ -65,7 +66,7 @@
* @param parameters
*/
- void addFunctionParameters(String dataverseName, String fullFunctionName, List<String> parameters);
+ void addFunctionParameters(DataverseName dataverseName, String fullFunctionName, List<String> parameters);
/**
* Get a list of parameters.
@@ -73,5 +74,5 @@
* @param fullFunctionName
* @return A list contains all pre-specified function parameters.
*/
- List<String> getFunctionParameters(String dataverseName, String fullFunctionName);
+ List<String> getFunctionParameters(DataverseName dataverseName, String fullFunctionName);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/DataverseName.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/DataverseName.java
new file mode 100644
index 0000000..5ec3f1a
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/DataverseName.java
@@ -0,0 +1,335 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.metadata;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Objects;
+import java.util.function.BiConsumer;
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * This class represents a dataverse name.
+ * The logical model is an ordered list of strings (name parts).
+ * Use {@link #create(List)} to create a dataverse name from its parts
+ * and {@link #getParts()} to obtain a list of parts from given dataverse name.
+ * <p>
+ * Each dataverse name can be encoded into a single string (called a canonical form) by
+ * {@link #getCanonicalForm()} and decoded back from it with {@link #createFromCanonicalForm(String)}.
+ * The canonical form encoding concatenates name parts together with {@link #SEPARATOR_CHAR '.'} character.
+ * The {@link #ESCAPE_CHAR '@'} character is used to escape {@link #SEPARATOR_CHAR '.'} and itself in each name part
+ * prior to concatenation.
+ * <p>
+ * E.g. the canonical form for a dataverse name {@code ["a", "b", "c"]} is {@code "a.b.c"}
+ * <p>
+ * {@link #toString()} returns a display form which is a {@link #SEPARATOR_CHAR '.'} separated concatenation
+ * of name parts without escaping. In general it's impossible to reconstruct a dataverse name from its display form.
+ * <p>
+ * Notes:
+ * <ul>
+ * <li>
+ * {@link #getCanonicalForm()} is faster than {@link #getParts()} because this class stores the canonical form,
+ * so {@link #getCanonicalForm()} just returns it while {@link #getParts()} performs parsing and string construction
+ * for each name part.
+ * </li>
+ * <li>
+ * {@link #toString()} result is cached, subsequent invocations just return the cached value.
+ * </li>
+ * </ul>
+ */
+public final class DataverseName implements Serializable, Comparable<DataverseName> {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final char SEPARATOR_CHAR = '.';
+
+ private static final char ESCAPE_CHAR = '@';
+
+ private static final char[] SEPARATOR_AND_ESCAPE_CHARS = new char[] { SEPARATOR_CHAR, ESCAPE_CHAR };
+
+ private final boolean isMultiPart;
+
+ private final String canonicalForm;
+
+ private transient volatile String displayForm;
+
+ private DataverseName(String canonicalForm, boolean isMultiPart) {
+ this.canonicalForm = Objects.requireNonNull(canonicalForm);
+ this.isMultiPart = isMultiPart;
+ }
+
+ /**
+ * Returns whether this dataverse name contains multiple name parts or not.
+ */
+ public boolean isMultiPart() {
+ return isMultiPart;
+ }
+
+ /**
+ * Returns a scalar encoding of this dataverse name.
+ * The returned value can be used to reconstruct this name by calling {@link #createFromCanonicalForm(String)}.
+ * <p>
+ * Warning: changing the canonical form encoding will impact backwards compatibility because it's stored in the
+ * metadata datasets and might be returned to users through public APIs.
+ */
+ public String getCanonicalForm() {
+ return canonicalForm;
+ }
+
+ /**
+ * Returns a new list containing dataverse name parts
+ */
+ public List<String> getParts() {
+ List<String> parts = new ArrayList<>(isMultiPart ? 4 : 1);
+ getParts(parts);
+ return parts;
+ }
+
+ /**
+ * Appends dataverse name parts into a given list
+ */
+ public void getParts(Collection<? super String> outParts) {
+ if (isMultiPart) {
+ decodeCanonicalForm(canonicalForm, DataverseName::addPartToCollection, outParts);
+ } else {
+ outParts.add(decodeSinglePartNameFromCanonicalForm(canonicalForm));
+ }
+ }
+
+ /**
+ * Returns a display form which is a {@link #SEPARATOR_CHAR '.'} separated concatenation of name parts without
+ * escaping. In general it's impossible to reconstruct a dataverse name from its display form, so this method
+ * should not be used when roundtripability is required.
+ */
+ @Override
+ public String toString() {
+ return getDisplayForm();
+ }
+
+ private String getDisplayForm() {
+ String result = displayForm;
+ if (result == null) {
+ displayForm = result = createDisplayForm();
+ }
+ return result;
+ }
+
+ private String createDisplayForm() {
+ if (isMultiPart) {
+ StringBuilder displayForm = new StringBuilder(canonicalForm.length() + 1);
+ decodeCanonicalForm(canonicalForm, DataverseName::addPartToDisplayForm, displayForm);
+ return displayForm.substring(0, displayForm.length() - 1); // remove last separator char
+ } else {
+ return decodeSinglePartNameFromCanonicalForm(canonicalForm);
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return canonicalForm.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof DataverseName)) {
+ return false;
+ }
+ DataverseName that = (DataverseName) obj;
+ return canonicalForm.equals(that.canonicalForm);
+ }
+
+ @Override
+ public int compareTo(DataverseName that) {
+ return canonicalForm.compareTo(that.canonicalForm);
+ }
+
+ /**
+ * Creates a new dataverse name from a given list of name parts.
+ * Equivalent to {@code create(parts, 0, parts.size())}.
+ */
+ public static DataverseName create(List<String> parts) {
+ return create(parts, 0, parts.size());
+ }
+
+ /**
+ * Creates a new dataverse name from a given list of name parts.
+ *
+ * @param parts
+ * list of name parts
+ * @param fromIndex
+ * index to start from
+ * @param toIndex
+ * index to stop at (exclusive, value at that index is not used)
+ */
+ public static DataverseName create(List<String> parts, int fromIndex, int toIndex) {
+ int partCount = toIndex - fromIndex;
+ return partCount == 1 ? createSinglePartName(parts.get(fromIndex))
+ : createMultiPartName(parts, fromIndex, toIndex);
+ }
+
+ /**
+ * Creates a new dataverse name from its scalar encoding (canonical form) returned by {@link #getCanonicalForm()}
+ */
+ public static DataverseName createFromCanonicalForm(String canonicalForm) {
+ boolean isMultiPart = isMultiPartCanonicalForm(canonicalForm);
+ return new DataverseName(canonicalForm, isMultiPart);
+ }
+
+ /**
+ * Creates a single-part dataverse name.
+ * Equivalent to {@code create(Collections.singletonList(singlePart))}, but performs faster.
+ */
+ public static DataverseName createSinglePartName(String singlePart) {
+ String canonicalForm = encodeSinglePartNamePartIntoCanonicalForm(singlePart);
+ return new DataverseName(canonicalForm, false);
+ }
+
+ /**
+ * Creates a new dataverse name for a built-in dataverse.
+ * Validates that the canonical form of the created dataverse name is the same as its given single name part.
+ */
+ public static DataverseName createBuiltinDataverseName(String singlePart) {
+ if (StringUtils.containsAny(singlePart, SEPARATOR_AND_ESCAPE_CHARS)) {
+ throw new IllegalArgumentException(singlePart);
+ }
+ DataverseName dataverseName = createSinglePartName(singlePart); // 1-part name
+ String canonicalForm = dataverseName.getCanonicalForm();
+ if (!canonicalForm.equals(singlePart)) {
+ throw new IllegalStateException(canonicalForm + "!=" + singlePart);
+ }
+ return dataverseName;
+ }
+
+ private static DataverseName createMultiPartName(List<String> parts, int fromIndex, int toIndex) {
+ String canonicalForm = encodeMultiPartNameIntoCanonicalForm(parts, fromIndex, toIndex);
+ return new DataverseName(canonicalForm, true);
+ }
+
+ private static String encodeMultiPartNameIntoCanonicalForm(List<String> parts, int fromIndex, int toIndex) {
+ Objects.requireNonNull(parts);
+ int partCount = toIndex - fromIndex;
+ if (partCount <= 0) {
+ throw new IllegalArgumentException(fromIndex + "," + toIndex);
+ }
+ StringBuilder sb = new StringBuilder(32);
+ for (int i = 0; i < partCount; i++) {
+ if (i > 0) {
+ sb.append(SEPARATOR_CHAR);
+ }
+ encodePartIntoCanonicalForm(parts.get(fromIndex + i), sb);
+ }
+ return sb.toString();
+ }
+
+ private static String encodeSinglePartNamePartIntoCanonicalForm(String singlePart) {
+ if (StringUtils.indexOfAny(singlePart, SEPARATOR_AND_ESCAPE_CHARS) < 0) {
+ // no escaping needed
+ return singlePart;
+ }
+ StringBuilder sb = new StringBuilder(singlePart.length() + 4);
+ encodePartIntoCanonicalForm(singlePart, sb);
+ return sb.toString();
+ }
+
+ private static void encodePartIntoCanonicalForm(String part, StringBuilder out) {
+ for (int i = 0, ln = part.length(); i < ln; i++) {
+ char c = part.charAt(i);
+ if (c == SEPARATOR_CHAR || c == ESCAPE_CHAR) {
+ out.append(ESCAPE_CHAR);
+ }
+ out.append(c);
+ }
+ }
+
+ private static <T> void decodeCanonicalForm(String canonicalForm, BiConsumer<CharSequence, T> partConsumer,
+ T partConsumerArg) {
+ int ln = canonicalForm.length();
+ StringBuilder sb = new StringBuilder(ln);
+ for (int i = 0; i < ln; i++) {
+ char c = canonicalForm.charAt(i);
+ switch (c) {
+ case SEPARATOR_CHAR:
+ partConsumer.accept(sb, partConsumerArg);
+ sb.setLength(0);
+ break;
+ case ESCAPE_CHAR:
+ i++;
+ c = canonicalForm.charAt(i);
+ // fall through to 'default'
+ default:
+ sb.append(c);
+ break;
+ }
+ }
+ if (sb.length() > 0) {
+ partConsumer.accept(sb, partConsumerArg);
+ }
+ }
+
+ // optimization for a single part name
+ private String decodeSinglePartNameFromCanonicalForm(String canonicalForm) {
+ if (canonicalForm.indexOf(ESCAPE_CHAR) < 0) {
+ // no escaping was done
+ return canonicalForm;
+ }
+
+ StringBuilder singlePart = new StringBuilder(canonicalForm.length());
+ for (int i = 0, ln = canonicalForm.length(); i < ln; i++) {
+ char c = canonicalForm.charAt(i);
+ switch (c) {
+ case SEPARATOR_CHAR:
+ throw new IllegalStateException(canonicalForm); // should never happen
+ case ESCAPE_CHAR:
+ i++;
+ c = canonicalForm.charAt(i);
+ // fall through to 'default'
+ default:
+ singlePart.append(c);
+ break;
+ }
+ }
+ return singlePart.toString();
+ }
+
+ private static boolean isMultiPartCanonicalForm(String canonicalForm) {
+ for (int i = 0, ln = canonicalForm.length(); i < ln; i++) {
+ char c = canonicalForm.charAt(i);
+ switch (c) {
+ case SEPARATOR_CHAR:
+ return true;
+ case ESCAPE_CHAR:
+ i++;
+ break;
+ }
+ }
+ return false;
+ }
+
+ private static void addPartToCollection(CharSequence part, Collection<? super String> out) {
+ out.add(part.toString());
+ }
+
+ private static void addPartToDisplayForm(CharSequence part, StringBuilder out) {
+ out.append(part).append(SEPARATOR_CHAR);
+ }
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IDataset.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IDataset.java
index 276e294..55c5b97 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IDataset.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IDataset.java
@@ -25,7 +25,7 @@
/**
* @return the dataverse name
*/
- String getDataverseName();
+ DataverseName getDataverseName();
/**
* @return the dataset name
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLock.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLock.java
index ebae47e..1f77aa0 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLock.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataLock.java
@@ -49,6 +49,14 @@
}
}
+ interface LockKey {
+ @Override
+ boolean equals(Object o);
+
+ @Override
+ int hashCode();
+ }
+
/**
* Acquire a lock
*
@@ -68,9 +76,9 @@
/**
* Get the lock's key
*
- * @return the key identiying the lock
+ * @return the key identifying the lock
*/
- String getKey();
+ LockKey getKey();
/**
* upgrade the lock
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/LockList.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/LockList.java
index 6f2bc39..43a1849 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/LockList.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/LockList.java
@@ -21,6 +21,7 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.ErrorCode;
@@ -33,7 +34,7 @@
*/
public class LockList {
private final List<MutablePair<IMetadataLock, IMetadataLock.Mode>> locks = new ArrayList<>();
- private final HashMap<String, Integer> indexes = new HashMap<>();
+ private final Map<IMetadataLock.LockKey, Integer> indexes = new HashMap<>();
private boolean lockPhase = true;
/**
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/DatasetCopyIdentifier.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/DatasetCopyIdentifier.java
index 6500c8a..bf72c19 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/DatasetCopyIdentifier.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/DatasetCopyIdentifier.java
@@ -21,20 +21,22 @@
import java.io.Serializable;
import java.util.Objects;
+import org.apache.asterix.common.metadata.DataverseName;
+
public class DatasetCopyIdentifier implements Serializable {
private static final long serialVersionUID = 1L;
+ private final DataverseName dataverse;
private final String dataset;
- private final String dataverse;
private final String rebalance;
- private DatasetCopyIdentifier(String dataverse, String datasetName, String rebalance) {
+ private DatasetCopyIdentifier(DataverseName dataverse, String datasetName, String rebalance) {
this.dataverse = dataverse;
this.dataset = datasetName;
this.rebalance = rebalance;
}
- public static DatasetCopyIdentifier of(String dataverse, String datasetName, String rebalance) {
+ public static DatasetCopyIdentifier of(DataverseName dataverse, String datasetName, String rebalance) {
return new DatasetCopyIdentifier(dataverse, datasetName, rebalance);
}
@@ -64,13 +66,13 @@
return Objects.hash(dataverse, dataset, rebalance);
}
- public String getDataverse() {
+ public DataverseName getDataverse() {
return dataverse;
}
public boolean isMatch(ResourceReference resourceReference) {
- return resourceReference.getDataverse().equals(dataverse) && resourceReference.getDataset().equals(dataset)
- && resourceReference.getRebalance().equals(rebalance);
+ return resourceReference.getDataverse().equals(dataverse.getCanonicalForm())
+ && resourceReference.getDataset().equals(dataset) && resourceReference.getRebalance().equals(rebalance);
}
@Override
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/ResourceReference.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/ResourceReference.java
index ae949fe..9d934e4 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/ResourceReference.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/storage/ResourceReference.java
@@ -29,7 +29,7 @@
protected String root;
protected String partition;
- protected String dataverse;
+ protected String dataverse; // == DataverseName.getCanonicalForm()
protected String dataset;
protected String rebalance;
protected String index;
@@ -52,7 +52,7 @@
return partition;
}
- public String getDataverse() {
+ public String getDataverse() { //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
return dataverse;
}
@@ -96,7 +96,7 @@
ref.index = tokens[--offset];
ref.rebalance = tokens[--offset];
ref.dataset = tokens[--offset];
- ref.dataverse = tokens[--offset];
+ ref.dataverse = tokens[--offset]; //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
ref.partition = tokens[--offset];
ref.root = tokens[--offset];
}
@@ -105,10 +105,6 @@
return Integer.parseInt(partition.substring(StorageConstants.PARTITION_DIR_PREFIX.length()));
}
- public String getDatasetFullyQualifiedName() {
- return dataverse + '.' + dataset;
- }
-
@Override
public boolean equals(Object o) {
if (this == o) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
index aa2c7af..587b8b3 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@ -23,6 +23,7 @@
import java.nio.file.Paths;
import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.storage.ResourceReference;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
@@ -63,13 +64,13 @@
.toString();
}
- public static String prepareDataverseIndexName(String dataverseName, String datasetName, String idxName,
+ public static String prepareDataverseIndexName(DataverseName dataverseName, String datasetName, String idxName,
long rebalanceCount) {
return prepareDataverseIndexName(dataverseName, prepareFullIndexName(datasetName, idxName, rebalanceCount));
}
- public static String prepareDataverseIndexName(String dataverseName, String fullIndexName) {
- return dataverseName + File.separator + fullIndexName;
+ public static String prepareDataverseIndexName(DataverseName dataverseName, String fullIndexName) {
+ return dataverseName.getCanonicalForm() + File.separator + fullIndexName; //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
}
private static String prepareFullIndexName(String datasetName, String idxName, long rebalanceCount) {
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/common/metadata/DataverseNameTest.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/common/metadata/DataverseNameTest.java
new file mode 100644
index 0000000..1a47a3f
--- /dev/null
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/common/metadata/DataverseNameTest.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.metadata;
+
+import static org.apache.asterix.common.functions.FunctionConstants.ASTERIX_NS;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Supplier;
+
+import org.apache.commons.collections4.ListUtils;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Unit test for {@link DataverseName}
+ */
+public class DataverseNameTest {
+
+ private static final List<String> TEST_BUILTIN_DATAVERSE_NAME_PARAMS = Arrays.asList(
+ // 1-part-name
+ // default dataverse
+ "Default",
+ // metadata dataverse
+ "Metadata",
+ // dataverse for Algebricks functions
+ AlgebricksBuiltinFunctions.ALGEBRICKS_NS,
+ // dataverse for Asterix functions
+ ASTERIX_NS);
+
+ private static final List<String> TEST_BUILTIN_DATAVERSE_INVALID_NAME_PARAMS = Arrays.asList(
+ // separator character is not allowed
+ "a.b",
+ // escape character is not allowed
+ "c@d");
+
+ private static final List<Pair<String, String>> TEST_SINGLE_PART_NAME_PARAMS = Arrays.asList(
+ // <1-part-name, canonical-form>
+ new Pair<>("abc", "abc"),
+ // with escape character
+ new Pair<>("a@b", "a@@b"),
+ // with separator character
+ new Pair<>("a.b", "a@.b"),
+ // with both escape and separator characters
+ new Pair<>("a@.b", "a@@@.b"));
+
+ private static final List<Pair<List<String>, String>> TEST_MULTI_PART_NAME_PARAMS = Arrays.asList(
+ // <multi-part-name, canonical-form>
+ new Pair<>(Arrays.asList("aa", "bb", "cc"), "aa.bb.cc"),
+ // with escape character
+ new Pair<>(Arrays.asList("a@a@", "@b@b", "@c@c"), "a@@a@@.@@b@@b.@@c@@c"),
+ // with separator character
+ new Pair<>(Arrays.asList("a.a.", ".b.b.", ".c.c"), "a@.a@..@.b@.b@..@.c@.c"),
+ // with both escape and separator characters
+ new Pair<>(Arrays.asList("a@a.", "@b.b@", ".c@c"), "a@@a@..@@b@.b@@.@.c@@c"),
+ // with both escape and separator characters repeated
+ new Pair<>(Arrays.asList("a@@a..", "@@b..b@@", "..c@@c"), "a@@@@a@.@..@@@@b@.@.b@@@@.@.@.c@@@@c"));
+
+ @Test
+ public void testBuiltinDataverseName() {
+ for (String p : TEST_BUILTIN_DATAVERSE_NAME_PARAMS) {
+ testBuiltinDataverseNameImpl(p);
+ }
+ }
+
+ @Test
+ public void testSinglePartName() {
+ for (Pair<String, String> p : TEST_SINGLE_PART_NAME_PARAMS) {
+ String singlePart = p.first;
+ String expectedCanonicalForm = p.second;
+ testSinglePartNameImpl(singlePart, expectedCanonicalForm);
+ }
+ }
+
+ @Test
+ public void testMultiPartName() {
+ // test single part names
+ for (Pair<String, String> p : TEST_SINGLE_PART_NAME_PARAMS) {
+ List<String> parts = Collections.singletonList(p.first);
+ String expectedCanonicalForm = p.second;
+ testMultiPartNameImpl(parts, expectedCanonicalForm);
+ }
+ // test multi part names
+ for (Pair<List<String>, String> p : TEST_MULTI_PART_NAME_PARAMS) {
+ List<String> parts = p.first;
+ String expectedCanonicalForm = p.second;
+ testMultiPartNameImpl(parts, expectedCanonicalForm);
+ }
+ }
+
+ private void testBuiltinDataverseNameImpl(String singlePart) {
+ DataverseName dvBuiltin = DataverseName.createBuiltinDataverseName(singlePart);
+ DataverseName dv = DataverseName.createSinglePartName(singlePart);
+ Assert.assertEquals("same-builtin", dv, dvBuiltin);
+ // part = canonical-form = persistent-form for builtins
+ testSinglePartNameImpl(singlePart, singlePart);
+ }
+
+ private void testSinglePartNameImpl(String singlePart, String expectedCanonicalForm) {
+ List<String> parts = Collections.singletonList(singlePart);
+
+ // construction using createSinglePartName()
+ DataverseName dvConstr1 = DataverseName.createSinglePartName(singlePart);
+ testDataverseNameImpl(dvConstr1, parts, expectedCanonicalForm);
+
+ // construction using create(list)
+ DataverseName dvConstr2 = DataverseName.create(Collections.singletonList(singlePart));
+ testDataverseNameImpl(dvConstr2, parts, expectedCanonicalForm);
+
+ // construction using create(list, from, to)
+ DataverseName dvConstr3 = DataverseName.create(Arrays.asList(null, null, singlePart, null, null), 2, 3);
+ testDataverseNameImpl(dvConstr3, parts, expectedCanonicalForm);
+ }
+
+ private void testMultiPartNameImpl(List<String> parts, String expectedCanonicalForm) {
+ // construction using create(list)
+ DataverseName dvConstr1 = DataverseName.create(parts);
+ testDataverseNameImpl(dvConstr1, parts, expectedCanonicalForm);
+
+ // construction using create(list, from, to)
+ List<String> dv2InputParts =
+ ListUtils.union(ListUtils.union(Arrays.asList(null, null), parts), Arrays.asList(null, null));
+ DataverseName dvConstr2 = DataverseName.create(dv2InputParts, 2, 2 + parts.size());
+ testDataverseNameImpl(dvConstr2, parts, expectedCanonicalForm);
+ }
+
+ private void testDataverseNameImpl(DataverseName dataverseName, List<String> parts, String expectedCanonicalForm) {
+ boolean isMultiPart = parts.size() > 1;
+ Assert.assertEquals("is-multipart", isMultiPart, dataverseName.isMultiPart());
+
+ // test getParts()
+ Assert.assertArrayEquals("get-parts-0", parts.toArray(), dataverseName.getParts().toArray());
+ List<String> outParts = new ArrayList<>();
+ dataverseName.getParts(outParts);
+ Assert.assertArrayEquals("get-parts-1", parts.toArray(), outParts.toArray());
+
+ // test canonical form
+ Assert.assertEquals("canonical-form", expectedCanonicalForm, dataverseName.getCanonicalForm());
+ DataverseName dvFromCanonical = DataverseName.createFromCanonicalForm(expectedCanonicalForm);
+ Assert.assertEquals("canonical-form-round-trip", dataverseName, dvFromCanonical);
+ Assert.assertEquals("canonical-form-round-trip-cmp", 0, dataverseName.compareTo(dvFromCanonical));
+ Assert.assertEquals("canonical-form-round-trip-hash", dataverseName.hashCode(), dvFromCanonical.hashCode());
+
+ // test display form
+ String expectedDisplayForm = String.join(".", parts);
+ Assert.assertEquals("display-form", expectedDisplayForm, dataverseName.toString());
+ }
+
+ @Test
+ public void testCompare() {
+ List<DataverseName> dvList =
+ Arrays.asList(DataverseName.createSinglePartName("a"), DataverseName.create(Arrays.asList("a", "a")),
+ DataverseName.createSinglePartName("aa"), DataverseName.createSinglePartName("b"));
+
+ for (int i = 0; i < dvList.size() - 1; i++) {
+ for (int j = i + 1; j < dvList.size(); j++) {
+ testCompareImpl(dvList.get(i), dvList.get(j));
+ }
+ }
+ }
+
+ private void testCompareImpl(DataverseName left, DataverseName right) {
+ String label = left.getCanonicalForm() + " ? " + right.getCanonicalForm();
+ Assert.assertNotEquals(left, right);
+ Assert.assertTrue(label, left.compareTo(right) < 0);
+ Assert.assertTrue(label, right.compareTo(left) > 0);
+ }
+
+ @Test
+ public void testExceptions() {
+ // 1. Invalid names for builtin dataverses
+ for (String p : TEST_BUILTIN_DATAVERSE_INVALID_NAME_PARAMS) {
+ testInvalidBuiltinDataverseNameImpl(p);
+ }
+ // 2. NullPointerException
+ testRuntimeException(() -> DataverseName.create(null), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.create(null, 0, 0), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.create(null, 0, 1), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.create(null, 0, 2), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.createSinglePartName(null), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.createBuiltinDataverseName(null), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.createFromCanonicalForm(null), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.create(Collections.singletonList(null)), NullPointerException.class);
+ testRuntimeException(() -> DataverseName.create(Arrays.asList(null, null)), NullPointerException.class);
+ // 3. IndexOutOfBoundsException
+ testRuntimeException(() -> DataverseName.create(Collections.emptyList(), 0, 1),
+ IndexOutOfBoundsException.class);
+ testRuntimeException(() -> DataverseName.create(Collections.emptyList(), 0, 2),
+ IndexOutOfBoundsException.class);
+ // 4. IllegalArgumentException
+ testRuntimeException(() -> DataverseName.create(Collections.emptyList()), IllegalArgumentException.class);
+ testRuntimeException(() -> DataverseName.create(Collections.emptyList(), 0, 0), IllegalArgumentException.class);
+ testRuntimeException(() -> DataverseName.create(Arrays.asList("a", "b", "c"), 2, 1),
+ IllegalArgumentException.class);
+ }
+
+ private <E extends RuntimeException> void testRuntimeException(Supplier<DataverseName> supplier,
+ Class<E> exceptionClass) {
+ try {
+ supplier.get();
+ Assert.fail("Did not get expected exception " + exceptionClass.getName());
+ } catch (RuntimeException e) {
+ if (!exceptionClass.isInstance(e)) {
+ try {
+ Assert.fail(
+ "Expected to catch " + exceptionClass.getName() + ", but caught " + e.getClass().getName());
+ } catch (AssertionError ae) {
+ ae.initCause(e);
+ throw ae;
+ }
+ }
+ }
+ }
+
+ private void testInvalidBuiltinDataverseNameImpl(String singlePart) {
+ try {
+ DataverseName.createBuiltinDataverseName(singlePart);
+ Assert.fail(singlePart);
+ } catch (IllegalArgumentException e) {
+ // this error is expected
+ }
+ }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
index 9a466d6..9f27b41 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
@@ -20,22 +20,24 @@
import java.io.Serializable;
+import org.apache.asterix.common.metadata.DataverseName;
+
/**
* A unique identifier for a data source adapter.
*/
public class AdapterIdentifier implements Serializable {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String adapterName;
- public AdapterIdentifier(String namespace, String name) {
- this.dataverseName = namespace;
+ public AdapterIdentifier(DataverseName dataverse, String name) {
+ this.dataverseName = dataverse;
this.adapterName = name;
}
- public String getNamespace() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -61,6 +63,6 @@
return false;
}
AdapterIdentifier a = (AdapterIdentifier) o;
- return dataverseName.equals(a.getNamespace()) && adapterName.equals(a.getName());
+ return dataverseName.equals(a.dataverseName) && adapterName.equals(a.adapterName);
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
index 2acf394..dd1d1b7 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
@@ -22,12 +22,13 @@
import java.util.Map;
import org.apache.asterix.active.EntityId;
+import org.apache.asterix.common.metadata.DataverseName;
public interface IFeed extends Serializable {
public String getFeedName();
- public String getDataverseName();
+ public DataverseName getDataverseName();
public EntityId getFeedId();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
index fab5a8f..73edc6e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
@@ -21,6 +21,7 @@
import java.io.Serializable;
import org.apache.asterix.active.EntityId;
+import org.apache.asterix.common.metadata.DataverseName;
/**
* A unique identifier for a feed connection. A feed connection is an instance of a data feed that is flowing into a
@@ -41,8 +42,8 @@
this.hash = toString().hashCode();
}
- public FeedConnectionId(String dataverse, String feedName, String datasetName) {
- this(new EntityId(FEED_EXTENSION_NAME, dataverse, feedName), datasetName);
+ public FeedConnectionId(DataverseName dataverseName, String feedName, String datasetName) {
+ this(new EntityId(FEED_EXTENSION_NAME, dataverseName, feedName), datasetName);
}
public EntityId getFeedId() {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
index da5907c..79c5281 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
@@ -21,10 +21,12 @@
import java.io.Serializable;
import java.util.Map;
+import org.apache.asterix.common.metadata.DataverseName;
+
public class FeedPolicy implements Serializable {
- private static final long serialVersionUID = 1L;
- private final String dataverseName;
+ private static final long serialVersionUID = 2L;
+ private final DataverseName dataverseName;
// Enforced to be unique within a dataverse.
private final String policyName;
// A description of the policy
@@ -32,14 +34,15 @@
// The policy properties associated with the feed dataset
private Map<String, String> properties;
- public FeedPolicy(String dataverseName, String policyName, String description, Map<String, String> properties) {
+ public FeedPolicy(DataverseName dataverseName, String policyName, String description,
+ Map<String, String> properties) {
this.dataverseName = dataverseName;
this.policyName = policyName;
this.description = description;
this.properties = properties;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFile.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFile.java
index 8fbab3c..26a3a76 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFile.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/ExternalFile.java
@@ -24,6 +24,7 @@
import java.util.Objects;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import org.apache.asterix.common.metadata.DataverseName;
public class ExternalFile implements Serializable, Comparable<ExternalFile> {
@@ -33,7 +34,7 @@
*/
private static final long serialVersionUID = 1L;
- private String dataverseName;
+ private DataverseName dataverseName;
private String datasetName;
private Date lastModefiedTime;
private long size;
@@ -42,7 +43,7 @@
private ExternalFilePendingOp pendingOp;
public ExternalFile() {
- this.dataverseName = "";
+ this.dataverseName = null;
this.datasetName = "";
this.fileNumber = -1;
this.fileName = "";
@@ -51,7 +52,7 @@
this.pendingOp = ExternalFilePendingOp.NO_OP;
}
- public ExternalFile(String dataverseName, String datasetName, int fileNumber, String fileName,
+ public ExternalFile(DataverseName dataverseName, String datasetName, int fileNumber, String fileName,
Date lastModefiedTime, long size, ExternalFilePendingOp pendingOp) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -62,11 +63,11 @@
this.setPendingOp(pendingOp);
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public void setDataverseName(String dataverseName) {
+ public void setDataverseName(DataverseName dataverseName) {
this.dataverseName = dataverseName;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index d8196b4..f3dc0fe 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -23,7 +23,9 @@
import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
+import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IExternalFunction;
import org.apache.asterix.external.api.IFunctionFactory;
import org.apache.asterix.external.api.IFunctionHelper;
@@ -61,7 +63,7 @@
ILibraryManager libraryManager = appCtx.getLibraryManager();
String[] fnameComponents = finfo.getFunctionIdentifier().getName().split("#");
String functionLibary = fnameComponents[0];
- String dataverse = finfo.getFunctionIdentifier().getNamespace();
+ DataverseName dataverse = FunctionSignature.getDataverseName(finfo.getFunctionIdentifier());
functionHelper = new JavaFunctionHelper(finfo, resultBuffer,
libraryManager.getFunctionParameters(dataverse, finfo.getFunctionIdentifier().getName()));
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
index 60c8bfd..3425b13 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
@@ -29,6 +29,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -40,7 +41,7 @@
private static final Logger LOGGER = LogManager.getLogger();
@Override
- public void registerLibraryClassLoader(String dataverseName, String libraryName, URLClassLoader classLoader)
+ public void registerLibraryClassLoader(DataverseName dataverseName, String libraryName, URLClassLoader classLoader)
throws RuntimeDataException {
String key = getKey(dataverseName, libraryName);
synchronized (libraryClassLoaders) {
@@ -61,7 +62,7 @@
}
@Override
- public void deregisterLibraryClassLoader(String dataverseName, String libraryName) {
+ public void deregisterLibraryClassLoader(DataverseName dataverseName, String libraryName) {
String key = getKey(dataverseName, libraryName);
synchronized (libraryClassLoaders) {
URLClassLoader cl = libraryClassLoaders.get(key);
@@ -77,23 +78,23 @@
}
@Override
- public ClassLoader getLibraryClassLoader(String dataverseName, String libraryName) {
+ public ClassLoader getLibraryClassLoader(DataverseName dataverseName, String libraryName) {
String key = getKey(dataverseName, libraryName);
return libraryClassLoaders.get(key);
}
@Override
- public void addFunctionParameters(String dataverseName, String fullFunctionName, List<String> parameters) {
+ public void addFunctionParameters(DataverseName dataverseName, String fullFunctionName, List<String> parameters) {
externalFunctionParameters.put(dataverseName + "." + fullFunctionName, parameters);
}
@Override
- public List<String> getFunctionParameters(String dataverseName, String fullFunctionName) {
+ public List<String> getFunctionParameters(DataverseName dataverseName, String fullFunctionName) {
return externalFunctionParameters.getOrDefault(dataverseName + "." + fullFunctionName, Collections.emptyList());
}
- private static String getKey(String dataverseName, String libraryName) {
- return dataverseName + "." + libraryName;
+ private static String getKey(DataverseName dataverseName, String libraryName) {
+ return dataverseName + "." + libraryName; //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
}
private static Pair<String, String> getDataverseAndLibararyName(String key) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index 7a0341a..7c61653 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -105,7 +105,7 @@
INcApplicationContext runtimeCtx =
(INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
ILibraryManager libraryManager = runtimeCtx.getLibraryManager();
- ClassLoader classLoader = libraryManager.getLibraryClassLoader(feedId.getDataverse(), adaptorLibraryName);
+ ClassLoader classLoader = libraryManager.getLibraryClassLoader(feedId.getDataverseName(), adaptorLibraryName);
if (classLoader != null) {
try {
adapterFactory = (IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance());
@@ -117,7 +117,7 @@
} else {
RuntimeDataException err = new RuntimeDataException(
ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED, adaptorLibraryName,
- feedId.getDataverse());
+ feedId.getDataverseName());
LOGGER.error(err.getMessage());
throw err;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index 8024dc4..ef8597a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
import org.apache.asterix.external.api.IInputStreamFactory;
@@ -67,7 +68,7 @@
Map<String, String> configuration) throws HyracksDataException {
IInputStreamFactory streamSourceFactory;
if (ExternalDataUtils.isExternal(streamSource)) {
- String dataverse = ExternalDataUtils.getDataverse(configuration);
+ DataverseName dataverse = ExternalDataUtils.getDataverse(configuration);
streamSourceFactory =
ExternalDataUtils.createExternalInputStreamFactory(libraryManager, dataverse, streamSource);
} else {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index a418cbf..93844d1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IDataParserFactory;
import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
import org.apache.asterix.external.api.IInputStreamFactory;
@@ -117,7 +118,7 @@
&& (aString.trim().length() > 1));
}
- public static ClassLoader getClassLoader(ILibraryManager libraryManager, String dataverse, String library) {
+ public static ClassLoader getClassLoader(ILibraryManager libraryManager, DataverseName dataverse, String library) {
return libraryManager.getLibraryClassLoader(dataverse, library);
}
@@ -129,8 +130,8 @@
return aString.trim().split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[1];
}
- public static IInputStreamFactory createExternalInputStreamFactory(ILibraryManager libraryManager, String dataverse,
- String stream) throws HyracksDataException {
+ public static IInputStreamFactory createExternalInputStreamFactory(ILibraryManager libraryManager,
+ DataverseName dataverse, String stream) throws HyracksDataException {
try {
String libraryName = getLibraryName(stream);
String className = getExternalClassName(stream);
@@ -141,8 +142,8 @@
}
}
- public static String getDataverse(Map<String, String> configuration) {
- return configuration.get(ExternalDataConstants.KEY_DATAVERSE);
+ public static DataverseName getDataverse(Map<String, String> configuration) {
+ return DataverseName.createFromCanonicalForm(configuration.get(ExternalDataConstants.KEY_DATAVERSE));
}
public static String getRecordFormat(Map<String, String> configuration) {
@@ -220,7 +221,7 @@
throw new AsterixException("to use " + ExternalDataConstants.EXTERNAL + " reader, the parameter "
+ ExternalDataConstants.KEY_READER_FACTORY + " must be specified.");
}
- String[] libraryAndFactory = readerFactory.split(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR);
+ String[] libraryAndFactory = readerFactory.split(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
if (libraryAndFactory.length != 2) {
throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
+ " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
@@ -230,8 +231,9 @@
throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
+ " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
}
-
- ClassLoader classLoader = libraryManager.getLibraryClassLoader(dataverseAndLibrary[0], dataverseAndLibrary[1]);
+ DataverseName dataverseName = DataverseName.createSinglePartName(dataverseAndLibrary[0]); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
+ String libraryName = dataverseAndLibrary[1];
+ ClassLoader classLoader = libraryManager.getLibraryClassLoader(dataverseName, libraryName);
try {
return (IRecordReaderFactory<?>) classLoader.loadClass(libraryAndFactory[1]).newInstance();
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
@@ -239,8 +241,8 @@
}
}
- public static IDataParserFactory createExternalParserFactory(ILibraryManager libraryManager, String dataverse,
- String parserFactoryName) throws AsterixException {
+ public static IDataParserFactory createExternalParserFactory(ILibraryManager libraryManager,
+ DataverseName dataverse, String parserFactoryName) throws AsterixException {
try {
String library = parserFactoryName.substring(0,
parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR));
@@ -262,11 +264,11 @@
}
}
- public static void prepareFeed(Map<String, String> configuration, String dataverseName, String feedName) {
+ public static void prepareFeed(Map<String, String> configuration, DataverseName dataverseName, String feedName) {
if (!configuration.containsKey(ExternalDataConstants.KEY_IS_FEED)) {
configuration.put(ExternalDataConstants.KEY_IS_FEED, ExternalDataConstants.TRUE);
}
- configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataverseName);
+ configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataverseName.getCanonicalForm());
configuration.put(ExternalDataConstants.KEY_FEED_NAME, feedName);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
index ecaced6..2110dee 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@ -33,6 +33,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.utils.StoragePathUtil;
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.commons.lang3.StringUtils;
@@ -83,13 +84,9 @@
private FeedUtils() {
}
- private static String prepareDataverseFeedName(String dataverseName, String feedName) {
- return dataverseName + File.separator + feedName;
- }
-
- public static FileSplit splitsForAdapter(String dataverseName, String feedName, String nodeName,
+ public static FileSplit splitsForAdapter(DataverseName dataverseName, String feedName, String nodeName,
ClusterPartition partition) {
- File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
+ String relPathFile = dataverseName.getCanonicalForm() + File.separator + feedName; //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
String storagePartitionPath = StoragePathUtil.prepareStoragePartitionPath(partition.getPartitionId());
// Note: feed adapter instances in a single node share the feed logger
// format: 'storage dir name'/partition_#/dataverse/feed/node
@@ -97,8 +94,8 @@
return StoragePathUtil.getFileSplitForClusterPartition(partition, f.getPath());
}
- public static FileSplit[] splitsForAdapter(ICcApplicationContext appCtx, String dataverseName, String feedName,
- AlgebricksPartitionConstraint partitionConstraints) throws AsterixException {
+ public static FileSplit[] splitsForAdapter(ICcApplicationContext appCtx, DataverseName dataverseName,
+ String feedName, AlgebricksPartitionConstraint partitionConstraints) throws AsterixException {
if (partitionConstraints.getPartitionConstraintType() == PartitionConstraintType.COUNT) {
throw new AsterixException("Can't create file splits for adapter with count partitioning constraints");
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/feed/test/InputHandlerTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/feed/test/InputHandlerTest.java
index b6343b9..2c64ce3 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/feed/test/InputHandlerTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/feed/test/InputHandlerTest.java
@@ -32,6 +32,7 @@
import org.apache.asterix.active.ActiveRuntimeId;
import org.apache.asterix.active.EntityId;
import org.apache.asterix.common.memory.ConcurrentFramePool;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
import org.apache.asterix.external.feed.management.FeedConnectionId;
import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
@@ -59,7 +60,7 @@
private static final int DEFAULT_FRAME_SIZE = 32768;
private static final int NUM_FRAMES = 128;
private static final long FEED_MEM_BUDGET = DEFAULT_FRAME_SIZE * NUM_FRAMES;
- private static final String DATAVERSE = "dataverse";
+ private static final DataverseName DATAVERSE = DataverseName.createSinglePartName("dataverse");
private static final String DATASET = "dataset";
private static final String FEED = "feed";
private static final String NODE_ID = "NodeId";
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlStatementRewriter.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlStatementRewriter.java
index 5ca91b6..af015b7 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlStatementRewriter.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlStatementRewriter.java
@@ -22,17 +22,18 @@
import org.apache.asterix.lang.aql.visitor.AqlDeleteRewriteVisitor;
import org.apache.asterix.lang.common.base.IStatementRewriter;
import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.metadata.declared.MetadataProvider;
class AqlStatementRewriter implements IStatementRewriter {
@Override
- public void rewrite(Statement stmt) throws CompilationException {
- rewriteDeleteStatement(stmt);
+ public void rewrite(Statement stmt, MetadataProvider metadataProvider) throws CompilationException {
+ rewriteDeleteStatement(stmt, metadataProvider);
}
- private void rewriteDeleteStatement(Statement stmt) throws CompilationException {
+ private void rewriteDeleteStatement(Statement stmt, MetadataProvider metadataProvider) throws CompilationException {
if (stmt != null) {
- AqlDeleteRewriteVisitor visitor = new AqlDeleteRewriteVisitor();
+ AqlDeleteRewriteVisitor visitor = new AqlDeleteRewriteVisitor(metadataProvider);
stmt.accept(visitor, null);
}
}
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLToSQLPPPrintVisitor.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLToSQLPPPrintVisitor.java
index 59dfbb5..35a3b45 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLToSQLPPPrintVisitor.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLToSQLPPPrintVisitor.java
@@ -30,7 +30,7 @@
import java.util.Set;
import org.apache.asterix.common.exceptions.CompilationException;
-import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.aql.clause.DistinctClause;
import org.apache.asterix.lang.aql.clause.ForClause;
import org.apache.asterix.lang.aql.expression.FLWOGRExpression;
@@ -47,7 +47,6 @@
import org.apache.asterix.lang.common.expression.CallExpr;
import org.apache.asterix.lang.common.expression.FieldAccessor;
import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
import org.apache.asterix.lang.common.expression.OperatorExpr;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.statement.DataverseDecl;
@@ -57,6 +56,8 @@
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.struct.OperatorType;
import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.common.util.ExpressionUtils;
+import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.lang.common.visitor.FormatPrintVisitor;
import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -264,7 +265,7 @@
@Override
public Void visit(DataverseDecl dv, Integer step) throws CompilationException {
- out.println(skip(step) + "use " + normalize(dv.getDataverseName().getValue()) + ";\n\n");
+ out.println(skip(step) + "use " + generateDataverseName(dv.getDataverseName()) + ";\n\n");
return null;
}
@@ -302,14 +303,17 @@
@Override
public Void visit(CallExpr callExpr, Integer step) throws CompilationException {
- FunctionSignature signature = callExpr.getFunctionSignature();
- if (signature.getNamespace() != null && signature.getNamespace().equals("Metadata")
- && signature.getName().equals("dataset") && signature.getArity() == 1) {
- LiteralExpr expr = (LiteralExpr) callExpr.getExprList().get(0);
- out.print(normalize(expr.getValue().getStringValue()));
+ if (FunctionUtil.isBuiltinDatasetFunction(callExpr.getFunctionSignature())) {
+ Pair<DataverseName, String> dataset = FunctionUtil.parseDatasetFunctionArguments(callExpr.getExprList(),
+ null, callExpr.getSourceLocation(), ExpressionUtils::getStringLiteral);
+ if (dataset.first != null) {
+ out.print(generateDataverseName(dataset.first));
+ out.print(".");
+ }
+ out.print(normalize(dataset.second));
} else {
printHints(callExpr.getHints(), step);
- out.print(generateFullName(callExpr.getFunctionSignature().getNamespace(),
+ out.print(generateFullName(callExpr.getFunctionSignature().getDataverseName(),
callExpr.getFunctionSignature().getName()) + "(");
printDelimitedExpressions(callExpr.getExprList(), COMMA, step);
out.print(")");
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
index 22ef0e3..e53caa2 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AqlDeleteRewriteVisitor.java
@@ -22,6 +22,7 @@
import java.util.List;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.aql.clause.ForClause;
import org.apache.asterix.lang.aql.expression.FLWOGRExpression;
import org.apache.asterix.lang.aql.visitor.base.AbstractAqlAstVisitor;
@@ -34,20 +35,27 @@
import org.apache.asterix.lang.common.literal.StringLiteral;
import org.apache.asterix.lang.common.statement.DeleteStatement;
import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.om.functions.BuiltinFunctions;
public class AqlDeleteRewriteVisitor extends AbstractAqlAstVisitor<Void, Void> {
+ private final MetadataProvider metadataProvider;
+
+ public AqlDeleteRewriteVisitor(MetadataProvider metadataProvider) {
+ this.metadataProvider = metadataProvider;
+ }
+
@Override
public Void visit(DeleteStatement deleteStmt, Void visitArg) {
List<Expression> arguments = new ArrayList<>();
- Identifier dataverseName = deleteStmt.getDataverseName();
- Identifier datasetName = deleteStmt.getDatasetName();
- String arg = dataverseName == null ? datasetName.getValue()
- : dataverseName.getValue() + "." + datasetName.getValue();
- LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
- arguments.add(argumentLiteral);
+ DataverseName dataverseName = deleteStmt.getDataverseName();
+ if (dataverseName == null) {
+ dataverseName = metadataProvider.getDefaultDataverseName();
+ }
+ String datasetName = deleteStmt.getDatasetName();
+ arguments.add(new LiteralExpr(new StringLiteral(dataverseName.getCanonicalForm())));
+ arguments.add(new LiteralExpr(new StringLiteral(datasetName)));
CallExpr callExpression = new CallExpr(new FunctionSignature(BuiltinFunctions.DATASET), arguments);
List<Clause> clauseList = new ArrayList<>();
diff --git a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
index f8aa57c..87d0b93 100644
--- a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -63,6 +63,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.aql.clause.DistinctClause;
import org.apache.asterix.lang.aql.clause.ForClause;
import org.apache.asterix.lang.aql.expression.FLWOGRExpression;
@@ -188,6 +189,8 @@
private static final String INT_TYPE_NAME = "int";
+ private DataverseName defaultDataverse;
+
private static class IndexParams {
public IndexType type;
public int gramLength;
@@ -199,7 +202,7 @@
};
private static class FunctionName {
- public String dataverse = null;
+ public DataverseName dataverse = null;
public String library = null;
public String function = null;
public String hint = null;
@@ -381,8 +384,8 @@
{
<USE> <DATAVERSE> dvName = Identifier()
{
- defaultDataverse = dvName;
- return new DataverseDecl(new Identifier(dvName));
+ defaultDataverse = DataverseName.createSinglePartName(dvName);
+ return new DataverseDecl(defaultDataverse);
}
}
@@ -417,7 +420,7 @@
TypeDecl TypeSpecification(String hint, boolean dgen) throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
TypeExpression typeExpr = null;
}
@@ -467,9 +470,9 @@
DatasetDecl DatasetSpecification() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
- Pair<Identifier,Identifier> typeComponents = null;
+ Pair<DataverseName,Identifier> typeComponents = null;
String adapterName = null;
Map<String,String> properties = null;
FunctionSignature appliedFunction = null;
@@ -479,7 +482,7 @@
DatasetDecl dsetDecl = null;
boolean autogenerated = false;
Pair<Integer, List<String>> filterField = null;
- Pair<Identifier,Identifier> metaTypeComponents = new Pair<Identifier, Identifier>(null, null);
+ Pair<DataverseName,Identifier> metaTypeComponents = new Pair<DataverseName, Identifier>(null, null);
RecordConstructor withRecord = null;
}
{
@@ -568,7 +571,7 @@
RefreshExternalDatasetStatement RefreshExternalDatasetStatement() throws ParseException:
{
RefreshExternalDatasetStatement redss = new RefreshExternalDatasetStatement();
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
String datasetName = null;
}
{
@@ -587,7 +590,7 @@
CreateIndexStatement cis = new CreateIndexStatement();
String indexName = null;
boolean ifNotExists = false;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
Pair<Integer, Pair<List<String>, IndexedTypeExpression>> fieldPair = null;
IndexParams indexType = null;
boolean enforced = false;
@@ -702,7 +705,7 @@
ifNotExists = IfNotExists()
( <WITH> <FORMAT> format = StringLiteral() )?
{
- return new CreateDataverseStatement(new Identifier(dvName), format, ifNotExists);
+ return new CreateDataverseStatement(DataverseName.createSinglePartName(dvName), format, ifNotExists);
}
}
@@ -742,10 +745,9 @@
CreateFeedStatement FeedSpecification() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
CreateFeedStatement cfs = null;
- Pair<Identifier,Identifier> sourceNameComponents = null;
RecordConstructor withRecord = null;
}
{
@@ -920,8 +922,8 @@
Statement DropStatement() throws ParseException:
{
String id = null;
- Pair<Identifier,Identifier> pairId = null;
- Triple<Identifier,Identifier,Identifier> tripleId = null;
+ Pair<DataverseName,Identifier> pairId = null;
+ Triple<DataverseName,Identifier,Identifier> tripleId = null;
FunctionSignature funcSig = null;
boolean ifExists = false;
Statement stmt = null;
@@ -947,7 +949,7 @@
}
| <DATAVERSE> id = Identifier() ifExists = IfExists()
{
- stmt = new DataverseDropStatement(new Identifier(id), ifExists);
+ stmt = new DataverseDropStatement(DataverseName.createSinglePartName(id), ifExists);
}
| <FUNCTION> funcSig = FunctionSignature() ifExists = IfExists()
{
@@ -985,7 +987,7 @@
InsertStatement InsertStatement() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
VariableExpr var = null;
Query query;
Expression returnExpression = null;
@@ -1009,7 +1011,7 @@
UpsertStatement UpsertStatement() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
VariableExpr var = null;
Query query;
Expression returnExpression = null;
@@ -1035,7 +1037,7 @@
{
VariableExpr var = null;
Expression condition = null;
- Pair<Identifier, Identifier> nameComponents;
+ Pair<DataverseName, Identifier> nameComponents;
}
{
<DELETE> var = Variable()
@@ -1131,12 +1133,12 @@
LoadStatement LoadStatement() throws ParseException:
{
- Identifier dataverseName = null;
+ DataverseName dataverseName = null;
Identifier datasetName = null;
boolean alreadySorted = false;
String adapterName;
Map<String,String> properties;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
}
{
<LOAD> <DATASET> nameComponents = QualifiedName()
@@ -1169,7 +1171,7 @@
Statement CompactStatement() throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
Statement stmt = null;
}
{
@@ -1184,8 +1186,8 @@
Statement FeedStatement() throws ParseException:
{
- Pair<Identifier,Identifier> feedNameComponents = null;
- Pair<Identifier,Identifier> datasetNameComponents = null;
+ Pair<DataverseName,Identifier> feedNameComponents = null;
+ Pair<DataverseName,Identifier> datasetNameComponents = null;
Map<String,String> configuration = null;
List<FunctionSignature> appliedFunctions = new ArrayList<FunctionSignature>();
@@ -1383,7 +1385,7 @@
TypeReferenceExpression TypeReference() throws ParseException:
{
- Pair<Identifier,Identifier> id = null;
+ Pair<DataverseName,Identifier> id = null;
}
{
id = QualifiedName()
@@ -1448,7 +1450,7 @@
result.function = first;
} else if (third == null) {
if (secondAfterDot) {
- result.dataverse = first;
+ result.dataverse = DataverseName.createSinglePartName(first);
result.library = null;
result.function = second;
} else {
@@ -1457,7 +1459,7 @@
result.function = second;
}
} else {
- result.dataverse = first;
+ result.dataverse = DataverseName.createSinglePartName(first);
result.library = second;
result.function = third;
}
@@ -1470,15 +1472,15 @@
}
-Pair<Identifier,Identifier> TypeName() throws ParseException:
+Pair<DataverseName,Identifier> TypeName() throws ParseException:
{
- Pair<Identifier,Identifier> name = null;
+ Pair<DataverseName,Identifier> name = null;
}
{
name = QualifiedName()
{
if (name.first == null) {
- name.first = new Identifier(defaultDataverse);
+ name.first = defaultDataverse;
}
return name;
}
@@ -1564,7 +1566,7 @@
}
}
-Pair<Identifier,Identifier> QualifiedName() throws ParseException:
+Pair<DataverseName,Identifier> QualifiedName() throws ParseException:
{
String first = null;
String second = null;
@@ -1572,20 +1574,20 @@
{
first = Identifier() (<DOT> second = Identifier())?
{
- Identifier id1 = null;
+ DataverseName id1 = null;
Identifier id2 = null;
if (second == null) {
id2 = new Identifier(first);
} else
{
- id1 = new Identifier(first);
+ id1 = DataverseName.createSinglePartName(first);
id2 = new Identifier(second);
}
- return new Pair<Identifier,Identifier>(id1, id2);
+ return new Pair<DataverseName,Identifier>(id1, id2);
}
}
-Triple<Identifier,Identifier,Identifier> DoubleQualifiedName() throws ParseException:
+Triple<DataverseName,Identifier,Identifier> DoubleQualifiedName() throws ParseException:
{
String first = null;
String second = null;
@@ -1594,18 +1596,18 @@
{
first = Identifier() <DOT> second = Identifier() (<DOT> third = Identifier())?
{
- Identifier id1 = null;
+ DataverseName id1 = null;
Identifier id2 = null;
Identifier id3 = null;
if (third == null) {
id2 = new Identifier(first);
id3 = new Identifier(second);
} else {
- id1 = new Identifier(first);
+ id1 = DataverseName.createSinglePartName(first);
id2 = new Identifier(second);
id3 = new Identifier(third);
}
- return new Triple<Identifier,Identifier,Identifier>(id1, id2, id3);
+ return new Triple<DataverseName,Identifier,Identifier>(id1, id2, id3);
}
}
@@ -2278,7 +2280,7 @@
}
| ( <LEFTPAREN> nameArg = Expression() <RIGHTPAREN> ) )
{
- String dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
+ DataverseName dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
FunctionSignature signature = lookupFunctionSignature(dataverse, funcName, 1);
if (signature == null) {
signature = new FunctionSignature(dataverse, funcName, 1);
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IStatementRewriter.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IStatementRewriter.java
index 0584665..e5fd574 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IStatementRewriter.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IStatementRewriter.java
@@ -19,14 +19,17 @@
package org.apache.asterix.lang.common.base;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.metadata.declared.MetadataProvider;
public interface IStatementRewriter {
/**
* @param statement,
* a non-query statement.
+ * @param metadataProvider
+ * a metadata provider
*/
- void rewrite(Statement statement) throws CompilationException;
+ void rewrite(Statement statement, MetadataProvider metadataProvider) throws CompilationException;
String toExternalVariableName(String statementParameterName);
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/FunctionSignatures.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/FunctionSignatures.java
index 376ab94..5a68180 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/FunctionSignatures.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/FunctionSignatures.java
@@ -22,16 +22,17 @@
import java.util.Map;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
public class FunctionSignatures {
private final Map<FunctionSignature, FunctionExpressionMap> functionMap;
public FunctionSignatures() {
- functionMap = new HashMap<FunctionSignature, FunctionExpressionMap>();
+ functionMap = new HashMap<>();
}
- public FunctionSignature get(String dataverse, String name, int arity) {
- FunctionSignature fid = new FunctionSignature(dataverse, name, arity);
+ public FunctionSignature get(DataverseName dataverseName, String name, int arity) {
+ FunctionSignature fid = new FunctionSignature(dataverseName, name, arity);
FunctionExpressionMap possibleFD = functionMap.get(fid);
if (possibleFD == null) {
return null;
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
index 92be7c9..5037b44 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
@@ -29,6 +29,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.parser.ScopeChecker;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -127,13 +128,13 @@
* # of arguments
* @return FunctionDescriptor of the function found; otherwise null
*/
- public FunctionSignature findFunctionSignature(String dataverse, String name, int arity) {
+ public FunctionSignature findFunctionSignature(DataverseName dataverseName, String name, int arity) {
FunctionSignature fd = null;
if (functionSignatures != null) {
- fd = functionSignatures.get(dataverse, name, arity);
+ fd = functionSignatures.get(dataverseName, name, arity);
}
if (fd == null && parent != null) {
- fd = parent.findFunctionSignature(dataverse, name, arity);
+ fd = parent.findFunctionSignature(dataverseName, name, arity);
}
return fd;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/TypeReferenceExpression.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/TypeReferenceExpression.java
index a76e5bb..ea0b1b1 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/TypeReferenceExpression.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/TypeReferenceExpression.java
@@ -21,19 +21,20 @@
import java.util.Objects;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
import org.apache.hyracks.algebricks.common.utils.Pair;
public class TypeReferenceExpression extends AbstractTypeExpression {
- private final Pair<Identifier, Identifier> ident;
+ private final Pair<DataverseName, Identifier> ident;
- public TypeReferenceExpression(Pair<Identifier, Identifier> ident) {
+ public TypeReferenceExpression(Pair<DataverseName, Identifier> ident) {
this.ident = ident;
}
- public Pair<Identifier, Identifier> getIdent() {
+ public Pair<DataverseName, Identifier> getIdent() {
return ident;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
index 58470e3..f5aa489 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
@@ -22,6 +22,7 @@
import java.util.Stack;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.context.RootScopeFactory;
import org.apache.asterix.lang.common.context.Scope;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -42,8 +43,6 @@
protected String[] inputLines;
- protected String defaultDataverse;
-
public ScopeChecker() {
scopeStack.push(RootScopeFactory.createRootScope(this));
}
@@ -139,9 +138,9 @@
*
* @return functionDescriptor
*/
- public final FunctionSignature lookupFunctionSignature(String dataverse, String name, int arity) {
- if (dataverse != null) {
- return getCurrentScope().findFunctionSignature(dataverse, name, arity);
+ public final FunctionSignature lookupFunctionSignature(DataverseName dataverseName, String name, int arity) {
+ if (dataverseName != null) {
+ return getCurrentScope().findFunctionSignature(dataverseName, name, arity);
} else {
return null;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CompactStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CompactStatement.java
index d35516b..4c79029 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CompactStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CompactStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,10 +27,10 @@
public class CompactStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier datasetName;
- public CompactStatement(Identifier dataverseName, Identifier datasetName) {
+ public CompactStatement(DataverseName dataverseName, Identifier datasetName) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
}
@@ -39,7 +40,7 @@
return Statement.Kind.COMPACT;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ConnectFeedStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ConnectFeedStatement.java
index eaa95cd..90892d2 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ConnectFeedStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/ConnectFeedStatement.java
@@ -22,6 +22,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -31,7 +32,7 @@
public class ConnectFeedStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier datasetName;
private final String feedName;
private final String policy;
@@ -39,14 +40,14 @@
private int varCounter;
private final List<FunctionSignature> appliedFunctions;
- public ConnectFeedStatement(Pair<Identifier, Identifier> feedNameCmp, Pair<Identifier, Identifier> datasetNameCmp,
- List<FunctionSignature> appliedFunctions, String policy, String whereClauseBody, int varCounter) {
+ public ConnectFeedStatement(Pair<DataverseName, Identifier> feedNameCmp,
+ Pair<DataverseName, Identifier> datasetNameCmp, List<FunctionSignature> appliedFunctions, String policy,
+ String whereClauseBody, int varCounter) {
if (feedNameCmp.first != null && datasetNameCmp.first != null
- && !feedNameCmp.first.getValue().equals(datasetNameCmp.first.getValue())) {
+ && !feedNameCmp.first.equals(datasetNameCmp.first)) {
throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
}
- this.dataverseName = feedNameCmp.first != null ? feedNameCmp.first
- : datasetNameCmp.first != null ? datasetNameCmp.first : null;
+ this.dataverseName = feedNameCmp.first != null ? feedNameCmp.first : datasetNameCmp.first;
this.datasetName = datasetNameCmp.second;
this.feedName = feedNameCmp.second.getValue();
this.policy = policy != null ? policy : BuiltinFeedPolicies.DEFAULT_POLICY.getPolicyName();
@@ -55,7 +56,7 @@
this.appliedFunctions = appliedFunctions;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateDataverseStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateDataverseStatement.java
index 9881f83..2276755 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateDataverseStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateDataverseStatement.java
@@ -19,25 +19,25 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
public class CreateDataverseStatement extends AbstractStatement {
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private String format;
private boolean ifNotExists;
- public CreateDataverseStatement(Identifier dataverseName, String format, boolean ifNotExists) {
+ public CreateDataverseStatement(DataverseName dataverseName, String format, boolean ifNotExists) {
this.dataverseName = dataverseName;
this.format = (format == null) ? NonTaggedDataFormat.class.getName() : format;
this.ifNotExists = ifNotExists;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateFeedStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateFeedStatement.java
index 9f01b1c..f12dfce 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateFeedStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateFeedStatement.java
@@ -21,6 +21,7 @@
import java.util.Map;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.expression.RecordConstructor;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -37,18 +38,18 @@
*/
public class CreateFeedStatement extends AbstractStatement {
- private final Pair<Identifier, Identifier> qName;
+ private final Pair<DataverseName, Identifier> qName;
private final boolean ifNotExists;
private final AdmObjectNode withObjectNode;
- public CreateFeedStatement(Pair<Identifier, Identifier> qName, RecordConstructor withRecord, boolean ifNotExists)
+ public CreateFeedStatement(Pair<DataverseName, Identifier> qName, RecordConstructor withRecord, boolean ifNotExists)
throws AlgebricksException {
this.qName = qName;
this.ifNotExists = ifNotExists;
this.withObjectNode = withRecord == null ? null : ExpressionUtils.toNode(withRecord);
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return qName.first;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateIndexStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateIndexStatement.java
index 2e9f1f9..05fc055 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateIndexStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/CreateIndexStatement.java
@@ -23,6 +23,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.IndexedTypeExpression;
@@ -33,7 +34,7 @@
public class CreateIndexStatement extends AbstractStatement {
private Identifier indexName;
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private Identifier datasetName;
private List<Pair<List<String>, IndexedTypeExpression>> fieldExprs = new ArrayList<>();
private List<Integer> fieldIndexIndicators = new ArrayList<>();
@@ -63,11 +64,11 @@
this.indexName = indexName;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public void setDataverseName(Identifier dataverseName) {
+ public void setDataverseName(DataverseName dataverseName) {
this.dataverseName = dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
index ac019fc..b6a3d66 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
@@ -22,6 +22,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.RecordConstructor;
@@ -35,10 +36,10 @@
public class DatasetDecl extends AbstractStatement {
protected final Identifier name;
- protected final Identifier dataverse;
- protected final Identifier itemTypeDataverse;
+ protected final DataverseName dataverse;
+ protected final DataverseName itemTypeDataverse;
protected final Identifier itemTypeName;
- protected final Identifier metaItemTypeDataverse;
+ protected final DataverseName metaItemTypeDataverse;
protected final Identifier metaItemTypeName;
protected final Identifier nodegroupName;
protected final DatasetType datasetType;
@@ -47,24 +48,16 @@
private final AdmObjectNode withObjectNode;
protected final boolean ifNotExists;
- public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeDataverse, Identifier itemTypeName,
- Identifier metaItemTypeDataverse, Identifier metaItemTypeName, Identifier nodeGroupName,
- Map<String, String> hints, DatasetType datasetType, IDatasetDetailsDecl idd, RecordConstructor withRecord,
- boolean ifNotExists) throws CompilationException {
+ public DatasetDecl(DataverseName dataverse, Identifier name, DataverseName itemTypeDataverse,
+ Identifier itemTypeName, DataverseName metaItemTypeDataverse, Identifier metaItemTypeName,
+ Identifier nodeGroupName, Map<String, String> hints, DatasetType datasetType, IDatasetDetailsDecl idd,
+ RecordConstructor withRecord, boolean ifNotExists) throws CompilationException {
this.dataverse = dataverse;
this.name = name;
this.itemTypeName = itemTypeName;
- if (itemTypeDataverse.getValue() == null) {
- this.itemTypeDataverse = dataverse;
- } else {
- this.itemTypeDataverse = itemTypeDataverse;
- }
+ this.itemTypeDataverse = itemTypeDataverse == null ? dataverse : itemTypeDataverse;
this.metaItemTypeName = metaItemTypeName;
- if (metaItemTypeDataverse == null || metaItemTypeDataverse.getValue() == null) {
- this.metaItemTypeDataverse = dataverse;
- } else {
- this.metaItemTypeDataverse = metaItemTypeDataverse;
- }
+ this.metaItemTypeDataverse = metaItemTypeDataverse == null ? dataverse : metaItemTypeDataverse;
this.nodegroupName = nodeGroupName;
this.hints = hints;
this.withObjectNode = DatasetDeclParametersUtil.validateAndGetWithObjectNode(withRecord);
@@ -85,38 +78,26 @@
return name;
}
+ public DataverseName getDataverse() {
+ return dataverse;
+ }
+
public Identifier getItemTypeName() {
return itemTypeName;
}
- public Identifier getItemTypeDataverse() {
+ public DataverseName getItemTypeDataverse() {
return itemTypeDataverse;
}
- public String getQualifiedTypeName() {
- if (itemTypeDataverse == dataverse) {
- return itemTypeName.getValue();
- } else {
- return itemTypeDataverse.getValue() + "." + itemTypeName.getValue();
- }
- }
-
public Identifier getMetaItemTypeName() {
return metaItemTypeName;
}
- public Identifier getMetaItemTypeDataverse() {
+ public DataverseName getMetaItemTypeDataverse() {
return metaItemTypeDataverse;
}
- public String getQualifiedMetaTypeName() {
- if (metaItemTypeDataverse == dataverse) {
- return metaItemTypeName.getValue();
- } else {
- return metaItemTypeDataverse.getValue() + "." + metaItemTypeName.getValue();
- }
- }
-
public Identifier getNodegroupName() {
return nodegroupName;
}
@@ -179,10 +160,6 @@
return datasetDetailsDecl;
}
- public Identifier getDataverse() {
- return dataverse;
- }
-
@Override
public byte getCategory() {
return Category.DDL;
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDecl.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDecl.java
index 99303ce..829c407 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDecl.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDecl.java
@@ -19,20 +19,20 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
public class DataverseDecl extends AbstractStatement {
- private Identifier dataverseName;
+ private DataverseName dataverseName;
- public DataverseDecl(Identifier dataverseName) {
+ public DataverseDecl(DataverseName dataverseName) {
this.dataverseName = dataverseName;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDropStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDropStatement.java
index 9e184c2..c20bd32 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDropStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DataverseDropStatement.java
@@ -19,17 +19,17 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
public class DataverseDropStatement extends AbstractStatement {
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private boolean ifExists;
- public DataverseDropStatement(Identifier dataverseName, boolean ifExists) {
+ public DataverseDropStatement(DataverseName dataverseName, boolean ifExists) {
this.dataverseName = dataverseName;
this.ifExists = ifExists;
}
@@ -39,7 +39,7 @@
return Statement.Kind.DATAVERSE_DROP;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
index f05efbc..67180bc 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DeleteStatement.java
@@ -21,6 +21,7 @@
import java.util.Objects;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Statement;
@@ -31,13 +32,13 @@
public class DeleteStatement extends AbstractStatement {
private VariableExpr vars;
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private Identifier datasetName;
private Expression condition;
private int varCounter;
private Query rewrittenQuery;
- public DeleteStatement(VariableExpr vars, Identifier dataverseName, Identifier datasetName, Expression condition,
+ public DeleteStatement(VariableExpr vars, DataverseName dataverseName, Identifier datasetName, Expression condition,
int varCounter) {
this.vars = vars;
this.dataverseName = dataverseName;
@@ -55,12 +56,12 @@
return vars;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public Identifier getDatasetName() {
- return datasetName;
+ public String getDatasetName() {
+ return datasetName.getValue();
}
public Expression getCondition() {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DisconnectFeedStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DisconnectFeedStatement.java
index cb33452..20306c5 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DisconnectFeedStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DisconnectFeedStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -27,29 +28,28 @@
public class DisconnectFeedStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier feedName;
private final Identifier datasetName;
- public DisconnectFeedStatement(Identifier dataverseName, Identifier feedName, Identifier datasetName) {
+ public DisconnectFeedStatement(DataverseName dataverseName, Identifier feedName, Identifier datasetName) {
this.feedName = feedName;
this.datasetName = datasetName;
this.dataverseName = dataverseName;
}
- public DisconnectFeedStatement(Pair<Identifier, Identifier> feedNameComponent,
- Pair<Identifier, Identifier> datasetNameComponent) {
+ public DisconnectFeedStatement(Pair<DataverseName, Identifier> feedNameComponent,
+ Pair<DataverseName, Identifier> datasetNameComponent) {
if (feedNameComponent.first != null && datasetNameComponent.first != null
- && !feedNameComponent.first.getValue().equals(datasetNameComponent.first.getValue())) {
+ && !feedNameComponent.first.equals(datasetNameComponent.first)) {
throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
}
- this.dataverseName = feedNameComponent.first != null ? feedNameComponent.first
- : datasetNameComponent.first != null ? datasetNameComponent.first : null;
+ this.dataverseName = feedNameComponent.first != null ? feedNameComponent.first : datasetNameComponent.first;
this.datasetName = datasetNameComponent.second;
this.feedName = feedNameComponent.second;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DropDatasetStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DropDatasetStatement.java
index 1434620..557a647 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DropDatasetStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DropDatasetStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,11 +27,11 @@
public class DropDatasetStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier datasetName;
private boolean ifExists;
- public DropDatasetStatement(Identifier dataverseName, Identifier datasetName, boolean ifExists) {
+ public DropDatasetStatement(DataverseName dataverseName, Identifier datasetName, boolean ifExists) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
this.ifExists = ifExists;
@@ -41,7 +42,7 @@
return Statement.Kind.DATASET_DROP;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedDropStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedDropStatement.java
index ef6c096..c91d1a1 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedDropStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedDropStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,11 +27,11 @@
public class FeedDropStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier feedName;
private boolean ifExists;
- public FeedDropStatement(Identifier dataverseName, Identifier feedName, boolean ifExists) {
+ public FeedDropStatement(DataverseName dataverseName, Identifier feedName, boolean ifExists) {
this.dataverseName = dataverseName;
this.feedName = feedName;
this.ifExists = ifExists;
@@ -41,7 +42,7 @@
return Statement.Kind.DROP_FEED;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedPolicyDropStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedPolicyDropStatement.java
index a95254b..21a6a39 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedPolicyDropStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/FeedPolicyDropStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,11 +27,11 @@
public class FeedPolicyDropStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier policyName;
private boolean ifExists;
- public FeedPolicyDropStatement(Identifier dataverseName, Identifier policyName, boolean ifExists) {
+ public FeedPolicyDropStatement(DataverseName dataverseName, Identifier policyName, boolean ifExists) {
this.dataverseName = dataverseName;
this.policyName = policyName;
this.ifExists = ifExists;
@@ -41,7 +42,7 @@
return Statement.Kind.DROP_FEED_POLICY;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/IndexDropStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/IndexDropStatement.java
index 39e1d31..c3583698 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/IndexDropStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/IndexDropStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,12 +27,12 @@
public class IndexDropStatement extends AbstractStatement {
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private Identifier datasetName;
private Identifier indexName;
private boolean ifExists;
- public IndexDropStatement(Identifier dataverseName, Identifier datasetName, Identifier indexName,
+ public IndexDropStatement(DataverseName dataverseName, Identifier datasetName, Identifier indexName,
boolean ifExists) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -44,7 +45,7 @@
return Statement.Kind.INDEX_DROP;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/InsertStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/InsertStatement.java
index efa58fc..5290bfd 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/InsertStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/InsertStatement.java
@@ -23,6 +23,7 @@
import java.util.Objects;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.IReturningStatement;
@@ -33,14 +34,14 @@
public class InsertStatement extends AbstractStatement implements IReturningStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier datasetName;
private final Query query;
private final VariableExpr var;
private Expression returnExpression;
private int varCounter;
- public InsertStatement(Identifier dataverseName, Identifier datasetName, Query query, int varCounter,
+ public InsertStatement(DataverseName dataverseName, Identifier datasetName, Query query, int varCounter,
VariableExpr var, Expression returnExpression) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -55,12 +56,12 @@
return Statement.Kind.INSERT;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public Identifier getDatasetName() {
- return datasetName;
+ public String getDatasetName() {
+ return datasetName.getValue();
}
public Query getQuery() {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/LoadStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/LoadStatement.java
index 5366bc0..42801f0 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/LoadStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/LoadStatement.java
@@ -21,6 +21,7 @@
import java.util.Map;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -29,12 +30,12 @@
public class LoadStatement extends AbstractStatement {
private Identifier datasetName;
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private String adapter;
private Map<String, String> properties;
private boolean dataIsLocallySorted;
- public LoadStatement(Identifier dataverseName, Identifier datasetName, String adapter,
+ public LoadStatement(DataverseName dataverseName, Identifier datasetName, String adapter,
Map<String, String> propertiees, boolean dataIsLocallySorted) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
@@ -59,11 +60,11 @@
this.properties = properties;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public void setDataverseName(Identifier dataverseName) {
+ public void setDataverseName(DataverseName dataverseName) {
this.dataverseName = dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/RefreshExternalDatasetStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/RefreshExternalDatasetStatement.java
index 4ca9d97..30a02fb 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/RefreshExternalDatasetStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/RefreshExternalDatasetStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,7 +27,7 @@
public class RefreshExternalDatasetStatement extends AbstractStatement {
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private Identifier datasetName;
public Identifier getDatasetName() {
@@ -37,11 +38,11 @@
this.datasetName = datasetName;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
- public void setDataverseName(Identifier dataverseName) {
+ public void setDataverseName(DataverseName dataverseName) {
this.dataverseName = dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StartFeedStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StartFeedStatement.java
index 3ea791d..7dc652e 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StartFeedStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StartFeedStatement.java
@@ -20,6 +20,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
@@ -28,10 +29,10 @@
public class StartFeedStatement extends AbstractStatement {
public static final String WAIT_FOR_COMPLETION = "wait-for-completion-feed";
- private Identifier dataverseName;
+ private DataverseName dataverseName;
private Identifier feedName;
- public StartFeedStatement(Pair<Identifier, Identifier> feedNameComp) {
+ public StartFeedStatement(Pair<DataverseName, Identifier> feedNameComp) {
dataverseName = feedNameComp.first;
feedName = feedNameComp.second;
}
@@ -51,7 +52,7 @@
return Category.UPDATE;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StopFeedStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StopFeedStatement.java
index 99e5069..4cfcc0a 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StopFeedStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/StopFeedStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
@@ -26,10 +27,10 @@
public class StopFeedStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier feedName;
- public StopFeedStatement(Pair<Identifier, Identifier> feedNameComp) {
+ public StopFeedStatement(Pair<DataverseName, Identifier> feedNameComp) {
this.dataverseName = feedNameComp.first;
this.feedName = feedNameComp.second;
}
@@ -49,7 +50,7 @@
return visitor.visit(this, arg);
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDecl.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDecl.java
index 5430945..a840fea 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDecl.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDecl.java
@@ -20,6 +20,7 @@
import org.apache.asterix.common.annotations.TypeDataGen;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.TypeExpression;
@@ -28,13 +29,13 @@
public class TypeDecl extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private final Identifier ident;
private final TypeExpression typeDef;
private final TypeDataGen datagenAnnotation;
private final boolean ifNotExists;
- public TypeDecl(Identifier dataverseName, Identifier ident, TypeExpression typeDef, TypeDataGen datagen,
+ public TypeDecl(DataverseName dataverseName, Identifier ident, TypeExpression typeDef, TypeDataGen datagen,
boolean ifNotExists) {
this.dataverseName = dataverseName;
this.ident = ident;
@@ -43,7 +44,7 @@
this.ifNotExists = ifNotExists;
}
- public TypeDecl(Identifier dataverse, Identifier ident, TypeExpression typeDef) {
+ public TypeDecl(DataverseName dataverse, Identifier ident, TypeExpression typeDef) {
this(dataverse, ident, typeDef, null, false);
}
@@ -51,7 +52,7 @@
return ident;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDropStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDropStatement.java
index 9f59f98..73aef54 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDropStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/TypeDropStatement.java
@@ -19,6 +19,7 @@
package org.apache.asterix.lang.common.statement;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractStatement;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.struct.Identifier;
@@ -26,11 +27,11 @@
public class TypeDropStatement extends AbstractStatement {
- private final Identifier dataverseName;
+ private final DataverseName dataverseName;
private Identifier typeName;
private boolean ifExists;
- public TypeDropStatement(Identifier dataverseName, Identifier typeName, boolean ifExists) {
+ public TypeDropStatement(DataverseName dataverseName, Identifier typeName, boolean ifExists) {
this.dataverseName = dataverseName;
this.typeName = typeName;
this.ifExists = ifExists;
@@ -41,7 +42,7 @@
return Statement.Kind.TYPE_DROP;
}
- public Identifier getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java
index 178a8f9..f4f48f7 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/UpsertStatement.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.lang.common.statement;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.VariableExpr;
@@ -25,7 +26,7 @@
public class UpsertStatement extends InsertStatement {
- public UpsertStatement(Identifier dataverseName, Identifier datasetName, Query query, int varCounter,
+ public UpsertStatement(DataverseName dataverseName, Identifier datasetName, Query query, int varCounter,
VariableExpr var, Expression returnExpression) {
super(dataverseName, datasetName, query, varCounter, var, returnExpression);
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
index 7701502..a7ec834 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
@@ -165,10 +165,10 @@
String lowerCaseName = name.toLowerCase();
String mappedName = getFunctionMapping(lowerCaseName);
if (mappedName != null) {
- return new FunctionSignature(fs.getNamespace(), mappedName, fs.getArity());
+ return new FunctionSignature(fs.getDataverseName(), mappedName, fs.getArity());
}
String understoreName = lowerCaseName.replace('_', '-');
- FunctionSignature newFs = new FunctionSignature(fs.getNamespace(), understoreName, fs.getArity());
+ FunctionSignature newFs = new FunctionSignature(fs.getDataverseName(), understoreName, fs.getArity());
return BuiltinFunctions.isBuiltinCompilerFunction(newFs, true) ? newFs : fs;
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
index de5e931..dd21152 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/ExpressionUtils.java
@@ -103,4 +103,14 @@
public static <T> Collection<T> emptyIfNull(Collection<T> coll) {
return coll == null ? Collections.emptyList() : coll;
}
+
+ public static String getStringLiteral(Expression arg) {
+ if (arg.getKind() == Expression.Kind.LITERAL_EXPRESSION) {
+ Literal item = ((LiteralExpr) arg).getValue();
+ if (item.getLiteralType() == Literal.Type.STRING) {
+ return item.getStringValue();
+ }
+ }
+ return null;
+ }
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
index 24384f2..3bd2504 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/FunctionUtil.java
@@ -20,27 +20,30 @@
package org.apache.asterix.lang.common.util;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionConstants;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.IQueryRewriter;
import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
import org.apache.asterix.lang.common.statement.FunctionDecl;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Function;
-import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.utils.ConstantExpressionUtil;
+import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.common.utils.Triple;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
@@ -50,12 +53,17 @@
public static final String IMPORT_PRIVATE_FUNCTIONS = "import-private-functions";
+ private static final DataverseName FN_DATASET_DATAVERSE_NAME =
+ FunctionSignature.getDataverseName(BuiltinFunctions.DATASET);
+
+ private static final String FN_DATASET_NAME = BuiltinFunctions.DATASET.getName();
+
public static IFunctionInfo getFunctionInfo(FunctionIdentifier fi) {
return BuiltinFunctions.getAsterixFunctionInfo(fi);
}
public static IFunctionInfo getFunctionInfo(FunctionSignature fs) {
- return getFunctionInfo(new FunctionIdentifier(fs.getNamespace(), fs.getName(), fs.getArity()));
+ return getFunctionInfo(fs.createFunctionIdentifier());
}
public static IFunctionInfo getBuiltinFunctionInfo(String functionName, int arity) {
@@ -112,21 +120,21 @@
return functionDecls;
}
String value = (String) metadataProvider.getConfig().get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS);
- boolean includePrivateFunctions = (value != null) ? Boolean.valueOf(value.toLowerCase()) : false;
+ boolean includePrivateFunctions = (value != null) && Boolean.parseBoolean(value.toLowerCase());
Set<CallExpr> functionCalls = functionCollector.getFunctionCalls(expression);
for (CallExpr functionCall : functionCalls) {
FunctionSignature signature = functionCall.getFunctionSignature();
if (declaredFunctions != null && declaredFunctions.contains(signature)) {
continue;
}
- if (signature.getNamespace() == null) {
- signature.setNamespace(metadataProvider.getDefaultDataverseName());
+ if (signature.getDataverseName() == null) {
+ signature.setDataverseName(metadataProvider.getDefaultDataverseName());
}
- String namespace = signature.getNamespace();
+ DataverseName namespace = signature.getDataverseName();
// Checks the existence of the referred dataverse.
try {
- if (!namespace.equals(FunctionConstants.ASTERIX_NS)
- && !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS)
+ if (!namespace.equals(FunctionConstants.ASTERIX_DV)
+ && !namespace.equals(FunctionConstants.ALGEBRICKS_DV)
&& metadataProvider.findDataverse(namespace) == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(),
"In function call \"" + namespace + "." + signature.getName() + "(...)\", the dataverse \""
@@ -177,30 +185,25 @@
return functionDecls;
}
- public static List<List<List<String>>> getFunctionDependencies(IQueryRewriter rewriter, Expression expression,
- MetadataProvider metadataProvider) throws CompilationException {
+ public static List<List<Triple<DataverseName, String, String>>> getFunctionDependencies(IQueryRewriter rewriter,
+ Expression expression, MetadataProvider metadataProvider) throws CompilationException {
Set<CallExpr> functionCalls = rewriter.getFunctionCalls(expression);
//Get the List of used functions and used datasets
- List<List<String>> datasourceDependencies = new ArrayList<>();
- List<List<String>> functionDependencies = new ArrayList<>();
+ List<Triple<DataverseName, String, String>> datasourceDependencies = new ArrayList<>();
+ List<Triple<DataverseName, String, String>> functionDependencies = new ArrayList<>();
for (CallExpr functionCall : functionCalls) {
FunctionSignature signature = functionCall.getFunctionSignature();
- FunctionIdentifier fid =
- new FunctionIdentifier(signature.getNamespace(), signature.getName(), signature.getArity());
- if (fid.equals(BuiltinFunctions.DATASET)) {
- Pair<String, String> path = DatasetUtil.getDatasetInfo(metadataProvider,
- ((LiteralExpr) functionCall.getExprList().get(0)).getValue().getStringValue());
- datasourceDependencies.add(Arrays.asList(path.first, path.second));
- }
-
- else if (BuiltinFunctions.isBuiltinCompilerFunction(signature, false)) {
- continue;
- } else {
- functionDependencies.add(Arrays.asList(signature.getNamespace(), signature.getName(),
+ if (isBuiltinDatasetFunction(signature)) {
+ Pair<DataverseName, String> datasetReference = parseDatasetFunctionArguments(functionCall.getExprList(),
+ metadataProvider.getDefaultDataverseName(), functionCall.getSourceLocation(),
+ ExpressionUtils::getStringLiteral);
+ datasourceDependencies.add(new Triple<>(datasetReference.first, datasetReference.second, null));
+ } else if (!BuiltinFunctions.isBuiltinCompilerFunction(signature, false)) {
+ functionDependencies.add(new Triple<>(signature.getDataverseName(), signature.getName(),
Integer.toString(signature.getArity())));
}
}
- List<List<List<String>>> dependencies = new ArrayList<>();
+ List<List<Triple<DataverseName, String, String>>> dependencies = new ArrayList<>(2);
dependencies.add(datasourceDependencies);
dependencies.add(functionDependencies);
return dependencies;
@@ -208,10 +211,67 @@
private static Function lookupUserDefinedFunctionDecl(MetadataTransactionContext mdTxnCtx,
FunctionSignature signature) throws AlgebricksException {
- if (signature.getNamespace() == null) {
+ if (signature.getDataverseName() == null) {
return null;
}
return MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
}
+ public static boolean isBuiltinDatasetFunction(FunctionSignature fs) {
+ return Objects.equals(FN_DATASET_DATAVERSE_NAME, fs.getDataverseName())
+ && Objects.equals(FN_DATASET_NAME, fs.getName());
+ }
+
+ public static Pair<DataverseName, String> parseDatasetFunctionArguments(
+ List<Mutable<ILogicalExpression>> datasetFnArgs, DataverseName defaultDataverseName,
+ SourceLocation sourceLoc) throws CompilationException {
+ return parseDatasetFunctionArguments(datasetFnArgs, defaultDataverseName, sourceLoc,
+ FunctionUtil::getStringConstant);
+ }
+
+ public static <T> Pair<DataverseName, String> parseDatasetFunctionArguments(List<T> datasetFnArgs,
+ DataverseName defaultDataverseName, SourceLocation sourceLoc,
+ java.util.function.Function<T, String> argExtractFunction) throws CompilationException {
+ DataverseName dataverseName;
+ String datasetName;
+ switch (datasetFnArgs.size()) {
+ case 1: // AQL BACK-COMPAT case
+ String datasetArgBackCompat = argExtractFunction.apply(datasetFnArgs.get(0));
+ if (datasetArgBackCompat == null) {
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+ "Invalid argument to dataset()");
+ }
+ int pos = datasetArgBackCompat.indexOf('.');
+ if (pos > 0 && pos < datasetArgBackCompat.length() - 1) {
+ dataverseName = DataverseName.createSinglePartName(datasetArgBackCompat.substring(0, pos)); // AQL BACK-COMPAT
+ datasetName = datasetArgBackCompat.substring(pos + 1);
+ } else {
+ dataverseName = defaultDataverseName;
+ datasetName = datasetArgBackCompat;
+ }
+ break;
+ case 2:
+ String dataverseNameArg = argExtractFunction.apply(datasetFnArgs.get(0));
+ if (dataverseNameArg == null) {
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+ "Invalid argument to dataset()");
+ }
+ dataverseName = DataverseName.createFromCanonicalForm(dataverseNameArg);
+
+ datasetName = argExtractFunction.apply(datasetFnArgs.get(1));
+ if (datasetName == null) {
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+ "Invalid argument to dataset()");
+ }
+ break;
+ default:
+ throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+ "Invalid number of arguments to dataset()");
+ }
+ return new Pair<>(dataverseName, datasetName);
+ }
+
+ private static String getStringConstant(Mutable<ILogicalExpression> arg) {
+ return ConstantExpressionUtil.getStringConstant(arg.getValue());
+ }
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/AbstractInlineUdfsVisitor.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/AbstractInlineUdfsVisitor.java
index 1aab7f7..f934d60 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/AbstractInlineUdfsVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/AbstractInlineUdfsVisitor.java
@@ -27,6 +27,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Expression.Kind;
import org.apache.asterix.lang.common.base.ILangExpression;
@@ -378,17 +379,17 @@
wrappedQuery.setBody(fnDecl.getFuncBody());
wrappedQuery.setTopLevel(false);
- String fnNamespace = fnDecl.getSignature().getNamespace();
+ DataverseName fnDataverseName = fnDecl.getSignature().getDataverseName();
Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
Dataverse fnDataverse;
- if (fnNamespace == null || fnNamespace.equals(defaultDataverse.getDataverseName())) {
+ if (fnDataverseName == null || fnDataverseName.equals(defaultDataverse.getDataverseName())) {
fnDataverse = defaultDataverse;
} else {
try {
- fnDataverse = metadataProvider.findDataverse(fnNamespace);
+ fnDataverse = metadataProvider.findDataverse(fnDataverseName);
} catch (AlgebricksException e) {
- throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, e, sourceLoc, fnNamespace);
+ throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, e, sourceLoc, fnDataverseName);
}
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
index 6b734dd..d241a71 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Literal;
import org.apache.asterix.lang.common.clause.GroupbyClause;
@@ -99,6 +100,7 @@
import org.apache.asterix.lang.common.struct.QuantifiedPair;
import org.apache.asterix.lang.common.struct.UnaryExprType;
import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
+import org.apache.asterix.metadata.utils.MetadataConstants;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
@@ -115,6 +117,7 @@
protected String dataverseSymbol = " dataverse ";
protected String datasetSymbol = " dataset ";
protected String assignSymbol = ":=";
+ private final List<String> dataverseNameParts = new ArrayList<>();
public FormatPrintVisitor(PrintWriter out) {
this.out = out;
@@ -214,7 +217,7 @@
@Override
public Void visit(CallExpr callExpr, Integer step) throws CompilationException {
printHints(callExpr.getHints(), step);
- out.print(generateFullName(callExpr.getFunctionSignature().getNamespace(),
+ out.print(generateFullName(callExpr.getFunctionSignature().getDataverseName(),
callExpr.getFunctionSignature().getName()) + "(");
printDelimitedExpressions(callExpr.getExprList(), COMMA, step);
out.print(")");
@@ -398,8 +401,8 @@
@Override
public Void visit(TypeReferenceExpression t, Integer arg) throws CompilationException {
- if (t.getIdent().first != null && t.getIdent().first.getValue() != null) {
- out.print(normalize(t.getIdent().first.getValue()));
+ if (t.getIdent().first != null) {
+ out.print(generateDataverseName(t.getIdent().first));
out.print('.');
}
out.print(normalize(t.getIdent().second.getValue()));
@@ -457,8 +460,8 @@
public Void visit(DatasetDecl dd, Integer step) throws CompilationException {
if (dd.getDatasetType() == DatasetType.INTERNAL) {
out.print(skip(step) + "create " + datasetSymbol + generateFullName(dd.getDataverse(), dd.getName())
- + generateIfNotExists(dd.getIfNotExists()) + "(" + dd.getQualifiedTypeName() + ")"
- + " primary key ");
+ + generateIfNotExists(dd.getIfNotExists()) + "("
+ + generateFullName(dd.getItemTypeDataverse(), dd.getItemTypeName()) + ")" + " primary key ");
printDelimitedKeys(((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs(), ",");
if (((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated()) {
out.print(" autogenerated ");
@@ -466,7 +469,8 @@
} else if (dd.getDatasetType() == DatasetType.EXTERNAL) {
out.print(
skip(step) + "create external " + datasetSymbol + generateFullName(dd.getDataverse(), dd.getName())
- + "(" + dd.getQualifiedTypeName() + ")" + generateIfNotExists(dd.getIfNotExists()));
+ + "(" + generateFullName(dd.getItemTypeDataverse(), dd.getItemTypeName()) + ")"
+ + generateIfNotExists(dd.getIfNotExists()));
ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
out.print(" using " + revertStringToQuoted(externalDetails.getAdapter()));
printConfiguration(externalDetails.getProperties());
@@ -498,7 +502,7 @@
@Override
public Void visit(DataverseDecl dv, Integer step) throws CompilationException {
- out.println(skip(step) + "use " + dataverseSymbol + normalize(dv.getDataverseName().getValue()) + ";\n\n");
+ out.println(skip(step) + "use " + dataverseSymbol + generateDataverseName(dv.getDataverseName()) + ";\n\n");
return null;
}
@@ -676,7 +680,7 @@
@Override
public Void visit(CreateDataverseStatement del, Integer step) throws CompilationException {
out.print(CREATE + dataverseSymbol);
- out.print(normalize(del.getDataverseName().getValue()));
+ out.print(generateDataverseName(del.getDataverseName()));
out.print(generateIfNotExists(del.getIfNotExists()));
String format = del.getFormat();
if (format != null && !format.equals(DEFAULT_DATAVERSE_FORMAT)) {
@@ -708,7 +712,7 @@
@Override
public Void visit(DataverseDropStatement del, Integer step) throws CompilationException {
out.print(skip(step) + "drop " + dataverseSymbol);
- out.print(normalize(del.getDataverseName().getValue()));
+ out.print(generateDataverseName(del.getDataverseName()));
out.println(generateIfExists(del.getIfExists()) + SEMICOLON);
return null;
}
@@ -809,8 +813,8 @@
public Void visit(CreateFunctionStatement cfs, Integer step) throws CompilationException {
out.print(skip(step) + CREATE + " function ");
out.print(generateIfNotExists(cfs.getIfNotExists()));
- out.print(
- this.generateFullName(cfs.getFunctionSignature().getNamespace(), cfs.getFunctionSignature().getName()));
+ out.print(this.generateFullName(cfs.getFunctionSignature().getDataverseName(),
+ cfs.getFunctionSignature().getName()));
out.print("(");
printDelimitedStrings(cfs.getParamList(), COMMA);
out.println(") {");
@@ -984,14 +988,28 @@
return str;
}
- protected String generateFullName(String namespace, String identifier) {
- String dataversePrefix = namespace != null && !namespace.equals("Metadata") ? normalize(namespace) + "." : "";
+ protected String generateDataverseName(DataverseName dataverseName) {
+ StringBuilder sb = new StringBuilder();
+ dataverseNameParts.clear();
+ dataverseName.getParts(dataverseNameParts);
+ for (int i = 0, ln = dataverseNameParts.size(); i < ln; i++) {
+ if (i > 0) {
+ sb.append(DataverseName.SEPARATOR_CHAR);
+ }
+ sb.append(normalize(dataverseNameParts.get(i)));
+ }
+ return sb.toString();
+ }
+
+ protected String generateFullName(DataverseName dataverseName, String identifier) {
+ String dataversePrefix =
+ dataverseName != null && !dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
+ ? generateDataverseName(dataverseName) + "." : "";
return dataversePrefix + normalize(identifier);
}
- protected String generateFullName(Identifier dv, Identifier ds) {
- String dataverse = dv != null ? dv.getValue() : null;
- return generateFullName(dataverse, ds.getValue());
+ protected String generateFullName(DataverseName dataverseName, Identifier ds) {
+ return generateFullName(dataverseName, ds.getValue());
}
protected String generateIfNotExists(boolean ifNotExits) {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
index ff55880..6afe4e0 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
+import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Literal;
import org.apache.asterix.lang.common.clause.GroupbyClause;
@@ -64,6 +65,7 @@
import org.apache.asterix.lang.common.struct.OperatorType;
import org.apache.asterix.lang.common.struct.QuantifiedPair;
import org.apache.asterix.lang.common.visitor.base.AbstractQueryExpressionVisitor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
public abstract class QueryPrintVisitor extends AbstractQueryExpressionVisitor<Void, Integer> {
protected final PrintWriter out;
@@ -142,14 +144,30 @@
@Override
public Void visit(CallExpr pf, Integer step) throws CompilationException {
- out.println(skip(step) + "FunctionCall " + pf.getFunctionSignature().toString() + "[");
- for (Expression expr : pf.getExprList()) {
+ return printFunctionCall(pf.getFunctionSignature(), pf.getFunctionSignature().getArity(), pf.getExprList(),
+ step);
+ }
+
+ protected Void printFunctionCall(FunctionSignature fs, int arity, List<Expression> argList, Integer step)
+ throws CompilationException {
+ out.print(skip(step) + "FunctionCall ");
+ printFunctionSignature(out, fs, arity);
+ out.println("[");
+ for (Expression expr : argList) {
expr.accept(this, step + 1);
}
out.println(skip(step) + "]");
return null;
}
+ private static void printFunctionSignature(PrintWriter out, FunctionSignature fs, int arity) {
+ out.print(fs.toString(false));
+ if (arity != FunctionIdentifier.VARARGS) {
+ out.print("@");
+ out.print(arity);
+ }
+ }
+
@Override
public Void visit(OperatorExpr ifbo, Integer step) throws CompilationException {
List<Expression> exprList = ifbo.getExprList();
@@ -329,8 +347,8 @@
@Override
public Void visit(TypeReferenceExpression t, Integer arg) throws CompilationException {
- if (t.getIdent().first != null && t.getIdent().first.getValue() != null) {
- out.print(t.getIdent().first.getValue());
+ if (t.getIdent().first != null) {
+ out.print(t.getIdent().first);
out.print('.');
}
out.print(t.getIdent().second.getValue());
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppStatementRewriter.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppStatementRewriter.java
index 7908636..4a08874 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppStatementRewriter.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppStatementRewriter.java
@@ -23,17 +23,18 @@
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
import org.apache.asterix.lang.sqlpp.visitor.SqlppDeleteRewriteVisitor;
+import org.apache.asterix.metadata.declared.MetadataProvider;
class SqlppStatementRewriter implements IStatementRewriter {
@Override
- public void rewrite(Statement stmt) throws CompilationException {
- rewriteDeleteStatement(stmt);
+ public void rewrite(Statement stmt, MetadataProvider metadataProvider) throws CompilationException {
+ rewriteDeleteStatement(stmt, metadataProvider);
}
- private void rewriteDeleteStatement(Statement stmt) throws CompilationException {
+ private void rewriteDeleteStatement(Statement stmt, MetadataProvider metadataProvider) throws CompilationException {
if (stmt != null) {
- SqlppDeleteRewriteVisitor visitor = new SqlppDeleteRewriteVisitor();
+ SqlppDeleteRewriteVisitor visitor = new SqlppDeleteRewriteVisitor(metadataProvider);
stmt.accept(visitor, null);
}
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
index 23b4d60..6d07a7b 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
@@ -94,7 +94,7 @@
}
}
- private Expression processInOperator(OperatorExpr operatorExpr, OperatorType opType) throws CompilationException {
+ private Expression processInOperator(OperatorExpr operatorExpr, OperatorType opType) {
VariableExpr bindingVar = new VariableExpr(context.newVariable());
bindingVar.setSourceLocation(operatorExpr.getSourceLocation());
Expression itemExpr = operatorExpr.getExprList().get(0);
@@ -123,7 +123,7 @@
private Expression processConcatOperator(OperatorExpr operatorExpr) {
// All operators have to be "||"s (according to the grammar).
- CallExpr callExpr = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, FunctionMapUtil.CONCAT, 1),
+ CallExpr callExpr = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_DV, FunctionMapUtil.CONCAT, 1),
operatorExpr.getExprList());
callExpr.setSourceLocation(operatorExpr.getSourceLocation());
return callExpr;
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/Sql92AggregateFunctionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/Sql92AggregateFunctionVisitor.java
index 6d2dec7..cb6b396 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/Sql92AggregateFunctionVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/Sql92AggregateFunctionVisitor.java
@@ -144,7 +144,7 @@
}
FieldAccessor faInner = new FieldAccessor(fromBindingVar, groupVarField);
faInner.setSourceLocation(usedVar.getSourceLocation());
- Expression faOuter = VariableCheckAndRewriteVisitor.resolveAsFieldAccess(faInner, usedVar.getVar(),
+ Expression faOuter = VariableCheckAndRewriteVisitor.generateFieldAccess(faInner, usedVar.getVar(),
usedVar.getSourceLocation());
varExprMap.put(usedVar, faOuter);
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
index aaf8feb..70739ed 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Expression.Kind;
import org.apache.asterix.lang.common.base.ILangExpression;
@@ -48,15 +49,13 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.om.functions.BuiltinFunctions;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.api.exceptions.SourceLocation;
public class VariableCheckAndRewriteVisitor extends AbstractSqlppExpressionScopingVisitor {
- private static final FunctionSignature FN_DATASET = new FunctionSignature(BuiltinFunctions.DATASET);
-
protected final MetadataProvider metadataProvider;
/**
@@ -70,105 +69,151 @@
}
@Override
- public Expression visit(FieldAccessor fa, ILangExpression parent) throws CompilationException {
- Expression leadingExpr = fa.getExpr();
- if (leadingExpr.getKind() != Kind.VARIABLE_EXPRESSION) {
- fa.setExpr(leadingExpr.accept(this, parent));
- return fa;
- } else {
- VariableExpr varExpr = (VariableExpr) leadingExpr;
- String lastIdentifier = fa.getIdent().getValue();
- Expression resolvedExpr = resolve(varExpr,
- /* Resolves within the dataverse that has the same name as the variable name. */
- SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar().getValue()).getValue(), lastIdentifier,
- parent);
- if (resolvedExpr.getKind() == Kind.CALL_EXPRESSION) {
- CallExpr callExpr = (CallExpr) resolvedExpr;
- if (callExpr.getFunctionSignature().equals(FN_DATASET)) {
- // The field access is resolved to be a dataset access in the form of "dataverse.dataset".
- return resolvedExpr;
- }
- }
- fa.setExpr(resolvedExpr);
- return fa;
+ public Expression visit(VariableExpr varExpr, ILangExpression parent) throws CompilationException {
+ if (resolveAsVariableReference(varExpr)) {
+ return varExpr;
}
+ DataverseName dataverseName = metadataProvider.getDefaultDataverseName();
+ String datasetName = SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar().getValue()).getValue();
+ CallExpr datasetExpr = resolveAsDataset(dataverseName, datasetName, parent, varExpr);
+ return datasetExpr != null ? datasetExpr : resolveAsFieldAccessOverContextVar(varExpr);
}
@Override
- public Expression visit(VariableExpr varExpr, ILangExpression parent) throws CompilationException {
- return resolve(varExpr, metadataProvider.getDefaultDataverseName(),
- SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar().getValue()).getValue(), parent);
+ public Expression visit(FieldAccessor fa, ILangExpression parent) throws CompilationException {
+ Expression leadingExpr = fa.getExpr();
+ if (leadingExpr.getKind() == Kind.VARIABLE_EXPRESSION) {
+ // resolving a.b
+ VariableExpr leadingVarExpr = (VariableExpr) leadingExpr;
+ if (resolveAsVariableReference(leadingVarExpr)) {
+ return fa;
+ } else {
+ String dataverseNamePart =
+ SqlppVariableUtil.toUserDefinedVariableName(leadingVarExpr.getVar().getValue()).getValue();
+ DataverseName dataverseName = DataverseName.createSinglePartName(dataverseNamePart); // 1-part name
+ String datasetName = fa.getIdent().getValue();
+ CallExpr datasetExpr = resolveAsDataset(dataverseName, datasetName, parent, leadingVarExpr);
+ if (datasetExpr != null) {
+ return datasetExpr;
+ } else {
+ fa.setExpr(resolveAsFieldAccessOverContextVar(leadingVarExpr));
+ return fa;
+ }
+ }
+ } else {
+ List<String> dataverseNameParts = new ArrayList<>(4);
+ Pair<VariableExpr, FieldAccessor> topExprs = new Pair<>(null, null);
+ if (extractDataverseName(fa.getExpr(), dataverseNameParts, topExprs)) {
+ // resolving a.b.c(.x)*
+ VariableExpr topVarExpr = topExprs.getFirst(); // = a
+ if (resolveAsVariableReference(topVarExpr)) {
+ return fa;
+ } else {
+ DataverseName dataverseName = DataverseName.create(dataverseNameParts);
+ String datasetName = fa.getIdent().getValue();
+ CallExpr datasetExpr = resolveAsDataset(dataverseName, datasetName, parent, topVarExpr);
+ if (datasetExpr != null) {
+ return datasetExpr;
+ }
+ FieldAccessor topFaExpr = topExprs.getSecond(); // = a.b
+ topFaExpr.setExpr(resolveAsFieldAccessOverContextVar(topVarExpr));
+ return fa;
+ }
+ } else {
+ fa.setExpr(leadingExpr.accept(this, parent));
+ return fa;
+ }
+ }
}
- // Resolve a variable expression with dataverse name and dataset name.
- private Expression resolve(VariableExpr varExpr, String dataverseName, String datasetName, ILangExpression parent)
- throws CompilationException {
-
+ private boolean resolveAsVariableReference(VariableExpr varExpr) throws CompilationException {
VarIdentifier varId = varExpr.getVar();
String varName = varId.getValue();
- SourceLocation sourceLoc = varExpr.getSourceLocation();
- VarIdentifier var = lookupVariable(varName, sourceLoc);
- if (var != null) {
- // Exists such an identifier
- varExpr.setIsNewVar(false);
- varExpr.setVar(var);
- return varExpr;
- }
-
- if (SqlppVariableUtil.isExternalVariableIdentifier(varId)) {
- throw new CompilationException(ErrorCode.PARAMETER_NO_VALUE, sourceLoc,
- SqlppVariableUtil.variableNameToDisplayedFieldName(varId.getValue()));
- }
-
- boolean resolveToDataset = parent.accept(CheckDatasetOnlyResolutionVisitor.INSTANCE, varExpr);
- if (resolveToDataset) {
- // resolve the undefined identifier reference as a dataset access.
- // for a From/Join/UNNEST/Quantifiers binding expression
- return resolveAsDataset(dataverseName, datasetName, sourceLoc);
- } else {
- // resolve the undefined identifier reference as a field access on a context variable
- Map<VariableExpr, Set<? extends Scope.SymbolAnnotation>> localVars =
- scopeChecker.getCurrentScope().getLiveVariables(scopeChecker.getPrecedingScope());
- Set<VariableExpr> contextVars = Scope.findVariablesAnnotatedBy(localVars.keySet(),
- SqlppVariableAnnotation.CONTEXT_VARIABLE, localVars, sourceLoc);
- VariableExpr contextVar = pickContextVar(contextVars, varExpr);
- return resolveAsFieldAccess(contextVar, varId, sourceLoc);
- }
- }
-
- private VarIdentifier lookupVariable(String varName, SourceLocation sourceLoc) throws CompilationException {
if (scopeChecker.isInForbiddenScopes(varName)) {
- throw new CompilationException(ErrorCode.FORBIDDEN_SCOPE, sourceLoc);
+ throw new CompilationException(ErrorCode.FORBIDDEN_SCOPE, varExpr.getSourceLocation());
}
Identifier ident = scopeChecker.lookupSymbol(varName);
- return ident != null ? (VarIdentifier) ident : null;
+ if (ident == null) {
+ if (SqlppVariableUtil.isExternalVariableIdentifier(varId)) {
+ throw new CompilationException(ErrorCode.PARAMETER_NO_VALUE, varExpr.getSourceLocation(),
+ SqlppVariableUtil.variableNameToDisplayedFieldName(varId.getValue()));
+ } else {
+ return false;
+ }
+ }
+ // Exists such an identifier
+ varExpr.setIsNewVar(false);
+ varExpr.setVar((VarIdentifier) ident);
+ return true;
}
- private Expression resolveAsDataset(String dataverseName, String datasetName, SourceLocation sourceLoc)
- throws CompilationException {
+ // try resolving the undefined identifier reference as a dataset access.
+ // for a From/Join/UNNEST/Quantifiers binding expression
+ private CallExpr resolveAsDataset(DataverseName dataverseName, String datasetName, ILangExpression parent,
+ VariableExpr varExpr) throws CompilationException {
+ if (!parent.accept(CheckDatasetOnlyResolutionVisitor.INSTANCE, varExpr)) {
+ return null;
+ }
+ SourceLocation sourceLoc = varExpr.getSourceLocation();
Dataset dataset = findDataset(dataverseName, datasetName, sourceLoc);
if (dataset == null) {
throw createUnresolvableError(dataverseName, datasetName, sourceLoc);
}
metadataProvider.addAccessedDataset(dataset);
- List<Expression> argList = new ArrayList<>(1);
- argList.add(new LiteralExpr(new StringLiteral(dataset.getFullyQualifiedName())));
+ List<Expression> argList = new ArrayList<>(2);
+ argList.add(new LiteralExpr(new StringLiteral(dataset.getDataverseName().getCanonicalForm())));
+ argList.add(new LiteralExpr(new StringLiteral(dataset.getDatasetName())));
CallExpr callExpr = new CallExpr(new FunctionSignature(BuiltinFunctions.DATASET), argList);
callExpr.setSourceLocation(sourceLoc);
return callExpr;
}
+ // resolve the undefined identifier reference as a field access on a context variable
+ private FieldAccessor resolveAsFieldAccessOverContextVar(VariableExpr varExpr) throws CompilationException {
+ Map<VariableExpr, Set<? extends Scope.SymbolAnnotation>> localVars =
+ scopeChecker.getCurrentScope().getLiveVariables(scopeChecker.getPrecedingScope());
+ Set<VariableExpr> contextVars = Scope.findVariablesAnnotatedBy(localVars.keySet(),
+ SqlppVariableAnnotation.CONTEXT_VARIABLE, localVars, varExpr.getSourceLocation());
+ VariableExpr contextVar = pickContextVar(contextVars, varExpr);
+ return generateFieldAccess(contextVar, varExpr.getVar(), varExpr.getSourceLocation());
+ }
+
// Rewrites for an field access by name
- static Expression resolveAsFieldAccess(Expression sourceExpr, VarIdentifier fieldVar, SourceLocation sourceLoc) {
+ static FieldAccessor generateFieldAccess(Expression sourceExpr, VarIdentifier fieldVar, SourceLocation sourceLoc) {
VarIdentifier fieldName = SqlppVariableUtil.toUserDefinedVariableName(fieldVar.getValue());
FieldAccessor fa = new FieldAccessor(sourceExpr, fieldName);
fa.setSourceLocation(sourceLoc);
return fa;
}
- private CompilationException createUnresolvableError(String dataverseName, String datasetName,
+ private static boolean extractDataverseName(Expression expr, List<String> outDataverseName,
+ Pair<VariableExpr, FieldAccessor> outTopExprs) {
+ switch (expr.getKind()) {
+ case VARIABLE_EXPRESSION:
+ VariableExpr varExpr = (VariableExpr) expr;
+ String varName = SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar().getValue()).getValue();
+ outDataverseName.add(varName);
+ outTopExprs.setFirst(varExpr);
+ return true;
+ case FIELD_ACCESSOR_EXPRESSION:
+ FieldAccessor faExpr = (FieldAccessor) expr;
+ if (extractDataverseName(faExpr.getExpr(), outDataverseName, outTopExprs)) {
+ outDataverseName.add(faExpr.getIdent().getValue());
+ if (outTopExprs.getSecond() == null) {
+ outTopExprs.setSecond(faExpr);
+ }
+ return true;
+ } else {
+ return false;
+ }
+ default:
+ return false;
+ }
+ }
+
+ private CompilationException createUnresolvableError(DataverseName dataverseName, String datasetName,
SourceLocation sourceLoc) {
- String defaultDataverseName = metadataProvider.getDefaultDataverseName();
+ DataverseName defaultDataverseName = metadataProvider.getDefaultDataverseName();
if (dataverseName == null && defaultDataverseName == null) {
return new CompilationException(ErrorCode.NAME_RESOLVE_UNKNOWN_DATASET, sourceLoc, datasetName);
}
@@ -177,30 +222,15 @@
dataverseName == null ? defaultDataverseName : dataverseName);
}
- private Dataset findDataset(String dataverseName, String datasetName, SourceLocation sourceLoc)
+ private Dataset findDataset(DataverseName dataverseName, String datasetName, SourceLocation sourceLoc)
throws CompilationException {
try {
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
- if (dataset != null) {
- return dataset;
- }
- return findDatasetByFullyQualifiedName(datasetName);
+ return metadataProvider.findDataset(dataverseName, datasetName);
} catch (AlgebricksException e) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, e, sourceLoc, e.getMessage());
}
}
- private Dataset findDatasetByFullyQualifiedName(String name) throws AlgebricksException {
- if (name.indexOf('.') < 0) {
- return null;
- }
- String[] path = StringUtils.split(name, '.');
- if (path.length != 2) {
- return null;
- }
- return metadataProvider.findDataset(path[0], path[1]);
- }
-
@Override
public Expression visit(CallExpr callExpr, ILangExpression arg) throws CompilationException {
// skip variables inside SQL-92 aggregates (they will be resolved by SqlppGroupByAggregationSugarVisitor)
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
index 8df2616..b092f4a 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
@@ -100,7 +100,7 @@
String name = applySql92AggregateNameMapping(fs.getName().toLowerCase());
String prefix =
CORE_AGGREGATE_PREFIX_FUNCTIONS.contains(name) ? CORE_AGGREGATE_PREFIX : CORE_SQL_AGGREGATE_PREFIX;
- return new FunctionSignature(FunctionConstants.ASTERIX_NS, prefix + name, fs.getArity());
+ return new FunctionSignature(FunctionConstants.ASTERIX_DV, prefix + name, fs.getArity());
}
/**
@@ -142,7 +142,7 @@
FunctionIdentifier fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, internalName, ns.getArity());
IFunctionInfo finfo = FunctionUtil.getFunctionInfo(fi);
if (finfo != null && BuiltinFunctions.getAggregateFunction(finfo.getFunctionIdentifier()) != null) {
- return new FunctionSignature(FunctionConstants.ASTERIX_NS, internalName, ns.getArity());
+ return new FunctionSignature(FunctionConstants.ASTERIX_DV, internalName, ns.getArity());
}
} else if (checkSql92Aggregate) {
if (isSql92AggregateFunction(ns)) {
@@ -154,7 +154,7 @@
throw new CompilationException(ErrorCode.COMPILATION_UNEXPECTED_WINDOW_EXPRESSION, sourceLoc);
}
}
- return new FunctionSignature(ns.getNamespace(), ns.getName(), ns.getArity());
+ return new FunctionSignature(ns.getDataverseName(), ns.getName(), ns.getArity());
}
/**
@@ -170,7 +170,7 @@
if (internalFuncName == null) {
return callExpr;
}
- callExpr.setFunctionSignature(new FunctionSignature(FunctionConstants.ASTERIX_NS, internalFuncName, 1));
+ callExpr.setFunctionSignature(new FunctionSignature(FunctionConstants.ASTERIX_DV, internalFuncName, 1));
ListConstructor listConstr =
new ListConstructor(ListConstructor.Type.ORDERED_LIST_CONSTRUCTOR, callExpr.getExprList());
listConstr.setSourceLocation(callExpr.getSourceLocation());
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppStatementUtil.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppStatementUtil.java
index e41b9ac..42f62ba 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppStatementUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppStatementUtil.java
@@ -18,6 +18,10 @@
*/
package org.apache.asterix.lang.sqlpp.util;
+import java.util.List;
+
+import org.apache.asterix.common.metadata.DataverseName;
+
public class SqlppStatementUtil {
private static final String IF_EXISTS = " IF EXISTS ";
@@ -45,15 +49,15 @@
}
@SuppressWarnings("squid:S1172") // unused variable
- public static StringBuilder getCreateDataverseStatement(StringBuilder stringBuilder, String dataverseName,
+ public static StringBuilder getCreateDataverseStatement(StringBuilder stringBuilder, DataverseName dataverseName,
boolean ifNotExists, int version) {
stringBuilder.append(CREATE_DATAVERSE);
- enclose(stringBuilder, dataverseName);
+ encloseDataverseName(stringBuilder, dataverseName);
return ifNotExists(stringBuilder, ifNotExists).append(SEMI_COLON);
}
@SuppressWarnings("squid:S1172") // unused variable
- public static StringBuilder getDropDatasetStatement(StringBuilder stringBuilder, String dataverseName,
+ public static StringBuilder getDropDatasetStatement(StringBuilder stringBuilder, DataverseName dataverseName,
String datasetName, boolean ifExists, int version) {
stringBuilder.append(DROP_DATASET);
enclose(stringBuilder, dataverseName, datasetName);
@@ -61,7 +65,7 @@
}
@SuppressWarnings("squid:S1172") // unused variable
- public static StringBuilder getCreateIndexStatement(StringBuilder stringBuilder, String dataverseName,
+ public static StringBuilder getCreateIndexStatement(StringBuilder stringBuilder, DataverseName dataverseName,
String datasetName, String indexName, String fields, int version) {
stringBuilder.append(CREATE_INDEX);
enclose(stringBuilder, indexName).append(ON);
@@ -69,7 +73,7 @@
}
@SuppressWarnings("squid:S1172") // unused variable
- public static StringBuilder getCreatePrimaryIndexStatement(StringBuilder stringBuilder, String dataverseName,
+ public static StringBuilder getCreatePrimaryIndexStatement(StringBuilder stringBuilder, DataverseName dataverseName,
String datasetName, String indexName, int version) {
stringBuilder.append(CREATE_PRIMARY_INDEX);
enclose(stringBuilder, indexName).append(ON);
@@ -77,7 +81,7 @@
}
@SuppressWarnings("squid:S1172") // unused variable
- public static StringBuilder getDropIndexStatement(StringBuilder stringBuilder, String dataverseName,
+ public static StringBuilder getDropIndexStatement(StringBuilder stringBuilder, DataverseName dataverseName,
String datasetName, String indexName, boolean ifExists, int version) {
stringBuilder.append(DROP_INDEX);
enclose(stringBuilder, dataverseName, datasetName, indexName);
@@ -103,30 +107,47 @@
}
/**
- * Same as {@link SqlppStatementUtil#enclose(StringBuilder, String)} but for a qualified identifier.
+ * Encloses each part of the {@param dataverseName} in back-ticks and concatenates them with
+ * {@link DataverseName#SEPARATOR_CHAR} separator
+ * @param stringBuilder where the dataverse name will be appended
+ * @param dataverseName a dataverse name which could be a valid one or one that needs to be delimited
+ * @return {@param stringBuilder} with the <i>delimited</i> dataverseName appended
+ */
+ public static StringBuilder encloseDataverseName(StringBuilder stringBuilder, DataverseName dataverseName) {
+ List<String> parts = dataverseName.getParts();
+ for (int i = 0, ln = parts.size(); i < ln; i++) {
+ if (i > 0) {
+ stringBuilder.append(DataverseName.SEPARATOR_CHAR);
+ }
+ enclose(stringBuilder, parts.get(i));
+ }
+ return stringBuilder;
+ }
+
+ /**
+ * Encloses a dataverse name and a given identifier.
* @param stringBuilder where the identifier will be appended
- * @param identifier1 the qualifying identifier
- * @param identifier2 the qualified identifier
+ * @param dataverseName the dataverse name
+ * @param identifier the identifier
* @return {@param stringBuilder} with the <i>delimited</i> qualified identifier appended
*/
- public static StringBuilder enclose(StringBuilder stringBuilder, String identifier1, String identifier2) {
- return stringBuilder.append(BACK_TICK).append(identifier1).append(BACK_TICK).append(DOT).append(BACK_TICK)
- .append(identifier2).append(BACK_TICK);
+ public static StringBuilder enclose(StringBuilder stringBuilder, DataverseName dataverseName, String identifier) {
+ encloseDataverseName(stringBuilder, dataverseName).append(DOT);
+ return enclose(stringBuilder, identifier);
}
/**
* Same as {@link SqlppStatementUtil#enclose(StringBuilder, String)} but for a double qualified identifier.
* @param stringBuilder where the identifier will be appended
- * @param identifier1 the 1st qualifying identifier
- * @param identifier2 the 2nd qualifying identifier
- * @param identifier3 the qualified identifier
+ * @param dataverseName the 1st qualifying identifier
+ * @param identifier1 the 2nd qualifying identifier
+ * @param identifier2 the qualified identifier
* @return {@param stringBuilder} with the <i>delimited</i> qualified identifier appended
*/
- public static StringBuilder enclose(StringBuilder stringBuilder, String identifier1, String identifier2,
- String identifier3) {
- return stringBuilder.append(BACK_TICK).append(identifier1).append(BACK_TICK).append(DOT).append(BACK_TICK)
- .append(identifier2).append(BACK_TICK).append(DOT).append(BACK_TICK).append(identifier3)
- .append(BACK_TICK);
+ public static StringBuilder enclose(StringBuilder stringBuilder, DataverseName dataverseName, String identifier1,
+ String identifier2) {
+ enclose(stringBuilder, dataverseName, identifier1).append(DOT);
+ return enclose(stringBuilder, identifier2);
}
public static String enclose(String identifier) {
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
index a3b752b..33446de 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
@@ -19,8 +19,10 @@
package org.apache.asterix.lang.sqlpp.visitor;
import java.io.PrintWriter;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionSignature;
@@ -32,8 +34,11 @@
import org.apache.asterix.lang.common.expression.CallExpr;
import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
import org.apache.asterix.lang.common.expression.ListSliceExpression;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.literal.StringLiteral;
import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.util.FunctionUtil;
import org.apache.asterix.lang.common.visitor.QueryPrintVisitor;
import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
import org.apache.asterix.lang.sqlpp.clause.FromClause;
@@ -84,7 +89,7 @@
out.print(skip(step) + "AS ");
fromTerm.getLeftVariable().accept(this, 0);
if (fromTerm.hasPositionalVariable()) {
- out.println(" AT ");
+ out.println(" AT");
fromTerm.getPositionalVariable().accept(this, 0);
}
if (fromTerm.hasCorrelateClauses()) {
@@ -252,11 +257,15 @@
if (BuiltinFunctions.isBuiltinCompilerFunction(normalizedFunctionSignature, true)) {
functionSignature = normalizedFunctionSignature;
}
- out.println(skip(step) + "FunctionCall " + functionSignature.toString() + "[");
- for (Expression expr : pf.getExprList()) {
- expr.accept(this, step + 1);
+ // TODO(MULTI_PART_DATAVERSE_NAME): temporary workaround to preserve AST reference results
+ if (FunctionUtil.isBuiltinDatasetFunction(functionSignature)) {
+ String singleArg = pf.getExprList().stream().map(LiteralExpr.class::cast).map(LiteralExpr::getValue)
+ .map(StringLiteral.class::cast).map(StringLiteral::getValue).collect(Collectors.joining("."));
+ printFunctionCall(functionSignature, 1,
+ Collections.singletonList(new LiteralExpr(new StringLiteral(singleArg))), step);
+ } else {
+ printFunctionCall(functionSignature, functionSignature.getArity(), pf.getExprList(), step);
}
- out.println(skip(step) + "]");
return null;
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
index 8cfd04b..966d153 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
@@ -23,6 +23,7 @@
import java.util.List;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.clause.WhereClause;
import org.apache.asterix.lang.common.expression.CallExpr;
@@ -31,7 +32,6 @@
import org.apache.asterix.lang.common.literal.StringLiteral;
import org.apache.asterix.lang.common.statement.DeleteStatement;
import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.Identifier;
import org.apache.asterix.lang.sqlpp.clause.FromClause;
import org.apache.asterix.lang.sqlpp.clause.FromTerm;
import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
@@ -41,6 +41,7 @@
import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppAstVisitor;
+import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.om.functions.BuiltinFunctions;
/**
@@ -49,15 +50,22 @@
*/
public class SqlppDeleteRewriteVisitor extends AbstractSqlppAstVisitor<Void, Void> {
+ private final MetadataProvider metadataProvider;
+
+ public SqlppDeleteRewriteVisitor(MetadataProvider metadataProvider) {
+ this.metadataProvider = metadataProvider;
+ }
+
@Override
public Void visit(DeleteStatement deleteStmt, Void visitArg) {
List<Expression> arguments = new ArrayList<>();
- Identifier dataverseName = deleteStmt.getDataverseName();
- Identifier datasetName = deleteStmt.getDatasetName();
- String arg = dataverseName == null ? datasetName.getValue()
- : dataverseName.getValue() + "." + datasetName.getValue();
- LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
- arguments.add(argumentLiteral);
+ DataverseName dataverseName = deleteStmt.getDataverseName();
+ if (dataverseName == null) {
+ dataverseName = metadataProvider.getDefaultDataverseName();
+ }
+ String datasetName = deleteStmt.getDatasetName();
+ arguments.add(new LiteralExpr(new StringLiteral(dataverseName.getCanonicalForm())));
+ arguments.add(new LiteralExpr(new StringLiteral(datasetName)));
CallExpr callExpression = new CallExpr(new FunctionSignature(BuiltinFunctions.DATASET), arguments);
callExpression.setSourceLocation(deleteStmt.getSourceLocation());
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppFormatPrintVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppFormatPrintVisitor.java
index e541363..9901c8c 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppFormatPrintVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppFormatPrintVisitor.java
@@ -330,7 +330,7 @@
@Override
public Void visit(WindowExpression windowExpr, Integer step) throws CompilationException {
out.print(skip(step) + "window ");
- out.print(generateFullName(windowExpr.getFunctionSignature().getNamespace(),
+ out.print(generateFullName(windowExpr.getFunctionSignature().getDataverseName(),
windowExpr.getFunctionSignature().getName()) + "(");
printDelimitedExpressions(windowExpr.getExprList(), COMMA, step);
out.print(")");
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index 38ace29..13cebb6 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -70,6 +70,7 @@
import org.apache.asterix.common.exceptions.WarningUtil;
import org.apache.asterix.common.functions.FunctionConstants;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.AbstractClause;
import org.apache.asterix.lang.common.base.AbstractLangExpression;
import org.apache.asterix.lang.common.base.AbstractStatement;
@@ -225,6 +226,8 @@
private int externalVarCounter;
+ private DataverseName defaultDataverse;
+
private final WarningCollector warningCollector = new WarningCollector();
private final Map<SourceLocation, String> hintCollector = new HashMap<SourceLocation, String>();
@@ -240,7 +243,7 @@
};
private static class FunctionName {
- public String dataverse;
+ public DataverseName dataverse;
public String library;
public String function;
public SqlppHint hint;
@@ -556,13 +559,13 @@
DataverseDecl DataverseDeclaration() throws ParseException:
{
Token startToken = null;
- String dvName = null;
+ List<String> dvName = null;
}
{
- <USE> { startToken = token; } dvName = Identifier()
+ <USE> { startToken = token; } dvName = MultipartIdentifier()
{
- defaultDataverse = dvName;
- DataverseDecl dvDecl = new DataverseDecl(new Identifier(dvName));
+ defaultDataverse = DataverseName.create(dvName);
+ DataverseDecl dvDecl = new DataverseDecl(defaultDataverse);
return addSourceLocation(dvDecl, startToken);
}
}
@@ -591,7 +594,7 @@
TypeDecl TypeSpecification(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
TypeExpression typeExpr = null;
}
@@ -647,9 +650,9 @@
DatasetDecl DatasetSpecification(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
- Pair<Identifier,Identifier> typeComponents = null;
+ Pair<DataverseName,Identifier> typeComponents = null;
String adapterName = null;
Map<String,String> properties = null;
FunctionSignature appliedFunction = null;
@@ -659,7 +662,7 @@
DatasetDecl stmt = null;
boolean autogenerated = false;
Pair<Integer, List<String>> filterField = null;
- Pair<Identifier,Identifier> metaTypeComponents = new Pair<Identifier, Identifier>(null, null);
+ Pair<DataverseName,Identifier> metaTypeComponents = new Pair<DataverseName, Identifier>(null, null);
RecordConstructor withRecord = null;
}
{
@@ -750,7 +753,7 @@
RefreshExternalDatasetStatement RefreshExternalDatasetStatement() throws ParseException:
{
Token startToken = null;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
String datasetName = null;
}
{
@@ -768,7 +771,7 @@
CreateIndexStatement stmt = new CreateIndexStatement();
String indexName = null;
boolean ifNotExists = false;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
Pair<Integer, Pair<List<String>, IndexedTypeExpression>> fieldPair = null;
IndexParams indexType = null;
boolean enforced = false;
@@ -872,14 +875,14 @@
CreateDataverseStatement DataverseSpecification(Token startStmtToken) throws ParseException :
{
- String dvName = null;
+ List<String> dvName = null;
boolean ifNotExists = false;
}
{
- <DATAVERSE> dvName = Identifier()
+ <DATAVERSE> dvName = MultipartIdentifier()
ifNotExists = IfNotExists()
{
- CreateDataverseStatement stmt = new CreateDataverseStatement(new Identifier(dvName), null, ifNotExists);
+ CreateDataverseStatement stmt = new CreateDataverseStatement(DataverseName.create(dvName), null, ifNotExists);
return addSourceLocation(stmt, startStmtToken);
}
}
@@ -895,7 +898,7 @@
Token beginPos;
Token endPos;
FunctionName fctName = null;
- String currentDataverse = defaultDataverse;
+ DataverseName currentDataverse = defaultDataverse;
createNewScope();
}
@@ -927,7 +930,7 @@
CreateFeedStatement FeedSpecification(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
boolean ifNotExists = false;
String adapterName = null;
Map<String,String> properties = null;
@@ -1016,20 +1019,20 @@
void ApplyFunction(List<FunctionSignature> funcSigs) throws ParseException:
{
- FunctionName functioName = null;
+ FunctionName functionName = null;
String fqFunctionName = null;
}
{
- <APPLY> <FUNCTION> functioName = FunctionName()
+ <APPLY> <FUNCTION> functionName = FunctionName()
{
- fqFunctionName = functioName.library == null ? functioName.function : functioName.library + "#" + functioName.function;
- funcSigs.add(new FunctionSignature(functioName.dataverse, fqFunctionName, 1));
+ fqFunctionName = functionName.library == null ? functionName.function : functionName.library + "#" + functionName.function;
+ funcSigs.add(new FunctionSignature(functionName.dataverse, fqFunctionName, 1));
}
(
- <COMMA> functioName = FunctionName()
+ <COMMA> functionName = FunctionName()
{
- fqFunctionName = functioName.library == null ? functioName.function : functioName.library + "#" + functioName.function;
- funcSigs.add(new FunctionSignature(functioName.dataverse, fqFunctionName, 1));
+ fqFunctionName = functionName.library == null ? functionName.function : functionName.library + "#" + functionName.function;
+ funcSigs.add(new FunctionSignature(functionName.dataverse, fqFunctionName, 1));
}
)*
}
@@ -1092,8 +1095,9 @@
{
Token startToken = null;
String id = null;
- Pair<Identifier,Identifier> pairId = null;
- Triple<Identifier,Identifier,Identifier> tripleId = null;
+ List<String> multipartId = null;
+ Pair<DataverseName,Identifier> pairId = null;
+ Triple<DataverseName,Identifier,Identifier> tripleId = null;
FunctionSignature funcSig = null;
boolean ifExists = false;
AbstractStatement stmt = null;
@@ -1117,9 +1121,9 @@
{
stmt = new TypeDropStatement(pairId.first, pairId.second, ifExists);
}
- | <DATAVERSE> id = Identifier() ifExists = IfExists()
+ | <DATAVERSE> multipartId = MultipartIdentifier() ifExists = IfExists()
{
- stmt = new DataverseDropStatement(new Identifier(id), ifExists);
+ stmt = new DataverseDropStatement(DataverseName.create(multipartId), ifExists);
}
| <FUNCTION> funcSig = FunctionSignature() ifExists = IfExists()
{
@@ -1156,7 +1160,7 @@
InsertStatement InsertStatement() throws ParseException:
{
Token startToken = null;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
VariableExpr var = null;
Query query = null;
Expression returnExpression = null;
@@ -1180,7 +1184,7 @@
UpsertStatement UpsertStatement() throws ParseException:
{
Token startToken = null;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
VariableExpr var = null;
Query query = null;
Expression returnExpression = null;
@@ -1206,7 +1210,7 @@
Token startToken = null;
VariableExpr var = null;
Expression condition = null;
- Pair<Identifier, Identifier> nameComponents;
+ Pair<DataverseName, Identifier> nameComponents;
}
{
<DELETE> { startToken = token; }
@@ -1310,12 +1314,12 @@
LoadStatement LoadStatement() throws ParseException:
{
Token startToken = null;
- Identifier dataverseName = null;
+ DataverseName dataverseName = null;
Identifier datasetName = null;
boolean alreadySorted = false;
String adapterName;
Map<String,String> properties;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
}
{
<LOAD> { startToken = token; } Dataset() nameComponents = QualifiedName()
@@ -1350,7 +1354,7 @@
Statement CompactStatement() throws ParseException:
{
Token startToken = null;
- Pair<Identifier,Identifier> nameComponents = null;
+ Pair<DataverseName,Identifier> nameComponents = null;
}
{
<COMPACT> { startToken = token; } Dataset() nameComponents = QualifiedName()
@@ -1379,7 +1383,7 @@
Statement StartStatement(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> feedNameComponents = null;
+ Pair<DataverseName,Identifier> feedNameComponents = null;
AbstractStatement stmt = null;
}
@@ -1393,7 +1397,7 @@
AbstractStatement StopStatement(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> feedNameComponents = null;
+ Pair<DataverseName,Identifier> feedNameComponents = null;
AbstractStatement stmt = null;
}
@@ -1407,8 +1411,8 @@
AbstractStatement DisconnectStatement(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> feedNameComponents = null;
- Pair<Identifier,Identifier> datasetNameComponents = null;
+ Pair<DataverseName,Identifier> feedNameComponents = null;
+ Pair<DataverseName,Identifier> datasetNameComponents = null;
AbstractStatement stmt = null;
}
@@ -1426,8 +1430,8 @@
AbstractStatement ConnectStatement(Token startStmtToken) throws ParseException:
{
- Pair<Identifier,Identifier> feedNameComponents = null;
- Pair<Identifier,Identifier> datasetNameComponents = null;
+ Pair<DataverseName,Identifier> feedNameComponents = null;
+ Pair<DataverseName,Identifier> datasetNameComponents = null;
Map<String,String> configuration = null;
List<FunctionSignature> appliedFunctions = new ArrayList<FunctionSignature>();
@@ -1643,7 +1647,7 @@
TypeReferenceExpression TypeReference() throws ParseException:
{
- Pair<Identifier,Identifier> id = null;
+ Pair<DataverseName,Identifier> id = null;
}
{
id = QualifiedName()
@@ -1689,64 +1693,54 @@
FunctionName FunctionName() throws ParseException:
{
- String first = null;
- String second = null;
- String third = null;
- boolean secondAfterDot = false;
+ Triple<List<String>, SourceLocation, SqlppHint> prefix = null;
+ String suffix = null;
}
{
- first = Identifier()
+ // Note: there's a copy of this production in PrimaryExpr() (LOOKAHEAD for FunctionCallExpr())
+ // that copy must be kept in sync with this code
+ prefix = MultipartIdentifierWithHints(SqlppHint.INDEXED_NESTED_LOOP_JOIN_HINT,
+ SqlppHint.SKIP_SECONDARY_INDEX_SEARCH_HINT)
+ (<SHARP> suffix = Identifier())?
{
FunctionName result = new FunctionName();
- result.sourceLoc = getSourceLocation(token);
- Token hintToken = fetchHint(token, SqlppHint.INDEXED_NESTED_LOOP_JOIN_HINT,
- SqlppHint.SKIP_SECONDARY_INDEX_SEARCH_HINT);
- if (hintToken != null) {
- result.hint = hintToken.hint;
+ result.sourceLoc = prefix.second;
+ result.hint = prefix.third;
+ List<String> list = prefix.first;
+ int ln = list.size();
+ String last = list.get(ln - 1);
+ if (suffix == null) {
+ // prefix = (dv_part1.dv_part2...dv_partN.)?func_name
+ // no library name
+ result.function = last;
+ } else {
+ // prefix = (dv_part1.dv_part2...dv_partN.)?lib_name
+ // suffix = func_name
+ result.library = last;
+ result.function = suffix;
}
- }
- ( <DOT> second = Identifier()
- {
- secondAfterDot = true;
+ if (ln > 1) {
+ result.dataverse = DataverseName.create(list, 0, ln - 1);
+ } else {
+ result.dataverse = defaultDataverse;
}
- (<SHARP> third = Identifier())? | <SHARP> second = Identifier() )?
- {
- if (second == null) {
- result.dataverse = defaultDataverse;
- result.library = null;
- result.function = first;
- } else if (third == null) {
- if (secondAfterDot) {
- result.dataverse = first;
- result.library = null;
- result.function = second;
- } else {
- result.dataverse = defaultDataverse;
- result.library = first;
- result.function = second;
- }
- } else {
- result.dataverse = first;
- result.library = second;
- result.function = third;
- }
- if (result.function.equalsIgnoreCase(INT_TYPE_NAME)) {
- result.function = BuiltinType.AINT64.getTypeName();
- }
- return result;
+ if (result.function.equalsIgnoreCase(INT_TYPE_NAME)) {
+ result.function = BuiltinType.AINT64.getTypeName();
}
+ return result;
+ }
}
-Pair<Identifier,Identifier> TypeName() throws ParseException:
+Pair<DataverseName,Identifier> TypeName() throws ParseException:
{
- Pair<Identifier,Identifier> name = null;
+ Pair<DataverseName,Identifier> name = null;
}
{
name = QualifiedName()
{
if (name.first == null) {
- name.first = new Identifier(defaultDataverse);
+ name.first = defaultDataverse;
}
return name;
}
@@ -1860,48 +1854,71 @@
}
}
-Pair<Identifier,Identifier> QualifiedName() throws ParseException:
+List<String> MultipartIdentifier() throws ParseException:
{
- String first = null;
- String second = null;
+ Triple<List<String>, SourceLocation, SqlppHint> result = null;
}
{
- first = Identifier() (<DOT> second = Identifier())?
+ result = MultipartIdentifierWithHints(null)
{
- Identifier id1 = null;
- Identifier id2 = null;
- if (second == null) {
- id2 = new Identifier(first);
- } else
- {
- id1 = new Identifier(first);
- id2 = new Identifier(second);
- }
- return new Pair<Identifier,Identifier>(id1, id2);
+ return result.first;
}
}
-Triple<Identifier,Identifier,Identifier> DoubleQualifiedName() throws ParseException:
+Triple<List<String>, SourceLocation, SqlppHint> MultipartIdentifierWithHints(SqlppHint... expectedHints)
+ throws ParseException:
{
- String first = null;
- String second = null;
- String third = null;
+ List<String> list = new ArrayList<String>();
+ SourceLocation sourceLoc = null;
+ SqlppHint hint = null;
+ String item = null;
}
{
- first = Identifier() <DOT> second = Identifier() (<DOT> third = Identifier())?
+ item = Identifier()
{
- Identifier id1 = null;
- Identifier id2 = null;
- Identifier id3 = null;
- if (third == null) {
- id2 = new Identifier(first);
- id3 = new Identifier(second);
- } else {
- id1 = new Identifier(first);
- id2 = new Identifier(second);
- id3 = new Identifier(third);
+ list.add(item);
+ sourceLoc = getSourceLocation(token);
+ if (expectedHints != null && expectedHints.length > 0) {
+ Token hintToken = fetchHint(token, expectedHints);
+ if (hintToken != null) {
+ hint = hintToken.hint;
+ }
}
- return new Triple<Identifier,Identifier,Identifier>(id1, id2, id3);
+ }
+ (<DOT> item = Identifier() { list.add(item); } )*
+ {
+ return new Triple<List<String>, SourceLocation, SqlppHint>(list, sourceLoc, hint);
+ }
+}
+
+Pair<DataverseName,Identifier> QualifiedName() throws ParseException:
+{
+ List<String> list = null;
+}
+{
+ list = MultipartIdentifier()
+ {
+ int len = list.size();
+ DataverseName id1 = len > 1 ? DataverseName.create(list, 0, len - 1) : null;
+ Identifier id2 = new Identifier(list.get(len - 1));
+ return new Pair<DataverseName,Identifier>(id1, id2);
+ }
+}
+
+Triple<DataverseName, Identifier, Identifier> DoubleQualifiedName() throws ParseException:
+{
+ List<String> list = new ArrayList<String>();
+ String item = null;
+}
+{
+ item = Identifier() { list.add(item); }
+ (<DOT> item = Identifier() { list.add(item); } )+
+ {
+ int len = list.size();
+ DataverseName id1 = len > 2 ? DataverseName.create(list, 0, len - 2) : null;
+ Identifier id2 = new Identifier(list.get(len - 2));
+ Identifier id3 = new Identifier(list.get(len - 1));
+ return new Triple<DataverseName,Identifier,Identifier>(id1, id2, id3);
}
}
@@ -2495,8 +2512,8 @@
Expression expr = null;
}
{
- ( LOOKAHEAD(4)
- expr = FunctionCallExpr()
+ (
+ LOOKAHEAD(Identifier() (<DOT> Identifier())* (<SHARP> Identifier())? <LEFTPAREN>) expr = FunctionCallExpr()
| expr = CaseExpr()
| expr = Literal()
| expr = VariableRef()
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
index 0cd03d7..9d738ed 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
@@ -20,12 +20,14 @@
package org.apache.asterix.metadata;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.api.IMetadataEntity;
import org.apache.asterix.metadata.entities.CompactionPolicy;
import org.apache.asterix.metadata.entities.Dataset;
@@ -50,36 +52,36 @@
public class MetadataCache {
// Key is dataverse name.
- protected final Map<String, Dataverse> dataverses = new HashMap<>();
+ protected final Map<DataverseName, Dataverse> dataverses = new HashMap<>();
// Key is dataverse name. Key of value map is dataset name.
- protected final Map<String, Map<String, Dataset>> datasets = new HashMap<>();
+ protected final Map<DataverseName, Map<String, Dataset>> datasets = new HashMap<>();
// Key is dataverse name. Key of outer value map is dataset name; key of inner value map is index name.
- protected final Map<String, Map<String, Map<String, Index>>> indexes = new HashMap<>();
+ protected final Map<DataverseName, Map<String, Map<String, Index>>> indexes = new HashMap<>();
// Key is dataverse name. Key of value map is datatype name.
- protected final Map<String, Map<String, Datatype>> datatypes = new HashMap<>();
- // Key is dataverse name.
+ protected final Map<DataverseName, Map<String, Datatype>> datatypes = new HashMap<>();
+ // Key is node group name.
protected final Map<String, NodeGroup> nodeGroups = new HashMap<>();
// Key is function signature (dataverse name + function name + arity).
protected final Map<FunctionSignature, Function> functions = new HashMap<>();
- // Key is adapter dataverse. Key of value map is the adapter name
- protected final Map<String, Map<String, DatasourceAdapter>> adapters = new HashMap<>();
+ // Key is adapter dataverse name. Key of value map is the adapter name
+ protected final Map<DataverseName, Map<String, DatasourceAdapter>> adapters = new HashMap<>();
// Key is DataverseName, Key of the value map is the Policy name
- protected final Map<String, Map<String, FeedPolicyEntity>> feedPolicies = new HashMap<>();
+ protected final Map<DataverseName, Map<String, FeedPolicyEntity>> feedPolicies = new HashMap<>();
// Key is library dataverse. Key of value map is the library name
- protected final Map<String, Map<String, Library>> libraries = new HashMap<>();
+ protected final Map<DataverseName, Map<String, Library>> libraries = new HashMap<>();
// Key is feed dataverse name. Key of value map is the feed name
- protected final Map<String, Map<String, Feed>> feeds = new HashMap<>();
+ protected final Map<DataverseName, Map<String, Feed>> feeds = new HashMap<>();
// Key is DataverseName, Key of the value map is the Policy name
- protected final Map<String, Map<String, CompactionPolicy>> compactionPolicies = new HashMap<>();
+ protected final Map<DataverseName, Map<String, CompactionPolicy>> compactionPolicies = new HashMap<>();
// Key is DataverseName, Key of value map is feedConnectionId
- protected final Map<String, Map<String, FeedConnection>> feedConnections = new HashMap<>();
+ protected final Map<DataverseName, Map<String, FeedConnection>> feedConnections = new HashMap<>();
// Atomically executes all metadata operations in ctx's log.
public void commit(MetadataTransactionContext ctx) {
// Forward roll the operations written in ctx's log.
int logIx = 0;
- ArrayList<MetadataLogicalOperation> opLog = ctx.getOpLog();
+ List<MetadataLogicalOperation> opLog = ctx.getOpLog();
try {
for (logIx = 0; logIx < opLog.size(); logIx++) {
doOperation(opLog.get(logIx));
@@ -135,11 +137,12 @@
synchronized (dataverses) {
synchronized (datasets) {
synchronized (datatypes) {
- if (!dataverses.containsKey(dataverse)) {
- datasets.put(dataverse.getDataverseName(), new HashMap<String, Dataset>());
- datatypes.put(dataverse.getDataverseName(), new HashMap<String, Datatype>());
- adapters.put(dataverse.getDataverseName(), new HashMap<String, DatasourceAdapter>());
- return dataverses.put(dataverse.getDataverseName(), dataverse);
+ DataverseName dataverseName = dataverse.getDataverseName();
+ if (!dataverses.containsKey(dataverseName)) {
+ datasets.put(dataverseName, new HashMap<>());
+ datatypes.put(dataverseName, new HashMap<>());
+ adapters.put(dataverseName, new HashMap<>());
+ return dataverses.put(dataverseName, dataverse);
}
return null;
}
@@ -197,7 +200,7 @@
}
public CompactionPolicy addCompactionPolicyIfNotExists(CompactionPolicy compactionPolicy) {
- synchronized (compactionPolicy) {
+ synchronized (compactionPolicies) {
Map<String, CompactionPolicy> p = compactionPolicies.get(compactionPolicy.getDataverseName());
if (p == null) {
p = new HashMap<>();
@@ -216,7 +219,7 @@
synchronized (compactionPolicies) {
Map<String, CompactionPolicy> p = compactionPolicies.get(compactionPolicy.getDataverseName());
if (p != null && p.get(compactionPolicy.getPolicyName()) != null) {
- return p.remove(compactionPolicy);
+ return p.remove(compactionPolicy.getPolicyName());
}
return null;
}
@@ -239,7 +242,7 @@
compactionPolicies.remove(dataverse.getDataverseName());
List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<>();
for (FunctionSignature signature : functions.keySet()) {
- if (signature.getNamespace().equals(dataverse.getDataverseName())) {
+ if (signature.getDataverseName().equals(dataverse.getDataverseName())) {
markedFunctionsForRemoval.add(signature);
}
}
@@ -311,13 +314,13 @@
}
}
- public Dataverse getDataverse(String dataverseName) {
+ public Dataverse getDataverse(DataverseName dataverseName) {
synchronized (dataverses) {
return dataverses.get(dataverseName);
}
}
- public Dataset getDataset(String dataverseName, String datasetName) {
+ public Dataset getDataset(DataverseName dataverseName, String datasetName) {
synchronized (datasets) {
Map<String, Dataset> m = datasets.get(dataverseName);
if (m == null) {
@@ -327,7 +330,7 @@
}
}
- public Index getIndex(String dataverseName, String datasetName, String indexName) {
+ public Index getIndex(DataverseName dataverseName, String datasetName, String indexName) {
synchronized (indexes) {
Map<String, Map<String, Index>> datasetMap = indexes.get(dataverseName);
if (datasetMap == null) {
@@ -341,7 +344,7 @@
}
}
- public Datatype getDatatype(String dataverseName, String datatypeName) {
+ public Datatype getDatatype(DataverseName dataverseName, String datatypeName) {
synchronized (datatypes) {
Map<String, Datatype> m = datatypes.get(dataverseName);
if (m == null) {
@@ -363,29 +366,23 @@
}
}
- public List<Dataset> getDataverseDatasets(String dataverseName) {
- List<Dataset> retDatasets = new ArrayList<>();
+ public List<Dataset> getDataverseDatasets(DataverseName dataverseName) {
synchronized (datasets) {
Map<String, Dataset> m = datasets.get(dataverseName);
if (m == null) {
- return retDatasets;
+ return Collections.emptyList();
}
- m.forEach((key, value) -> retDatasets.add(value));
- return retDatasets;
+ return new ArrayList<>(m.values());
}
}
- public List<Index> getDatasetIndexes(String dataverseName, String datasetName) {
- List<Index> retIndexes = new ArrayList<>();
+ public List<Index> getDatasetIndexes(DataverseName dataverseName, String datasetName) {
synchronized (datasets) {
Map<String, Index> map = indexes.get(dataverseName).get(datasetName);
if (map == null) {
- return retIndexes;
+ return Collections.emptyList();
}
- for (Map.Entry<String, Index> entry : map.entrySet()) {
- retIndexes.add(entry.getValue());
- }
- return retIndexes;
+ return new ArrayList<>(map.values());
}
}
@@ -430,7 +427,7 @@
}
public Object addFeedPolicyIfNotExists(FeedPolicyEntity feedPolicy) {
- synchronized (feedPolicy) {
+ synchronized (feedPolicies) {
Map<String, FeedPolicyEntity> p = feedPolicies.get(feedPolicy.getDataverseName());
if (p == null) {
p = new HashMap<>();
@@ -449,7 +446,7 @@
synchronized (feedPolicies) {
Map<String, FeedPolicyEntity> p = feedPolicies.get(feedPolicy.getDataverseName());
if (p != null && p.get(feedPolicy.getPolicyName()) != null) {
- return p.remove(feedPolicy).getPolicyName();
+ return p.remove(feedPolicy.getPolicyName()).getPolicyName();
}
return null;
}
@@ -458,10 +455,10 @@
public DatasourceAdapter addAdapterIfNotExists(DatasourceAdapter adapter) {
synchronized (adapters) {
Map<String, DatasourceAdapter> adaptersInDataverse =
- adapters.get(adapter.getAdapterIdentifier().getNamespace());
+ adapters.get(adapter.getAdapterIdentifier().getDataverseName());
if (adaptersInDataverse == null) {
adaptersInDataverse = new HashMap<>();
- adapters.put(adapter.getAdapterIdentifier().getNamespace(), adaptersInDataverse);
+ adapters.put(adapter.getAdapterIdentifier().getDataverseName(), adaptersInDataverse);
}
DatasourceAdapter adapterObject = adaptersInDataverse.get(adapter.getAdapterIdentifier().getName());
if (adapterObject == null) {
@@ -474,7 +471,7 @@
public DatasourceAdapter dropAdapterIfExists(DatasourceAdapter adapter) {
synchronized (adapters) {
Map<String, DatasourceAdapter> adaptersInDataverse =
- adapters.get(adapter.getAdapterIdentifier().getNamespace());
+ adapters.get(adapter.getAdapterIdentifier().getDataverseName());
if (adaptersInDataverse != null) {
return adaptersInDataverse.remove(adapter.getAdapterIdentifier().getName());
}
@@ -485,13 +482,13 @@
public Library addLibraryIfNotExists(Library library) {
synchronized (libraries) {
Map<String, Library> libsInDataverse = libraries.get(library.getDataverseName());
- boolean needToAddd = (libsInDataverse == null || libsInDataverse.get(library.getName()) != null);
- if (needToAddd) {
+ boolean needToAdd = (libsInDataverse == null || libsInDataverse.get(library.getName()) != null);
+ if (needToAdd) {
if (libsInDataverse == null) {
libsInDataverse = new HashMap<>();
libraries.put(library.getDataverseName(), libsInDataverse);
}
- return libsInDataverse.put(library.getDataverseName(), library);
+ return libsInDataverse.put(library.getName(), library);
}
return null;
}
@@ -511,8 +508,8 @@
synchronized (feedConnections) {
Map<String, FeedConnection> feedConnsInDataverse = feedConnections.get(feedConnection.getDataverseName());
if (feedConnsInDataverse == null) {
- feedConnections.put(feedConnection.getDataverseName(), new HashMap<>());
- feedConnsInDataverse = feedConnections.get(feedConnection.getDataverseName());
+ feedConnsInDataverse = new HashMap<>();
+ feedConnections.put(feedConnection.getDataverseName(), feedConnsInDataverse);
}
return feedConnsInDataverse.put(feedConnection.getConnectionId(), feedConnection);
}
@@ -533,8 +530,8 @@
synchronized (feeds) {
Map<String, Feed> feedsInDataverse = feeds.get(feed.getDataverseName());
if (feedsInDataverse == null) {
- feeds.put(feed.getDataverseName(), new HashMap<>());
- feedsInDataverse = feeds.get(feed.getDataverseName());
+ feedsInDataverse = new HashMap<>();
+ feeds.put(feed.getDataverseName(), feedsInDataverse);
}
return feedsInDataverse.put(feed.getFeedName(), feed);
}
@@ -570,7 +567,7 @@
/**
* Represents a logical operation against the metadata.
*/
- protected class MetadataLogicalOperation {
+ protected static class MetadataLogicalOperation {
// Entity to be added/dropped.
public final IMetadataEntity<?> entity;
// True for add, false for drop.
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index e5ee163..eb5f59a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -20,7 +20,6 @@
package org.apache.asterix.metadata;
import java.rmi.RemoteException;
-import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -33,6 +32,7 @@
import org.apache.asterix.common.exceptions.MetadataException;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.ITxnIdFactory;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.external.indexing.ExternalFile;
@@ -170,7 +170,7 @@
}
@Override
- public void dropDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException {
+ public void dropDataverse(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException {
try {
metadataNode.dropDataverse(ctx.getTxnId(), dataverseName);
} catch (RemoteException e) {
@@ -189,7 +189,8 @@
}
@Override
- public Dataverse getDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException {
+ public Dataverse getDataverse(MetadataTransactionContext ctx, DataverseName dataverseName)
+ throws AlgebricksException {
// First look in the context to see if this transaction created the
// requested dataverse itself (but the dataverse is still uncommitted).
Dataverse dataverse = ctx.getDataverse(dataverseName);
@@ -222,13 +223,12 @@
}
@Override
- public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, String dataverseName)
+ public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, DataverseName dataverseName)
throws AlgebricksException {
- List<Dataset> dataverseDatasets = new ArrayList<>();
+ List<Dataset> dataverseDatasets;
try {
- // Assuming that the transaction can read its own writes on the
- // metadata node.
- dataverseDatasets.addAll(metadataNode.getDataverseDatasets(ctx.getTxnId(), dataverseName));
+ // Assuming that the transaction can read its own writes on the metadata node.
+ dataverseDatasets = metadataNode.getDataverseDatasets(ctx.getTxnId(), dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
@@ -250,7 +250,7 @@
}
@Override
- public void dropDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+ public void dropDataset(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName)
throws AlgebricksException {
try {
metadataNode.dropDataset(ctx.getTxnId(), dataverseName, datasetName);
@@ -262,7 +262,7 @@
}
@Override
- public Dataset getDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+ public Dataset getDataset(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName)
throws AlgebricksException {
// First look in the context to see if this transaction created the
@@ -298,13 +298,13 @@
}
@Override
- public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String dataverseName, String datasetName)
- throws AlgebricksException {
- List<Index> datasetIndexes = new ArrayList<>();
+ public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
Dataset dataset = getDataset(ctx, dataverseName, datasetName);
if (dataset == null) {
- return datasetIndexes;
+ return Collections.emptyList();
}
+ List<Index> datasetIndexes;
try {
datasetIndexes = metadataNode.getDatasetIndexes(ctx.getTxnId(), dataverseName, datasetName);
} catch (RemoteException e) {
@@ -325,9 +325,8 @@
}
@Override
- public CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
- throws AlgebricksException {
-
+ public CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, DataverseName dataverse,
+ String policyName) throws AlgebricksException {
CompactionPolicy compactionPolicy;
try {
compactionPolicy = metadataNode.getCompactionPolicy(ctx.getTxnId(), dataverse, policyName);
@@ -353,7 +352,7 @@
}
@Override
- public void dropDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+ public void dropDatatype(MetadataTransactionContext ctx, DataverseName dataverseName, String datatypeName)
throws AlgebricksException {
try {
metadataNode.dropDatatype(ctx.getTxnId(), dataverseName, datatypeName);
@@ -364,7 +363,7 @@
}
@Override
- public Datatype getDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+ public Datatype getDatatype(MetadataTransactionContext ctx, DataverseName dataverseName, String datatypeName)
throws AlgebricksException {
// First look in the context to see if this transaction created the
// requested datatype itself (but the datatype is still uncommitted).
@@ -423,12 +422,11 @@
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
mdTxnCtx.addAdapter(adapter);
-
}
@Override
- public void dropIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
- throws AlgebricksException {
+ public void dropIndex(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName,
+ String indexName) throws AlgebricksException {
try {
metadataNode.dropIndex(ctx.getTxnId(), dataverseName, datasetName, indexName);
} catch (RemoteException e) {
@@ -438,8 +436,8 @@
}
@Override
- public Index getIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
- throws AlgebricksException {
+ public Index getIndex(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName,
+ String indexName) throws AlgebricksException {
// First look in the context to see if this transaction created the
// requested index itself (but the index is still uncommitted).
@@ -493,7 +491,7 @@
modifyNodegroup(ctx, nodeGroup, Operation.UPSERT);
}
- public void modifyNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup, Operation op)
+ private void modifyNodegroup(MetadataTransactionContext ctx, NodeGroup nodeGroup, Operation op)
throws AlgebricksException {
try {
metadataNode.modifyNodeGroup(ctx.getTxnId(), nodeGroup, op);
@@ -587,7 +585,7 @@
// in the cache.
return null;
}
- if (ctx.getDataverse(functionSignature.getNamespace()) != null) {
+ if (ctx.getDataverse(functionSignature.getDataverseName()) != null) {
// This transaction has dropped and subsequently created the same
// dataverse.
return null;
@@ -608,16 +606,15 @@
ctx.addFunction(function);
}
return function;
-
}
@Override
- public List<Function> getFunctions(MetadataTransactionContext ctx, String dataverseName)
+ public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, DataverseName dataverseName)
throws AlgebricksException {
try {
- return metadataNode.getFunctions(ctx.getTxnId(), dataverseName);
+ return metadataNode.getDataverseFunctions(ctx.getTxnId(), dataverseName);
} catch (RemoteException e) {
- throw new MetadataException(e);
+ throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
}
@@ -651,23 +648,7 @@
}
@Override
- public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
- throws AlgebricksException {
- List<Function> dataverseFunctions;
- try {
- // Assuming that the transaction can read its own writes on the
- // metadata node.
- dataverseFunctions = metadataNode.getDataverseFunctions(ctx.getTxnId(), dataverseName);
- } catch (RemoteException e) {
- throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
- }
- // Don't update the cache to avoid checking against the transaction's
- // uncommitted functions.
- return dataverseFunctions;
- }
-
- @Override
- public void dropAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
+ public void dropAdapter(MetadataTransactionContext ctx, DataverseName dataverseName, String name)
throws AlgebricksException {
try {
metadataNode.dropAdapter(ctx.getTxnId(), dataverseName, name);
@@ -678,7 +659,7 @@
}
@Override
- public DatasourceAdapter getAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
+ public DatasourceAdapter getAdapter(MetadataTransactionContext ctx, DataverseName dataverseName, String name)
throws AlgebricksException {
DatasourceAdapter adapter;
try {
@@ -690,7 +671,7 @@
}
@Override
- public void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ public void dropLibrary(MetadataTransactionContext ctx, DataverseName dataverseName, String libraryName)
throws AlgebricksException {
try {
metadataNode.dropLibrary(ctx.getTxnId(), dataverseName, libraryName);
@@ -701,7 +682,7 @@
}
@Override
- public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
+ public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, DataverseName dataverseName)
throws AlgebricksException {
List<Library> dataverseLibaries;
try {
@@ -727,8 +708,8 @@
}
@Override
- public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
- throws AlgebricksException, RemoteException {
+ public Library getLibrary(MetadataTransactionContext ctx, DataverseName dataverseName, String libraryName)
+ throws AlgebricksException {
Library library;
try {
library = metadataNode.getLibrary(ctx.getTxnId(), dataverseName, libraryName);
@@ -739,12 +720,11 @@
}
@Override
- public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
- throws AlgebricksException {
-
+ public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, DataverseName dataverseName,
+ String policyName) throws AlgebricksException {
FeedPolicyEntity feedPolicy;
try {
- feedPolicy = metadataNode.getFeedPolicy(ctx.getTxnId(), dataverse, policyName);
+ feedPolicy = metadataNode.getFeedPolicy(ctx.getTxnId(), dataverseName, policyName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
@@ -752,10 +732,11 @@
}
@Override
- public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException {
+ public Feed getFeed(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName)
+ throws AlgebricksException {
Feed feed;
try {
- feed = metadataNode.getFeed(ctx.getTxnId(), dataverse, feedName);
+ feed = metadataNode.getFeed(ctx.getTxnId(), dataverseName, feedName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
@@ -763,27 +744,29 @@
}
@Override
- public List<Feed> getFeeds(MetadataTransactionContext ctx, String dataverse) throws AlgebricksException {
+ public List<Feed> getFeeds(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException {
List<Feed> feeds;
try {
- feeds = metadataNode.getFeeds(ctx.getTxnId(), dataverse);
+ feeds = metadataNode.getFeeds(ctx.getTxnId(), dataverseName);
} catch (RemoteException e) {
- throw new MetadataException(e);
+ throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
return feeds;
}
@Override
- public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException {
- Feed feed = null;
- List<FeedConnection> feedConnections = null;
+ public void dropFeed(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName)
+ throws AlgebricksException {
+ Feed feed;
+ List<FeedConnection> feedConnections;
try {
- feed = metadataNode.getFeed(ctx.getTxnId(), dataverse, feedName);
- feedConnections = metadataNode.getFeedConnections(ctx.getTxnId(), dataverse, feedName);
- metadataNode.dropFeed(ctx.getTxnId(), dataverse, feedName);
+ feed = metadataNode.getFeed(ctx.getTxnId(), dataverseName, feedName);
+ feedConnections = metadataNode.getFeedConnections(ctx.getTxnId(), dataverseName, feedName);
+ metadataNode.dropFeed(ctx.getTxnId(), dataverseName, feedName);
for (FeedConnection feedConnection : feedConnections) {
- metadataNode.dropFeedConnection(ctx.getTxnId(), dataverse, feedName, feedConnection.getDatasetName());
- ctx.dropFeedConnection(dataverse, feedName, feedConnection.getDatasetName());
+ metadataNode.dropFeedConnection(ctx.getTxnId(), dataverseName, feedName,
+ feedConnection.getDatasetName());
+ ctx.dropFeedConnection(dataverseName, feedName, feedConnection.getDatasetName());
}
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -813,7 +796,7 @@
}
@Override
- public void dropFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName,
+ public void dropFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName,
String datasetName) throws AlgebricksException {
try {
metadataNode.dropFeedConnection(ctx.getTxnId(), dataverseName, feedName, datasetName);
@@ -824,8 +807,8 @@
}
@Override
- public FeedConnection getFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName,
- String datasetName) throws AlgebricksException {
+ public FeedConnection getFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName,
+ String feedName, String datasetName) throws AlgebricksException {
try {
return metadataNode.getFeedConnection(ctx.getTxnId(), dataverseName, feedName, datasetName);
} catch (RemoteException e) {
@@ -834,8 +817,8 @@
}
@Override
- public List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, String dataverseName, String feedName)
- throws AlgebricksException {
+ public List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, DataverseName dataverseName,
+ String feedName) throws AlgebricksException {
try {
return metadataNode.getFeedConnections(ctx.getTxnId(), dataverseName, feedName);
} catch (RemoteException e) {
@@ -844,11 +827,11 @@
}
@Override
- public List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext mdTxnCtx, String dataverse)
- throws AlgebricksException {
+ public List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext mdTxnCtx,
+ DataverseName dataverseName) throws AlgebricksException {
List<DatasourceAdapter> dataverseAdapters;
try {
- dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getTxnId(), dataverse);
+ dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getTxnId(), dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
@@ -856,7 +839,7 @@
}
@Override
- public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverseName, String policyName)
+ public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String policyName)
throws AlgebricksException {
FeedPolicyEntity feedPolicy;
try {
@@ -868,11 +851,12 @@
mdTxnCtx.dropFeedPolicy(feedPolicy);
}
- public List<FeedPolicyEntity> getDataversePolicies(MetadataTransactionContext mdTxnCtx, String dataverse)
- throws AlgebricksException {
+ @Override
+ public List<FeedPolicyEntity> getDataverseFeedPolicies(MetadataTransactionContext mdTxnCtx,
+ DataverseName dataverseName) throws AlgebricksException {
List<FeedPolicyEntity> dataverseFeedPolicies;
try {
- dataverseFeedPolicies = metadataNode.getDataversePolicies(mdTxnCtx.getTxnId(), dataverse);
+ dataverseFeedPolicies = metadataNode.getDataverseFeedPolicies(mdTxnCtx.getTxnId(), dataverseName);
} catch (RemoteException e) {
throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
}
@@ -911,7 +895,7 @@
}
@Override
- public ExternalFile getExternalFile(MetadataTransactionContext ctx, String dataverseName, String datasetName,
+ public ExternalFile getExternalFile(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName,
Integer fileNumber) throws AlgebricksException {
ExternalFile file;
try {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index e6a768d..45534c7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -32,6 +32,7 @@
import org.apache.asterix.common.config.DatasetConfig.IndexType;
import org.apache.asterix.common.dataflow.LSMIndexUtil;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
import org.apache.asterix.common.transactions.ITransactionContext;
@@ -82,6 +83,7 @@
import org.apache.asterix.metadata.entitytupletranslators.MetadataTupleTranslatorProvider;
import org.apache.asterix.metadata.entitytupletranslators.NodeGroupTupleTranslator;
import org.apache.asterix.metadata.entitytupletranslators.NodeTupleTranslator;
+import org.apache.asterix.metadata.utils.DatasetUtil;
import org.apache.asterix.metadata.valueextractors.MetadataEntityValueExtractor;
import org.apache.asterix.metadata.valueextractors.TupleCopyValueExtractor;
import org.apache.asterix.om.base.AInt32;
@@ -98,6 +100,7 @@
import org.apache.asterix.transaction.management.opcallbacks.UpsertOperationCallback;
import org.apache.asterix.transaction.management.service.transaction.DatasetIdFactory;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -166,14 +169,14 @@
}
@Override
- public void beginTransaction(TxnId transactionId) throws RemoteException {
+ public void beginTransaction(TxnId transactionId) {
TransactionOptions options = new TransactionOptions(AtomicityLevel.ATOMIC);
transactionSubsystem.getTransactionManager().beginTransaction(transactionId, options);
}
@SuppressWarnings("squid:S1181")
@Override
- public void commitTransaction(TxnId txnId) throws RemoteException {
+ public void commitTransaction(TxnId txnId) {
try {
transactionSubsystem.getTransactionManager().commitTransaction(txnId);
} catch (Throwable th) {
@@ -185,7 +188,7 @@
@SuppressWarnings("squid:S1181")
@Override
- public void abortTransaction(TxnId txnId) throws RemoteException {
+ public void abortTransaction(TxnId txnId) {
try {
transactionSubsystem.getTransactionManager().abortTransaction(txnId);
} catch (Throwable th) {
@@ -264,8 +267,7 @@
* @throws AlgebricksException
*/
private <T> List<T> getEntities(TxnId txnId, ITupleReference searchKey,
- IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index)
- throws AlgebricksException, RemoteException {
+ IMetadataEntityTupleTranslator<T> tupleTranslator, IMetadataIndex index) throws AlgebricksException {
try {
IValueExtractor<T> valueExtractor = new MetadataEntityValueExtractor<>(tupleTranslator);
List<T> results = new ArrayList<>();
@@ -277,52 +279,49 @@
}
@Override
- public <T extends IExtensionMetadataEntity> void addEntity(TxnId txnId, T entity)
- throws AlgebricksException, RemoteException {
+ public <T extends IExtensionMetadataEntity> void addEntity(TxnId txnId, T entity) throws AlgebricksException {
ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
if (index == null) {
throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
}
- IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator();
+ IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
addEntity(txnId, entity, tupleTranslator, index);
}
@Override
- public <T extends IExtensionMetadataEntity> void upsertEntity(TxnId txnId, T entity)
- throws AlgebricksException, RemoteException {
+ public <T extends IExtensionMetadataEntity> void upsertEntity(TxnId txnId, T entity) throws AlgebricksException {
ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
if (index == null) {
throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
}
- IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator();
+ IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
upsertEntity(txnId, entity, tupleTranslator, index);
}
@Override
- public <T extends IExtensionMetadataEntity> void deleteEntity(TxnId txnId, T entity)
- throws AlgebricksException, RemoteException {
+ public <T extends IExtensionMetadataEntity> void deleteEntity(TxnId txnId, T entity) throws AlgebricksException {
ExtensionMetadataDataset<T> index = (ExtensionMetadataDataset<T>) extensionDatasets.get(entity.getDatasetId());
if (index == null) {
throw new AlgebricksException("Metadata Extension Index: " + entity.getDatasetId() + " was not found");
}
- IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator();
+ IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(true);
deleteEntity(txnId, entity, tupleTranslator, index);
}
@Override
public <T extends IExtensionMetadataEntity> List<T> getEntities(TxnId txnId, IExtensionMetadataSearchKey searchKey)
- throws AlgebricksException, RemoteException {
+ throws AlgebricksException {
ExtensionMetadataDataset<T> index =
(ExtensionMetadataDataset<T>) extensionDatasets.get(searchKey.getDatasetId());
if (index == null) {
throw new AlgebricksException("Metadata Extension Index: " + searchKey.getDatasetId() + " was not found");
}
- IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator();
+ IMetadataEntityTupleTranslator<T> tupleTranslator = index.getTupleTranslator(false);
return getEntities(txnId, searchKey.getSearchKey(), tupleTranslator, index);
}
@Override
- public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException, RemoteException {
+ public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException {
try {
DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(dataverse);
@@ -338,7 +337,7 @@
}
@Override
- public void addDataset(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException {
+ public void addDataset(TxnId txnId, Dataset dataset) throws AlgebricksException {
try {
// Insert into the 'dataset' dataset.
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(true);
@@ -379,7 +378,7 @@
}
@Override
- public void addNode(TxnId txnId, Node node) throws AlgebricksException, RemoteException {
+ public void addNode(TxnId txnId, Node node) throws AlgebricksException {
try {
NodeTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(node);
@@ -394,8 +393,7 @@
}
@Override
- public void modifyNodeGroup(TxnId txnId, NodeGroup nodeGroup, Operation modificationOp)
- throws AlgebricksException, RemoteException {
+ public void modifyNodeGroup(TxnId txnId, NodeGroup nodeGroup, Operation modificationOp) throws AlgebricksException {
try {
NodeGroupTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeGroupTupleTranslator(true);
ITupleReference tuple = tupleReaderWriter.getTupleFromMetadataEntity(nodeGroup);
@@ -411,7 +409,7 @@
}
@Override
- public void addDatatype(TxnId txnId, Datatype datatype) throws AlgebricksException, RemoteException {
+ public void addDatatype(TxnId txnId, Datatype datatype) throws AlgebricksException {
try {
DatatypeTupleTranslator tupleReaderWriter =
tupleTranslatorProvider.getDataTypeTupleTranslator(txnId, this, true);
@@ -428,7 +426,7 @@
}
@Override
- public void addFunction(TxnId txnId, Function function) throws AlgebricksException, RemoteException {
+ public void addFunction(TxnId txnId, Function function) throws AlgebricksException {
try {
// Insert into the 'function' dataset.
FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(true);
@@ -514,7 +512,7 @@
}
@Override
- public void dropDataverse(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException {
+ public void dropDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
confirmDataverseCanBeDeleted(txnId, dataverseName);
@@ -568,7 +566,7 @@
dropFeed(txnId, dataverseName, feed.getFeedName());
}
- List<FeedPolicyEntity> feedPolicies = getDataversePolicies(txnId, dataverseName);
+ List<FeedPolicyEntity> feedPolicies = getDataverseFeedPolicies(txnId, dataverseName);
if (feedPolicies != null && !feedPolicies.isEmpty()) {
// Drop all feed ingestion policies in this dataverse.
for (FeedPolicyEntity feedPolicy : feedPolicies) {
@@ -594,14 +592,12 @@
}
@Override
- public void dropDataset(TxnId txnId, String dataverseName, String datasetName)
- throws AlgebricksException, RemoteException {
+ public void dropDataset(TxnId txnId, DataverseName dataverseName, String datasetName) throws AlgebricksException {
dropDataset(txnId, dataverseName, datasetName, false);
}
- public void dropDataset(TxnId txnId, String dataverseName, String datasetName, boolean force)
- throws AlgebricksException, RemoteException {
-
+ public void dropDataset(TxnId txnId, DataverseName dataverseName, String datasetName, boolean force)
+ throws AlgebricksException {
if (!force) {
confirmDatasetCanBeDeleted(txnId, dataverseName, datasetName);
}
@@ -655,8 +651,8 @@
}
@Override
- public void dropIndex(TxnId txnId, String dataverseName, String datasetName, String indexName)
- throws AlgebricksException, RemoteException {
+ public void dropIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
// Searches the index for the tuple to be deleted. Acquires an S
@@ -675,8 +671,7 @@
}
@Override
- public boolean dropNodegroup(TxnId txnId, String nodeGroupName, boolean failSilently)
- throws AlgebricksException, RemoteException {
+ public boolean dropNodegroup(TxnId txnId, String nodeGroupName, boolean failSilently) throws AlgebricksException {
List<String> datasetNames = getDatasetNamesPartitionedOnThisNodeGroup(txnId, nodeGroupName);
if (!datasetNames.isEmpty()) {
if (failSilently) {
@@ -709,8 +704,7 @@
}
@Override
- public void dropDatatype(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ public void dropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName) throws AlgebricksException {
confirmDatatypeIsUnused(txnId, dataverseName, datatypeName);
@@ -739,8 +733,8 @@
}
}
- private void forceDropDatatype(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ private void forceDropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datatypeName);
// Searches the index for the tuple to be deleted. Acquires an S
@@ -763,7 +757,7 @@
}
@Override
- public List<Dataverse> getDataverses(TxnId txnId) throws AlgebricksException, RemoteException {
+ public List<Dataverse> getDataverses(TxnId txnId) throws AlgebricksException {
try {
DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(false);
IValueExtractor<Dataverse> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -776,7 +770,7 @@
}
@Override
- public Dataverse getDataverse(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException {
+ public Dataverse getDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(false);
@@ -793,8 +787,7 @@
}
@Override
- public List<Dataset> getDataverseDatasets(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
+ public List<Dataset> getDataverseDatasets(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -808,7 +801,7 @@
}
@Override
- public List<Feed> getDataverseFeeds(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException {
+ public List<Feed> getDataverseFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
@@ -822,8 +815,7 @@
}
@Override
- public List<Library> getDataverseLibraries(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
+ public List<Library> getDataverseLibraries(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(false);
@@ -836,8 +828,7 @@
}
}
- private List<Datatype> getDataverseDatatypes(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
+ private List<Datatype> getDataverseDatatypes(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
DatatypeTupleTranslator tupleReaderWriter =
@@ -852,8 +843,7 @@
}
@Override
- public Dataset getDataset(TxnId txnId, String dataverseName, String datasetName)
- throws AlgebricksException, RemoteException {
+ public Dataset getDataset(TxnId txnId, DataverseName dataverseName, String datasetName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName);
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
@@ -869,7 +859,7 @@
}
}
- public List<Dataset> getAllDatasets(TxnId txnId) throws AlgebricksException, RemoteException {
+ public List<Dataset> getAllDatasets(TxnId txnId) throws AlgebricksException {
try {
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -881,7 +871,7 @@
}
}
- public List<Function> getAllFunctions(TxnId txnId) throws AlgebricksException, RemoteException {
+ public List<Function> getAllFunctions(TxnId txnId) throws AlgebricksException {
try {
FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(false);
IValueExtractor<Function> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -893,7 +883,7 @@
}
}
- public List<Datatype> getAllDatatypes(TxnId txnId) throws AlgebricksException, RemoteException {
+ public List<Datatype> getAllDatatypes(TxnId txnId) throws AlgebricksException {
try {
DatatypeTupleTranslator tupleReaderWriter =
tupleTranslatorProvider.getDataTypeTupleTranslator(txnId, this, false);
@@ -906,8 +896,7 @@
}
}
- private void confirmDataverseCanBeDeleted(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
+ private void confirmDataverseCanBeDeleted(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
// If a dataset from a DIFFERENT dataverse
// uses a type from this dataverse
// throw an error
@@ -931,33 +920,32 @@
if (function.getDataverseName().equals(dataverseName)) {
continue;
}
- for (List<String> datasetDependency : function.getDependencies().get(0)) {
- if (datasetDependency.get(0).equals(dataverseName)) {
+ for (Triple<DataverseName, String, String> datasetDependency : function.getDependencies().get(0)) {
+ if (datasetDependency.first.equals(dataverseName)) {
throw new AlgebricksException("Cannot drop dataverse. Function " + function.getDataverseName() + "."
+ function.getName() + "@" + function.getArity() + " depends on dataset "
- + datasetDependency.get(0) + "." + datasetDependency.get(1));
+ + datasetDependency.first + "." + datasetDependency.second);
}
}
- for (List<String> functionDependency : function.getDependencies().get(1)) {
- if (functionDependency.get(0).equals(dataverseName)) {
+ for (Triple<DataverseName, String, String> functionDependency : function.getDependencies().get(1)) {
+ if (functionDependency.first.equals(dataverseName)) {
throw new AlgebricksException(
"Cannot drop dataverse. Function " + function.getDataverseName() + "." + function.getName()
- + "@" + function.getArity() + " depends on function " + functionDependency.get(0)
- + "." + functionDependency.get(1) + "@" + functionDependency.get(2));
+ + "@" + function.getArity() + " depends on function " + functionDependency.first
+ + "." + functionDependency.second + "@" + functionDependency.third);
}
}
}
}
- private void confirmFunctionCanBeDeleted(TxnId txnId, FunctionSignature signature)
- throws AlgebricksException, RemoteException {
+ private void confirmFunctionCanBeDeleted(TxnId txnId, FunctionSignature signature) throws AlgebricksException {
// If any other function uses this function, throw an error
List<Function> functions = getAllFunctions(txnId);
for (Function function : functions) {
- for (List<String> functionalDependency : function.getDependencies().get(1)) {
- if (functionalDependency.get(0).equals(signature.getNamespace())
- && functionalDependency.get(1).equals(signature.getName())
- && functionalDependency.get(2).equals(Integer.toString(signature.getArity()))) {
+ for (Triple<DataverseName, String, String> functionalDependency : function.getDependencies().get(1)) {
+ if (functionalDependency.first.equals(signature.getDataverseName())
+ && functionalDependency.second.equals(signature.getName())
+ && functionalDependency.third.equals(Integer.toString(signature.getArity()))) {
throw new AlgebricksException("Cannot drop function " + signature + " being used by function "
+ function.getDataverseName() + "." + function.getName() + "@" + function.getArity());
}
@@ -965,14 +953,15 @@
}
}
- private void confirmDatasetCanBeDeleted(TxnId txnId, String dataverseName, String datasetName)
- throws AlgebricksException, RemoteException {
+ private void confirmDatasetCanBeDeleted(TxnId txnId, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
// If any function uses this type, throw an error
List<Function> functions = getAllFunctions(txnId);
for (Function function : functions) {
- for (List<String> datasetDependency : function.getDependencies().get(0)) {
- if (datasetDependency.get(0).equals(dataverseName) && datasetDependency.get(1).equals(datasetName)) {
- throw new AlgebricksException("Cannot drop dataset " + dataverseName + "." + datasetName
+ for (Triple<DataverseName, String, String> datasetDependency : function.getDependencies().get(0)) {
+ if (datasetDependency.first.equals(dataverseName) && datasetDependency.second.equals(datasetName)) {
+ throw new AlgebricksException("Cannot drop dataset "
+ + DatasetUtil.getFullyQualifiedDisplayName(dataverseName, datasetName)
+ " being used by function " + function.getDataverseName() + "." + function.getName() + "@"
+ function.getArity());
}
@@ -980,14 +969,14 @@
}
}
- private void confirmDatatypeIsUnused(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ private void confirmDatatypeIsUnused(TxnId txnId, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
confirmDatatypeIsUnusedByDatatypes(txnId, dataverseName, datatypeName);
confirmDatatypeIsUnusedByDatasets(txnId, dataverseName, datatypeName);
}
- private void confirmDatatypeIsUnusedByDatasets(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ private void confirmDatatypeIsUnusedByDatasets(TxnId txnId, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
// If any dataset uses this type, throw an error
List<Dataset> datasets = getAllDatasets(txnId);
for (Dataset set : datasets) {
@@ -998,8 +987,8 @@
}
}
- private void confirmDatatypeIsUnusedByDatatypes(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ private void confirmDatatypeIsUnusedByDatatypes(TxnId txnId, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
// If any datatype uses this type, throw an error
// TODO: Currently this loads all types into memory. This will need to be fixed
// for large numbers of types
@@ -1021,8 +1010,8 @@
}
}
- private List<String> getNestedComplexDatatypeNamesForThisDatatype(TxnId txnId, String dataverseName,
- String datatypeName) throws AlgebricksException, RemoteException {
+ private List<String> getNestedComplexDatatypeNamesForThisDatatype(TxnId txnId, DataverseName dataverseName,
+ String datatypeName) throws AlgebricksException {
// Return all field types that aren't builtin types
Datatype parentType = getDatatype(txnId, dataverseName, datatypeName);
@@ -1047,7 +1036,7 @@
}
private List<String> getDatasetNamesPartitionedOnThisNodeGroup(TxnId txnId, String nodegroup)
- throws AlgebricksException, RemoteException {
+ throws AlgebricksException {
// this needs to scan the datasets and return the datasets that use this
// nodegroup
List<String> nodeGroupDatasets = new ArrayList<>();
@@ -1062,8 +1051,8 @@
}
@Override
- public Index getIndex(TxnId txnId, String dataverseName, String datasetName, String indexName)
- throws AlgebricksException, RemoteException {
+ public Index getIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
IndexTupleTranslator tupleReaderWriter =
@@ -1081,8 +1070,8 @@
}
@Override
- public List<Index> getDatasetIndexes(TxnId txnId, String dataverseName, String datasetName)
- throws AlgebricksException, RemoteException {
+ public List<Index> getDatasetIndexes(TxnId txnId, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datasetName);
IndexTupleTranslator tupleReaderWriter =
@@ -1097,8 +1086,8 @@
}
@Override
- public Datatype getDatatype(TxnId txnId, String dataverseName, String datatypeName)
- throws AlgebricksException, RemoteException {
+ public Datatype getDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, datatypeName);
DatatypeTupleTranslator tupleReaderWriter =
@@ -1116,7 +1105,7 @@
}
@Override
- public NodeGroup getNodeGroup(TxnId txnId, String nodeGroupName) throws AlgebricksException, RemoteException {
+ public NodeGroup getNodeGroup(TxnId txnId, String nodeGroupName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(nodeGroupName);
NodeGroupTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getNodeGroupTupleTranslator(false);
@@ -1133,10 +1122,9 @@
}
@Override
- public Function getFunction(TxnId txnId, FunctionSignature functionSignature)
- throws AlgebricksException, RemoteException {
+ public Function getFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(functionSignature.getNamespace(), functionSignature.getName(),
+ ITupleReference searchKey = createTuple(functionSignature.getDataverseName(), functionSignature.getName(),
Integer.toString(functionSignature.getArity()));
FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(false);
List<Function> results = new ArrayList<>();
@@ -1152,7 +1140,7 @@
}
@Override
- public List<Function> getFunctions(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException {
+ public List<Function> getDataverseFunctions(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(false);
@@ -1166,13 +1154,12 @@
}
@Override
- public void dropFunction(TxnId txnId, FunctionSignature functionSignature)
- throws AlgebricksException, RemoteException {
+ public void dropFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException {
dropFunction(txnId, functionSignature, false);
}
private void dropFunction(TxnId txnId, FunctionSignature functionSignature, boolean force)
- throws AlgebricksException, RemoteException {
+ throws AlgebricksException {
if (!force) {
confirmFunctionCanBeDeleted(txnId, functionSignature);
}
@@ -1183,7 +1170,7 @@
}
try {
// Delete entry from the 'function' dataset.
- ITupleReference searchKey = createTuple(functionSignature.getNamespace(), functionSignature.getName(),
+ ITupleReference searchKey = createTuple(functionSignature.getDataverseName(), functionSignature.getName(),
Integer.toString(functionSignature.getArity()));
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'function' dataset.
@@ -1202,7 +1189,7 @@
}
private ITupleReference getTupleToBeDeleted(TxnId txnId, IMetadataIndex metadataIndex, ITupleReference searchKey)
- throws AlgebricksException, HyracksDataException, RemoteException {
+ throws AlgebricksException, HyracksDataException {
IValueExtractor<ITupleReference> valueExtractor = new TupleCopyValueExtractor(metadataIndex.getTypeTraits());
List<ITupleReference> results = new ArrayList<>();
searchIndex(txnId, metadataIndex, searchKey, valueExtractor, results);
@@ -1287,8 +1274,7 @@
}
private <T> void searchIndex(TxnId txnId, IMetadataIndex index, ITupleReference searchKey,
- IValueExtractor<T> valueExtractor, List<T> results)
- throws AlgebricksException, HyracksDataException, RemoteException {
+ IValueExtractor<T> valueExtractor, List<T> results) throws AlgebricksException, HyracksDataException {
IBinaryComparatorFactory[] comparatorFactories = index.getKeyBinaryComparatorFactory();
if (index.getFile() == null) {
throw new AlgebricksException("No file for Index " + index.getDataverseName() + "." + index.getIndexName());
@@ -1316,8 +1302,7 @@
}
private <T> void search(IIndexAccessor indexAccessor, RangePredicate rangePred, List<T> results,
- IValueExtractor<T> valueExtractor, TxnId txnId)
- throws HyracksDataException, RemoteException, AlgebricksException {
+ IValueExtractor<T> valueExtractor, TxnId txnId) throws HyracksDataException, AlgebricksException {
IIndexCursor rangeCursor = indexAccessor.createSearchCursor(false);
try {
indexAccessor.search(rangeCursor, rangePred);
@@ -1338,7 +1323,7 @@
}
@Override
- public void initializeDatasetIdFactory(TxnId txnId) throws AlgebricksException, RemoteException {
+ public void initializeDatasetIdFactory(TxnId txnId) throws AlgebricksException {
int mostRecentDatasetId;
try {
String resourceName = MetadataPrimaryIndexes.DATASET_DATASET.getFile().getRelativePath();
@@ -1356,7 +1341,7 @@
}
private int getMostRecentDatasetIdFromStoredDatasetIndex(IIndex indexInstance, TxnId txnId)
- throws HyracksDataException, RemoteException, AlgebricksException {
+ throws HyracksDataException, AlgebricksException {
DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
RangePredicate rangePred = new RangePredicate(null, null, true, true, null, null);
@@ -1389,43 +1374,41 @@
return mostRecentDatasetId;
}
- public static ITupleReference createTuple(String... fields) {
- ISerializerDeserializer<AString> stringSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
- AMutableString aString = new AMutableString("");
- ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
- for (String s : fields) {
- aString.setValue(s);
- try {
- stringSerde.serialize(aString, tupleBuilder.getDataOutput());
- } catch (HyracksDataException e) {
- // This should never happen
- throw new IllegalStateException("Failed to create search tuple!!!! This should never happen", e);
- }
+ public static ITupleReference createTuple(DataverseName dataverseName, String... rest) {
+ return createTuple(dataverseName.getCanonicalForm(), rest);
+ }
+
+ public static ITupleReference createTuple(String first, String... rest) {
+ try {
+ ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1 + rest.length);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+ AMutableString aString = new AMutableString(first);
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
+ for (String s : rest) {
+ aString.setValue(s);
+ stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+ tupleBuilder.addFieldEndOffset();
+ }
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ } catch (HyracksDataException e) {
+ // This should never happen
+ throw new IllegalStateException("Failed to create search tuple", e);
}
+ }
+
+ public static ITupleReference createTuple() {
+ ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(0);
ArrayTupleReference tuple = new ArrayTupleReference();
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
@Override
- public List<Function> getDataverseFunctions(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
- try {
- ITupleReference searchKey = createTuple(dataverseName);
- FunctionTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFunctionTupleTranslator(false);
- IValueExtractor<Function> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
- List<Function> results = new ArrayList<>();
- searchIndex(txnId, MetadataPrimaryIndexes.FUNCTION_DATASET, searchKey, valueExtractor, results);
- return results;
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- }
-
- @Override
- public void addAdapter(TxnId txnId, DatasourceAdapter adapter) throws AlgebricksException, RemoteException {
+ public void addAdapter(TxnId txnId, DatasourceAdapter adapter) throws AlgebricksException {
try {
// Insert into the 'Adapter' dataset.
DatasourceAdapterTupleTranslator tupleReaderWriter =
@@ -1435,7 +1418,8 @@
} catch (HyracksDataException e) {
if (e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY) {
throw new AlgebricksException("A adapter with this name " + adapter.getAdapterIdentifier().getName()
- + " already exists in dataverse '" + adapter.getAdapterIdentifier().getNamespace() + "'.", e);
+ + " already exists in dataverse '" + adapter.getAdapterIdentifier().getDataverseName() + "'.",
+ e);
} else {
throw new AlgebricksException(e);
}
@@ -1443,8 +1427,7 @@
}
@Override
- public void dropAdapter(TxnId txnId, String dataverseName, String adapterName)
- throws AlgebricksException, RemoteException {
+ public void dropAdapter(TxnId txnId, DataverseName dataverseName, String adapterName) throws AlgebricksException {
DatasourceAdapter adapter = getAdapter(txnId, dataverseName, adapterName);
if (adapter == null) {
throw new AlgebricksException("Cannot drop adapter '" + adapter + "' because it doesn't exist.");
@@ -1468,8 +1451,8 @@
}
@Override
- public DatasourceAdapter getAdapter(TxnId txnId, String dataverseName, String adapterName)
- throws AlgebricksException, RemoteException {
+ public DatasourceAdapter getAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, adapterName);
DatasourceAdapterTupleTranslator tupleReaderWriter =
@@ -1487,8 +1470,7 @@
}
@Override
- public void addCompactionPolicy(TxnId txnId, CompactionPolicy compactionPolicy)
- throws AlgebricksException, RemoteException {
+ public void addCompactionPolicy(TxnId txnId, CompactionPolicy compactionPolicy) throws AlgebricksException {
try {
// Insert into the 'CompactionPolicy' dataset.
CompactionPolicyTupleTranslator tupleReaderWriter =
@@ -1506,10 +1488,10 @@
}
@Override
- public CompactionPolicy getCompactionPolicy(TxnId txnId, String dataverse, String policyName)
- throws AlgebricksException, RemoteException {
+ public CompactionPolicy getCompactionPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
+ throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse, policyName);
+ ITupleReference searchKey = createTuple(dataverseName, policyName);
CompactionPolicyTupleTranslator tupleReaderWriter =
tupleTranslatorProvider.getCompactionPolicyTupleTranslator(false);
List<CompactionPolicy> results = new ArrayList<>();
@@ -1525,8 +1507,8 @@
}
@Override
- public List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, String dataverseName)
- throws AlgebricksException, RemoteException {
+ public List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, DataverseName dataverseName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName);
DatasourceAdapterTupleTranslator tupleReaderWriter =
@@ -1541,7 +1523,7 @@
}
@Override
- public void addLibrary(TxnId txnId, Library library) throws AlgebricksException, RemoteException {
+ public void addLibrary(TxnId txnId, Library library) throws AlgebricksException {
try {
// Insert into the 'Library' dataset.
LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(true);
@@ -1559,8 +1541,7 @@
}
@Override
- public void dropLibrary(TxnId txnId, String dataverseName, String libraryName)
- throws AlgebricksException, RemoteException {
+ public void dropLibrary(TxnId txnId, DataverseName dataverseName, String libraryName) throws AlgebricksException {
Library library = getLibrary(txnId, dataverseName, libraryName);
if (library == null) {
throw new AlgebricksException("Cannot drop library '" + library + "' because it doesn't exist.");
@@ -1584,8 +1565,7 @@
}
@Override
- public Library getLibrary(TxnId txnId, String dataverseName, String libraryName)
- throws AlgebricksException, RemoteException {
+ public Library getLibrary(TxnId txnId, DataverseName dataverseName, String libraryName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, libraryName);
LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(false);
@@ -1607,7 +1587,7 @@
}
@Override
- public void addFeedPolicy(TxnId txnId, FeedPolicyEntity feedPolicy) throws AlgebricksException, RemoteException {
+ public void addFeedPolicy(TxnId txnId, FeedPolicyEntity feedPolicy) throws AlgebricksException {
try {
// Insert into the 'FeedPolicy' dataset.
FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(true);
@@ -1624,10 +1604,10 @@
}
@Override
- public FeedPolicyEntity getFeedPolicy(TxnId txnId, String dataverse, String policyName)
- throws AlgebricksException, RemoteException {
+ public FeedPolicyEntity getFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
+ throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse, policyName);
+ ITupleReference searchKey = createTuple(dataverseName, policyName);
FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
List<FeedPolicyEntity> results = new ArrayList<>();
IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1644,7 +1624,8 @@
@Override
public void addFeedConnection(TxnId txnId, FeedConnection feedConnection) throws AlgebricksException {
try {
- FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(true);
+ FeedConnectionTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getFeedConnectionTupleTranslator(true);
ITupleReference feedConnTuple = tupleReaderWriter.getTupleFromMetadataEntity(feedConnection);
insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, feedConnTuple);
} catch (HyracksDataException e) {
@@ -1653,11 +1634,12 @@
}
@Override
- public List<FeedConnection> getFeedConnections(TxnId txnId, String dataverseName, String feedName)
- throws AlgebricksException, RemoteException {
+ public List<FeedConnection> getFeedConnections(TxnId txnId, DataverseName dataverseName, String feedName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName);
- FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
+ FeedConnectionTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getFeedConnectionTupleTranslator(false);
List<FeedConnection> results = new ArrayList<>();
IValueExtractor<FeedConnection> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
searchIndex(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey, valueExtractor, results);
@@ -1668,11 +1650,12 @@
}
@Override
- public FeedConnection getFeedConnection(TxnId txnId, String dataverseName, String feedName, String datasetName)
- throws AlgebricksException, RemoteException {
+ public FeedConnection getFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName,
+ String datasetName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
- FeedConnectionTupleTranslator tupleReaderWriter = new FeedConnectionTupleTranslator(false);
+ FeedConnectionTupleTranslator tupleReaderWriter =
+ tupleTranslatorProvider.getFeedConnectionTupleTranslator(false);
List<FeedConnection> results = new ArrayList<>();
IValueExtractor<FeedConnection> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
searchIndex(txnId, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, searchKey, valueExtractor, results);
@@ -1686,8 +1669,8 @@
}
@Override
- public void dropFeedConnection(TxnId txnId, String dataverseName, String feedName, String datasetName)
- throws AlgebricksException, RemoteException {
+ public void dropFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
+ throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
ITupleReference tuple =
@@ -1699,7 +1682,7 @@
}
@Override
- public void addFeed(TxnId txnId, Feed feed) throws AlgebricksException, RemoteException {
+ public void addFeed(TxnId txnId, Feed feed) throws AlgebricksException {
try {
// Insert into the 'Feed' dataset.
FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(true);
@@ -1716,9 +1699,9 @@
}
@Override
- public Feed getFeed(TxnId txnId, String dataverse, String feedName) throws AlgebricksException, RemoteException {
+ public Feed getFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse, feedName);
+ ITupleReference searchKey = createTuple(dataverseName, feedName);
FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
List<Feed> results = new ArrayList<>();
IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1733,9 +1716,9 @@
}
@Override
- public List<Feed> getFeeds(TxnId txnId, String dataverse) throws AlgebricksException, RemoteException {
+ public List<Feed> getFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse);
+ ITupleReference searchKey = createTuple(dataverseName);
FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
List<Feed> results = new ArrayList<>();
IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1747,9 +1730,9 @@
}
@Override
- public void dropFeed(TxnId txnId, String dataverse, String feedName) throws AlgebricksException, RemoteException {
+ public void dropFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse, feedName);
+ ITupleReference searchKey = createTuple(dataverseName, feedName);
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'nodegroup' dataset.
ITupleReference tuple = getTupleToBeDeleted(txnId, MetadataPrimaryIndexes.FEED_DATASET, searchKey);
@@ -1765,8 +1748,7 @@
}
@Override
- public void dropFeedPolicy(TxnId txnId, String dataverseName, String policyName)
- throws AlgebricksException, RemoteException {
+ public void dropFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataverseName, policyName);
ITupleReference tuple = getTupleToBeDeleted(txnId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey);
@@ -1782,10 +1764,10 @@
}
@Override
- public List<FeedPolicyEntity> getDataversePolicies(TxnId txnId, String dataverse)
- throws AlgebricksException, RemoteException {
+ public List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, DataverseName dataverseName)
+ throws AlgebricksException {
try {
- ITupleReference searchKey = createTuple(dataverse);
+ ITupleReference searchKey = createTuple(dataverseName);
FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
List<FeedPolicyEntity> results = new ArrayList<>();
@@ -1797,7 +1779,7 @@
}
@Override
- public void addExternalFile(TxnId txnId, ExternalFile externalFile) throws AlgebricksException, RemoteException {
+ public void addExternalFile(TxnId txnId, ExternalFile externalFile) throws AlgebricksException {
try {
// Insert into the 'externalFiles' dataset.
ExternalFileTupleTranslator tupleReaderWriter =
@@ -1816,8 +1798,7 @@
}
@Override
- public List<ExternalFile> getExternalFiles(TxnId txnId, Dataset dataset)
- throws AlgebricksException, RemoteException {
+ public List<ExternalFile> getExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException {
try {
ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
ExternalFileTupleTranslator tupleReaderWriter =
@@ -1832,8 +1813,8 @@
}
@Override
- public void dropExternalFile(TxnId txnId, String dataverseName, String datasetName, int fileNumber)
- throws AlgebricksException, RemoteException {
+ public void dropExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, int fileNumber)
+ throws AlgebricksException {
try {
// Delete entry from the 'ExternalFile' dataset.
ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
@@ -1853,7 +1834,7 @@
}
@Override
- public void dropExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException {
+ public void dropExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException {
List<ExternalFile> files = getExternalFiles(txnId, dataset);
// loop through files and delete them
for (int i = 0; i < files.size(); i++) {
@@ -1864,8 +1845,8 @@
// This method is used to create a search tuple for external data file since the
// search tuple has an int value
- public ITupleReference createExternalFileSearchTuple(String dataverseName, String datasetName, int fileNumber)
- throws HyracksDataException {
+ public ITupleReference createExternalFileSearchTuple(DataverseName dataverseName, String datasetName,
+ int fileNumber) throws HyracksDataException {
ISerializerDeserializer<AString> stringSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
ISerializerDeserializer<AInt32> intSerde =
@@ -1875,7 +1856,7 @@
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(3);
// dataverse field
- aString.setValue(dataverseName);
+ aString.setValue(dataverseName.getCanonicalForm());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -1894,8 +1875,8 @@
}
@Override
- public ExternalFile getExternalFile(TxnId txnId, String dataverseName, String datasetName, Integer fileNumber)
- throws AlgebricksException, RemoteException {
+ public ExternalFile getExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName,
+ Integer fileNumber) throws AlgebricksException {
try {
ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
ExternalFileTupleTranslator tupleReaderWriter =
@@ -1913,12 +1894,11 @@
}
@Override
- public void updateDataset(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException {
+ public void updateDataset(TxnId txnId, Dataset dataset) throws AlgebricksException {
try {
// This method will delete previous entry of the dataset and insert the new one
// Delete entry from the 'datasets' dataset.
- ITupleReference searchKey;
- searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
+ ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
// Searches the index for the tuple to be deleted. Acquires an S
// lock on the 'dataset' dataset.
ITupleReference datasetTuple =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
index 367f568..810e4ca 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
@@ -20,8 +20,10 @@
package org.apache.asterix.metadata;
import java.util.ArrayList;
+import java.util.List;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
import org.apache.asterix.metadata.entities.CompactionPolicy;
@@ -116,24 +118,23 @@
public void addCompactionPolicy(CompactionPolicy compactionPolicy) {
droppedCache.dropCompactionPolicy(compactionPolicy);
logAndApply(new MetadataLogicalOperation(compactionPolicy, true));
-
}
- public void dropDataset(String dataverseName, String datasetName) {
+ public void dropDataset(DataverseName dataverseName, String datasetName) {
Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, null, null, null, null, -1,
MetadataUtil.PENDING_NO_OP);
droppedCache.addDatasetIfNotExists(dataset);
logAndApply(new MetadataLogicalOperation(dataset, false));
}
- public void dropIndex(String dataverseName, String datasetName, String indexName) {
+ public void dropIndex(DataverseName dataverseName, String datasetName, String indexName) {
Index index = new Index(dataverseName, datasetName, indexName, null, null, null, null, false, false, false,
MetadataUtil.PENDING_NO_OP);
droppedCache.addIndexIfNotExists(index);
logAndApply(new MetadataLogicalOperation(index, false));
}
- public void dropDataverse(String dataverseName) {
+ public void dropDataverse(DataverseName dataverseName) {
Dataverse dataverse = new Dataverse(dataverseName, null, MetadataUtil.PENDING_NO_OP);
droppedCache.addDataverseIfNotExists(dataverse);
logAndApply(new MetadataLogicalOperation(dataverse, false));
@@ -144,7 +145,7 @@
logAndApply(new MetadataLogicalOperation(library, true));
}
- public void dropDataDatatype(String dataverseName, String datatypeName) {
+ public void dropDataDatatype(DataverseName dataverseName, String datatypeName) {
Datatype datatype = new Datatype(dataverseName, datatypeName, null, false);
droppedCache.addDatatypeIfNotExists(datatype);
logAndApply(new MetadataLogicalOperation(datatype, false));
@@ -162,14 +163,14 @@
logAndApply(new MetadataLogicalOperation(function, false));
}
- public void dropAdapter(String dataverseName, String adapterName) {
+ public void dropAdapter(DataverseName dataverseName, String adapterName) {
AdapterIdentifier adapterIdentifier = new AdapterIdentifier(dataverseName, adapterName);
DatasourceAdapter adapter = new DatasourceAdapter(adapterIdentifier, null, null);
droppedCache.addAdapterIfNotExists(adapter);
logAndApply(new MetadataLogicalOperation(adapter, false));
}
- public void dropLibrary(String dataverseName, String libraryName) {
+ public void dropLibrary(DataverseName dataverseName, String libraryName) {
Library library = new Library(dataverseName, libraryName);
droppedCache.addLibraryIfNotExists(library);
logAndApply(new MetadataLogicalOperation(library, false));
@@ -180,18 +181,18 @@
doOperation(op);
}
- public boolean dataverseIsDropped(String dataverseName) {
+ public boolean dataverseIsDropped(DataverseName dataverseName) {
return droppedCache.getDataverse(dataverseName) != null;
}
- public boolean datasetIsDropped(String dataverseName, String datasetName) {
+ public boolean datasetIsDropped(DataverseName dataverseName, String datasetName) {
if (droppedCache.getDataverse(dataverseName) != null) {
return true;
}
return droppedCache.getDataset(dataverseName, datasetName) != null;
}
- public boolean indexIsDropped(String dataverseName, String datasetName, String indexName) {
+ public boolean indexIsDropped(DataverseName dataverseName, String datasetName, String indexName) {
if (droppedCache.getDataverse(dataverseName) != null) {
return true;
}
@@ -201,7 +202,7 @@
return droppedCache.getIndex(dataverseName, datasetName, indexName) != null;
}
- public boolean datatypeIsDropped(String dataverseName, String datatypeName) {
+ public boolean datatypeIsDropped(DataverseName dataverseName, String datatypeName) {
if (droppedCache.getDataverse(dataverseName) != null) {
return true;
}
@@ -216,7 +217,7 @@
return droppedCache.getFunction(functionSignature) != null;
}
- public ArrayList<MetadataLogicalOperation> getOpLog() {
+ public List<MetadataLogicalOperation> getOpLog() {
return opLog;
}
@@ -241,7 +242,7 @@
logAndApply(new MetadataLogicalOperation(feedConnection, true));
}
- public void dropFeedConnection(String dataverseName, String feedName, String datasetName) {
+ public void dropFeedConnection(DataverseName dataverseName, String feedName, String datasetName) {
FeedConnection feedConnection =
new FeedConnection(dataverseName, feedName, datasetName, null, null, null, null);
droppedCache.addFeedConnectionIfNotExists(feedConnection);
@@ -254,5 +255,4 @@
droppedCache.clear();
opLog.clear();
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/ExtensionMetadataDataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/ExtensionMetadataDataset.java
index ebb9cac..aafcb5d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/ExtensionMetadataDataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/ExtensionMetadataDataset.java
@@ -45,7 +45,7 @@
return indexId;
}
- public IMetadataEntityTupleTranslator<T> getTupleTranslator() {
- return tupleTranslatorFactory.createTupleTranslator();
+ public IMetadataEntityTupleTranslator<T> getTupleTranslator(boolean getTuple) {
+ return tupleTranslatorFactory.createTupleTranslator(getTuple);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
index cd84256..f8415eb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslator.java
@@ -19,9 +19,6 @@
package org.apache.asterix.metadata.api;
-import java.io.Serializable;
-import java.rmi.RemoteException;
-
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -32,7 +29,7 @@
* representation in a Hyracks tuple, and vice versa. Implementations of this
* interface are intended to be used within an IMetadataNode.
*/
-public interface IMetadataEntityTupleTranslator<T> extends Serializable {
+public interface IMetadataEntityTupleTranslator<T> {
/**
* Transforms a metadata entity of type T from a given tuple to a Java object
@@ -44,10 +41,8 @@
* @return A new instance of a metadata entity of type T.
* @throws AlgebricksException
* @throws HyracksDataException
- * @throws RemoteException
*/
- T getMetadataEntityFromTuple(ITupleReference tuple)
- throws AlgebricksException, HyracksDataException, RemoteException;
+ T getMetadataEntityFromTuple(ITupleReference tuple) throws AlgebricksException, HyracksDataException;
/**
* Serializes the given metadata entity of type T into an appropriate tuple
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslatorFactory.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslatorFactory.java
index e472383..ecab692 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslatorFactory.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntityTupleTranslatorFactory.java
@@ -25,5 +25,5 @@
/**
* @return an instance of IMetadataEntityTupleTranslator
*/
- IMetadataEntityTupleTranslator<T> createTupleTranslator();
+ IMetadataEntityTupleTranslator<T> createTupleTranslator(boolean getTuple);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataIndex.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataIndex.java
index 6c220af..522c2e6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataIndex.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataIndex.java
@@ -22,6 +22,7 @@
import java.io.Serializable;
import java.util.List;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.DatasetId;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
@@ -35,7 +36,7 @@
* Descriptor interface for a primary or secondary index on metadata datasets.
*/
public interface IMetadataIndex extends Serializable {
- public String getDataverseName();
+ public DataverseName getDataverseName();
public String getNodeGroupName();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
index a646893..e6992e3 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
@@ -24,6 +24,7 @@
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.IMetadataBootstrap;
import org.apache.asterix.external.indexing.ExternalFile;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -118,7 +119,7 @@
* @throws AlgebricksException
* For example, if the dataverse does not exist.
*/
- Dataverse getDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException;
+ Dataverse getDataverse(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException;
/**
* Retrieves all datasets belonging to the given dataverse.
@@ -131,7 +132,8 @@
* @throws AlgebricksException
* For example, if the dataverse does not exist.
*/
- List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException;
+ List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, DataverseName dataverseName)
+ throws AlgebricksException;
/**
* Deletes the dataverse with given name, and all it's associated datasets,
@@ -139,11 +141,12 @@
*
* @param ctx
* MetadataTransactionContext of an active metadata transaction.
- * @return A list of dataset instances.
+ * @param dataverseName
+ * Name of the dataverse to drop.
* @throws AlgebricksException
* For example, if the dataverse does not exist.
*/
- void dropDataverse(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException;
+ void dropDataverse(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException;
/**
* Inserts a new dataset into the metadata.
@@ -170,7 +173,7 @@
* @throws AlgebricksException
* For example, if the dataset does not exist.
*/
- Dataset getDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+ Dataset getDataset(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName)
throws AlgebricksException;
/**
@@ -186,7 +189,7 @@
* @throws AlgebricksException
* For example, if the dataset and/or dataverse does not exist.
*/
- List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+ List<Index> getDatasetIndexes(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName)
throws AlgebricksException;
/**
@@ -201,7 +204,7 @@
* @throws AlgebricksException
* For example, if the dataset and/or dataverse does not exist.
*/
- void dropDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName)
+ void dropDataset(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName)
throws AlgebricksException;
/**
@@ -232,7 +235,7 @@
* @throws AlgebricksException
* For example, if the index does not exist.
*/
- Index getIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
+ Index getIndex(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName, String indexName)
throws AlgebricksException;
/**
@@ -249,7 +252,7 @@
* @throws AlgebricksException
* For example, if the index does not exist.
*/
- void dropIndex(MetadataTransactionContext ctx, String dataverseName, String datasetName, String indexName)
+ void dropIndex(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName, String indexName)
throws AlgebricksException;
/**
@@ -277,7 +280,7 @@
* @throws AlgebricksException
* For example, if the datatype does not exist.
*/
- Datatype getDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+ Datatype getDatatype(MetadataTransactionContext ctx, DataverseName dataverseName, String datatypeName)
throws AlgebricksException;
/**
@@ -293,7 +296,7 @@
* For example, if there are still datasets using the type to be
* deleted.
*/
- void dropDatatype(MetadataTransactionContext ctx, String dataverseName, String datatypeName)
+ void dropDatatype(MetadataTransactionContext ctx, DataverseName dataverseName, String datatypeName)
throws AlgebricksException;
/**
@@ -375,15 +378,11 @@
* MetadataTransactionContext of an active metadata transaction.
* @param functionSignature
* the functions signature (unique to the function)
- * @return
* @throws AlgebricksException
*/
-
Function getFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature)
throws AlgebricksException;
- List<Function> getFunctions(MetadataTransactionContext ctx, String dataverseName) throws AlgebricksException;
-
/**
* @param ctx
* MetadataTransactionContext of an active metadata transaction.
@@ -394,6 +393,20 @@
void dropFunction(MetadataTransactionContext ctx, FunctionSignature functionSignature) throws AlgebricksException;
/**
+ * Retrieves all functions belonging to the given dataverse.
+ *
+ * @param ctx
+ * MetadataTransactionContext of an active metadata transaction.
+ * @param dataverseName
+ * Name of the dataverse of which to find all functions.
+ * @return A list of function instances.
+ * @throws AlgebricksException
+ * For example, if the dataverse does not exist.
+ */
+ List<Function> getDataverseFunctions(MetadataTransactionContext ctx, DataverseName dataverseName)
+ throws AlgebricksException;
+
+ /**
* @param mdTxnCtx
* MetadataTransactionContext of an active metadata transaction.
* @param adapter
@@ -410,10 +423,9 @@
* the dataverse associated with the adapter being searched
* @param name
* name of the adapter
- * @return
* @throws AlgebricksException
*/
- DatasourceAdapter getAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
+ DatasourceAdapter getAdapter(MetadataTransactionContext ctx, DataverseName dataverseName, String name)
throws AlgebricksException;
/**
@@ -425,17 +437,17 @@
* name of the adapter
* @throws AlgebricksException
*/
- void dropAdapter(MetadataTransactionContext ctx, String dataverseName, String name) throws AlgebricksException;
+ void dropAdapter(MetadataTransactionContext ctx, DataverseName dataverseName, String name)
+ throws AlgebricksException;
/**
* @param ctx
* MetadataTransactionContext of an active metadata transaction.
* @param dataverseName
* the dataverse whose associated adapters are being requested
- * @return
* @throws AlgebricksException
*/
- List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext ctx, String dataverseName)
+ List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext ctx, DataverseName dataverseName)
throws AlgebricksException;
/**
@@ -452,16 +464,7 @@
* @return
* @throws AlgebricksException
*/
- CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
- throws AlgebricksException;
-
- /**
- * @param ctx
- * @param dataverseName
- * @return
- * @throws AlgebricksException
- */
- List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
+ CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, DataverseName dataverse, String policyName)
throws AlgebricksException;
/**
@@ -473,14 +476,15 @@
/**
* @param ctx
- * @param dataverse
+ * @param dataverseName
* @param feedName
* @return
* @throws AlgebricksException
*/
- Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException;
+ Feed getFeed(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName)
+ throws AlgebricksException;
- List<Feed> getFeeds(MetadataTransactionContext ctx, String dataverse) throws AlgebricksException;
+ List<Feed> getFeeds(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException;
/**
* @param ctx
@@ -488,7 +492,7 @@
* @param feedName
* @throws AlgebricksException
*/
- void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws AlgebricksException;
+ void dropFeed(MetadataTransactionContext ctx, DataverseName dataverse, String feedName) throws AlgebricksException;
/**
* @param ctx
@@ -499,20 +503,24 @@
/**
* @param ctx
- * @param dataverse
+ * @param dataverseName
* @param policyName
* @throws AlgebricksException
*/
- void dropFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName) throws AlgebricksException;
+ void dropFeedPolicy(MetadataTransactionContext ctx, DataverseName dataverseName, String policyName)
+ throws AlgebricksException;
/**
* @param ctx
- * @param dataverse
+ * @param dataverseName
* @param policyName
* @return
* @throws AlgebricksException
*/
- FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
+ FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, DataverseName dataverseName, String policyName)
+ throws AlgebricksException;
+
+ List<FeedPolicyEntity> getDataverseFeedPolicies(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName)
throws AlgebricksException;
void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws AlgebricksException;
@@ -532,7 +540,7 @@
* the library does not exists.
* @throws AlgebricksException
*/
- void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ void dropLibrary(MetadataTransactionContext ctx, DataverseName dataverseName, String libraryName)
throws AlgebricksException;
/**
@@ -558,7 +566,7 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
+ Library getLibrary(MetadataTransactionContext ctx, DataverseName dataverseName, String libraryName)
throws AlgebricksException, RemoteException;
/**
@@ -571,7 +579,7 @@
* @return Library
* @throws AlgebricksException
*/
- List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
+ List<Library> getDataverseLibraries(MetadataTransactionContext ctx, DataverseName dataverseName)
throws AlgebricksException;
/**
@@ -625,7 +633,7 @@
* @return
* @throws AlgebricksException
*/
- ExternalFile getExternalFile(MetadataTransactionContext mdTxnCtx, String dataverseName, String datasetName,
+ ExternalFile getExternalFile(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String datasetName,
Integer fileNumber) throws AlgebricksException;
/**
@@ -696,13 +704,13 @@
*/
void addFeedConnection(MetadataTransactionContext ctx, FeedConnection feedConnection) throws AlgebricksException;
- void dropFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName, String datasetName)
- throws AlgebricksException;
-
- FeedConnection getFeedConnection(MetadataTransactionContext ctx, String dataverseName, String feedName,
+ void dropFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName,
String datasetName) throws AlgebricksException;
- List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, String dataverseName, String feedName)
+ FeedConnection getFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName,
+ String datasetName) throws AlgebricksException;
+
+ List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName)
throws AlgebricksException;
long getMaxTxnId();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
index f837bec..e299f43 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
@@ -25,6 +25,7 @@
import java.util.List;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.external.indexing.ExternalFile;
import org.apache.asterix.metadata.entities.CompactionPolicy;
@@ -114,7 +115,7 @@
* For example, if the dataverse does not exist.
* @throws RemoteException
*/
- Dataverse getDataverse(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ Dataverse getDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
/**
* Retrieves all datasets belonging to the given dataverse, acquiring local
@@ -128,7 +129,8 @@
* @throws AlgebricksException
* For example, if the dataverse does not exist. RemoteException
*/
- List<Dataset> getDataverseDatasets(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ List<Dataset> getDataverseDatasets(TxnId txnId, DataverseName dataverseName)
+ throws AlgebricksException, RemoteException;
/**
* Deletes the dataverse with given name, and all it's associated datasets,
@@ -142,7 +144,7 @@
* For example, if the dataverse does not exist.
* @throws RemoteException
*/
- void dropDataverse(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ void dropDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
/**
* Inserts a new dataset into the metadata, acquiring local locks on behalf of
@@ -173,7 +175,7 @@
* For example, if the dataset does not exist.
* @throws RemoteException
*/
- Dataset getDataset(TxnId txnId, String dataverseName, String datasetName)
+ Dataset getDataset(TxnId txnId, DataverseName dataverseName, String datasetName)
throws AlgebricksException, RemoteException;
/**
@@ -191,7 +193,7 @@
* For example, if the dataset and/or dataverse does not exist.
* @throws RemoteException
*/
- List<Index> getDatasetIndexes(TxnId txnId, String dataverseName, String datasetName)
+ List<Index> getDatasetIndexes(TxnId txnId, DataverseName dataverseName, String datasetName)
throws AlgebricksException, RemoteException;
/**
@@ -208,7 +210,8 @@
* For example, if the dataset and/or dataverse does not exist.
* @throws RemoteException
*/
- void dropDataset(TxnId txnId, String dataverseName, String datasetName) throws AlgebricksException, RemoteException;
+ void dropDataset(TxnId txnId, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException, RemoteException;
/**
* Inserts an index into the metadata, acquiring local locks on behalf of the
@@ -241,7 +244,7 @@
* For example, if the index does not exist.
* @throws RemoteException
*/
- Index getIndex(TxnId txnId, String dataverseName, String datasetName, String indexName)
+ Index getIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
throws AlgebricksException, RemoteException;
/**
@@ -259,7 +262,7 @@
* For example, if the index does not exist.
* @throws RemoteException
*/
- void dropIndex(TxnId txnId, String dataverseName, String datasetName, String indexName)
+ void dropIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
throws AlgebricksException, RemoteException;
/**
@@ -291,7 +294,7 @@
* For example, if the datatype does not exist.
* @throws RemoteException
*/
- Datatype getDatatype(TxnId txnId, String dataverseName, String datatypeName)
+ Datatype getDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
throws AlgebricksException, RemoteException;
/**
@@ -309,7 +312,7 @@
* deleted.
* @throws RemoteException
*/
- void dropDatatype(TxnId txnId, String dataverseName, String datatypeName)
+ void dropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
throws AlgebricksException, RemoteException;
/**
@@ -388,7 +391,21 @@
*/
Function getFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException, RemoteException;
- List<Function> getFunctions(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ /**
+ * Retrieves all functions belonging to the given dataverse, acquiring local
+ * locks on behalf of the given transaction id.
+ *
+ * @param txnId
+ * A globally unique id for an active metadata transaction.
+ * @param dataverseName
+ * Name of the dataverse of which to find all functions.
+ * @return A list of function instances.
+ * @throws AlgebricksException
+ * For example, if the dataverse does not exist.
+ * @throws RemoteException
+ */
+ List<Function> getDataverseFunctions(TxnId txnId, DataverseName dataverseName)
+ throws AlgebricksException, RemoteException;
/**
* Deletes a function, acquiring local locks on behalf of the given transaction
@@ -420,22 +437,12 @@
/**
* @param txnId
* @param dataverseName
- * @return List<Function> A list containing the functions in the specified
- * dataverse
- * @throws AlgebricksException
- * @throws RemoteException
- */
- List<Function> getDataverseFunctions(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
-
- /**
- * @param txnId
- * @param dataverseName
* @return List<Adapter> A list containing the adapters in the specified
* dataverse
* @throws AlgebricksException
* @throws RemoteException
*/
- List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, String dataverseName)
+ List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, DataverseName dataverseName)
throws AlgebricksException, RemoteException;
/**
@@ -446,7 +453,7 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- DatasourceAdapter getAdapter(TxnId txnId, String dataverseName, String adapterName)
+ DatasourceAdapter getAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
throws AlgebricksException, RemoteException;
/**
@@ -463,7 +470,8 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- void dropAdapter(TxnId txnId, String dataverseName, String adapterName) throws AlgebricksException, RemoteException;
+ void dropAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
+ throws AlgebricksException, RemoteException;
/**
* @param txnId
@@ -487,13 +495,13 @@
/**
* @param txnId
- * @param dataverse
+ * @param dataverseName
* @param policy
* @return
* @throws AlgebricksException
* @throws RemoteException
*/
- CompactionPolicy getCompactionPolicy(TxnId txnId, String dataverse, String policy)
+ CompactionPolicy getCompactionPolicy(TxnId txnId, DataverseName dataverseName, String policy)
throws AlgebricksException, RemoteException;
/**
@@ -520,24 +528,25 @@
/**
* @param txnId
- * @param dataverse
+ * @param dataverseName
* @param feedName
* @return
* @throws AlgebricksException
* @throws RemoteException
*/
- Feed getFeed(TxnId txnId, String dataverse, String feedName) throws AlgebricksException, RemoteException;
+ Feed getFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException, RemoteException;
- List<Feed> getFeeds(TxnId txnId, String dataverse) throws AlgebricksException, RemoteException;
+ List<Feed> getFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
/**
* @param txnId
- * @param dataverse
+ * @param dataverseName
* @param feedName
* @throws AlgebricksException
* @throws RemoteException
*/
- void dropFeed(TxnId txnId, String dataverse, String feedName) throws AlgebricksException, RemoteException;
+ void dropFeed(TxnId txnId, DataverseName dataverseName, String feedName)
+ throws AlgebricksException, RemoteException;
/**
* @param txnId
@@ -549,13 +558,13 @@
/**
* @param txnId
- * @param dataverse
+ * @param dataverseName
* @param policy
* @return
* @throws AlgebricksException
* @throws RemoteException
*/
- FeedPolicyEntity getFeedPolicy(TxnId txnId, String dataverse, String policy)
+ FeedPolicyEntity getFeedPolicy(TxnId txnId, DataverseName dataverseName, String policy)
throws AlgebricksException, RemoteException;
/**
@@ -572,7 +581,8 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- void dropLibrary(TxnId txnId, String dataverseName, String libraryName) throws AlgebricksException, RemoteException;
+ void dropLibrary(TxnId txnId, DataverseName dataverseName, String libraryName)
+ throws AlgebricksException, RemoteException;
/**
* Adds a library, acquiring local locks on behalf of the given transaction id.
@@ -598,7 +608,7 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- Library getLibrary(TxnId txnId, String dataverseName, String libraryName)
+ Library getLibrary(TxnId txnId, DataverseName dataverseName, String libraryName)
throws AlgebricksException, RemoteException;
/**
@@ -612,7 +622,8 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- List<Library> getDataverseLibraries(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ List<Library> getDataverseLibraries(TxnId txnId, DataverseName dataverseName)
+ throws AlgebricksException, RemoteException;
/**
* @param txnId
@@ -621,7 +632,7 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- List<Feed> getDataverseFeeds(TxnId txnId, String dataverseName) throws AlgebricksException, RemoteException;
+ List<Feed> getDataverseFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
/**
* delete a give feed (ingestion) policy
@@ -633,17 +644,17 @@
* @throws RemoteException
* @throws AlgebricksException
*/
- void dropFeedPolicy(TxnId txnId, String dataverseName, String policyName)
+ void dropFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
throws AlgebricksException, RemoteException;
/**
* @param txnId
- * @param dataverse
+ * @param dataverseName
* @return
* @throws AlgebricksException
* @throws RemoteException
*/
- List<FeedPolicyEntity> getDataversePolicies(TxnId txnId, String dataverse)
+ List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, DataverseName dataverseName)
throws AlgebricksException, RemoteException;
/**
@@ -683,7 +694,7 @@
* @throws AlgebricksException
* @throws RemoteException
*/
- void dropExternalFile(TxnId txnId, String dataverseName, String datasetName, int fileNumber)
+ void dropExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, int fileNumber)
throws AlgebricksException, RemoteException;
/**
@@ -716,7 +727,7 @@
* For example, if the index does not exist.
* @throws RemoteException
*/
- ExternalFile getExternalFile(TxnId txnId, String dataverseName, String datasetName, Integer fileNumber)
+ ExternalFile getExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, Integer fileNumber)
throws AlgebricksException, RemoteException;
/**
@@ -781,12 +792,12 @@
void addFeedConnection(TxnId txnId, FeedConnection feedConnection) throws AlgebricksException, RemoteException;
- FeedConnection getFeedConnection(TxnId txnId, String dataverseName, String feedName, String datasetName)
+ FeedConnection getFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
throws AlgebricksException, RemoteException;
- void dropFeedConnection(TxnId txnId, String dataverseName, String feedName, String datasetName)
+ void dropFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
throws AlgebricksException, RemoteException;
- List<FeedConnection> getFeedConnections(TxnId txnId, String dataverseName, String feedName)
+ List<FeedConnection> getFeedConnections(TxnId txnId, DataverseName dataverseName, String feedName)
throws AlgebricksException, RemoteException;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
index 4cc7719..0f184d3 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IValueExtractor.java
@@ -20,7 +20,6 @@
package org.apache.asterix.metadata.api;
import java.io.IOException;
-import java.rmi.RemoteException;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -47,5 +46,5 @@
* @throws HyracksDataException
* @throws IOException
*/
- T getValue(TxnId txnId, ITupleReference tuple) throws AlgebricksException, HyracksDataException, RemoteException;
+ T getValue(TxnId txnId, ITupleReference tuple) throws AlgebricksException, HyracksDataException;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
index c4f5bcb..938cb22 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.metadata.bootstrap;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.entities.Datatype;
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.utils.MetadataConstants;
@@ -27,7 +28,7 @@
public class MetadataBuiltinEntities {
//--------------------------------------- Dataverses ----------------------------------------//
- public static final String DEFAULT_DATAVERSE_NAME = "Default";
+ public static final DataverseName DEFAULT_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Default");
public static final Dataverse DEFAULT_DATAVERSE =
new Dataverse(DEFAULT_DATAVERSE_NAME, NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
//--------------------------------------- Datatypes -----------------------------------------//
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
index 4103a2c..c33cac1 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
@@ -23,6 +23,7 @@
import java.util.Arrays;
import java.util.List;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
import org.apache.asterix.common.transactions.DatasetId;
import org.apache.asterix.common.transactions.ImmutableDatasetId;
@@ -182,7 +183,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return MetadataConstants.METADATA_DATAVERSE_NAME;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
index 5377c9d..f612c44 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
@@ -31,8 +31,6 @@
* Contains static primary-index descriptors of all metadata datasets.
*/
public class MetadataPrimaryIndexes {
- public static final MetadataIndexImmutableProperties PROPERTIES_METADATA =
- new MetadataIndexImmutableProperties(MetadataConstants.METADATA_DATAVERSE_NAME, 0, 0);
public static final MetadataIndexImmutableProperties PROPERTIES_DATAVERSE =
new MetadataIndexImmutableProperties(MetadataConstants.DATAVERSE_DATASET_NAME, 1, 1);
public static final MetadataIndexImmutableProperties PROPERTIES_DATASET =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index d9309d9..ac36680 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -74,7 +74,7 @@
public static final String FIELD_NAME_KIND = "Kind";
public static final String FIELD_NAME_LANGUAGE = "Language";
public static final String FIELD_NAME_LAST_REFRESH_TIME = "LastRefreshTime";
- public static final String FIELD_NAME_METADATA_DATAVERSE = "MetatypeDataverseName";
+ public static final String FIELD_NAME_METATYPE_DATAVERSE_NAME = "MetatypeDataverseName";
public static final String FIELD_NAME_METATYPE_NAME = "MetatypeName";
public static final String FIELD_NAME_NAME = "Name";
public static final String FIELD_NAME_NODE_NAME = "NodeName";
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceId.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceId.java
index d61ae15..c27ac42 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceId.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceId.java
@@ -20,10 +20,17 @@
package org.apache.asterix.metadata.declared;
import java.util.Arrays;
+import java.util.Objects;
+
+import org.apache.asterix.common.metadata.DataverseName;
public final class DataSourceId {
- private String[] components;
+ private final DataverseName dataverseName;
+
+ private final String datasourceName;
+
+ private final String[] parameters;
/**
* The original constructor taking
@@ -33,34 +40,33 @@
* @param datasourceName
* the name for this datasource
*/
- public DataSourceId(String dataverseName, String datasourceName) {
- this(new String[] { dataverseName, datasourceName });
+ public DataSourceId(DataverseName dataverseName, String datasourceName) {
+ this(dataverseName, datasourceName, null);
}
/**
- * An extended constructor taking an arbitrary number of name components.
+ * An extended constructor taking an arbitrary number of name parameters.
* This constructor allows the definition of datasources that have the same dataverse name and datasource name but
* that would expose different behavior. It enables the definition of (compile-time) parameterized datasources.
* Please note that the first 2 parameters still need to be 1) a dataverse name and 2) a datasource name.
- *
- * @param components
- * name components used to construct the datasource identifier.
*/
- public DataSourceId(String... components) {
- this.components = components;
+ public DataSourceId(DataverseName dataverseName, String datasourceName, String[] parameters) {
+ this.dataverseName = dataverseName;
+ this.datasourceName = datasourceName;
+ this.parameters = parameters;
}
@Override
public String toString() {
- return String.join(".", components);
+ return dataverseName + "." + datasourceName + (parameters != null ? "." + String.join(".", parameters) : "");
}
- public String getDataverseName() {
- return components[0];
+ public DataverseName getDataverseName() {
+ return dataverseName;
}
public String getDatasourceName() {
- return components[1];
+ return datasourceName;
}
@Override
@@ -71,11 +77,15 @@
if (o == null || getClass() != o.getClass()) {
return false;
}
- return Arrays.equals(components, ((DataSourceId) o).components);
+ DataSourceId that = (DataSourceId) o;
+ return dataverseName.equals(that.dataverseName) && datasourceName.equals(that.datasourceName)
+ && Arrays.equals(parameters, that.parameters);
}
@Override
public int hashCode() {
- return Arrays.hashCode(components);
+ int result = Objects.hash(dataverseName, datasourceName);
+ result = 31 * result + Arrays.hashCode(parameters);
+ return result;
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceIndex.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceIndex.java
index 96ed2dd..05498b9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceIndex.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSourceIndex.java
@@ -19,6 +19,7 @@
package org.apache.asterix.metadata.declared;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.entities.Index;
import org.apache.hyracks.algebricks.core.algebra.metadata.IDataSource;
import org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
@@ -26,16 +27,17 @@
public class DataSourceIndex implements IDataSourceIndex<String, DataSourceId> {
private final Index index;
- private final String dataset;
- private final String dataverse;
+ private final DataverseName dataverseName;
+ private final String datasetName;
private final MetadataProvider metadataProvider;
// Every transactions needs to work with its own instance of an
// MetadataProvider.
- public DataSourceIndex(Index index, String dataverse, String dataset, MetadataProvider metadataProvider) {
+ public DataSourceIndex(Index index, DataverseName dataverseName, String datasetName,
+ MetadataProvider metadataProvider) {
this.index = index;
- this.dataset = dataset;
- this.dataverse = dataverse;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
this.metadataProvider = metadataProvider;
}
@@ -43,7 +45,7 @@
@Override
public IDataSource<DataSourceId> getDataSource() {
try {
- DataSourceId sourceId = new DataSourceId(dataverse, dataset);
+ DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
return metadataProvider.lookupSourceInMetadata(sourceId);
} catch (Exception me) {
return null;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
index 62cce05..f3a88ab 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
@@ -23,6 +23,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.metadata.IDatasetDetails;
import org.apache.asterix.metadata.MetadataManager;
@@ -116,7 +117,7 @@
return metadataProvider.buildExternalDatasetDataScannerRuntime(jobSpec, itemType, adapterFactory);
case INTERNAL:
DataSourceId id = getId();
- String dataverseName = id.getDataverseName();
+ DataverseName dataverseName = id.getDataverseName();
String datasetName = id.getDatasourceName();
Index primaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
dataverseName, datasetName, datasetName);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FunctionDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FunctionDataSource.java
index 8e79683..93927dd 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FunctionDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FunctionDataSource.java
@@ -27,6 +27,7 @@
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.functions.FunctionSignature;
import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
import org.apache.asterix.metadata.api.IDatasourceFunction;
import org.apache.asterix.om.types.IAType;
@@ -99,13 +100,6 @@
}
protected static DataSourceId createDataSourceId(FunctionIdentifier fid, String... parameters) {
- int paramCount = parameters != null ? parameters.length : 0;
- String[] components = new String[paramCount + 2];
- components[0] = fid.getNamespace();
- components[1] = fid.getName();
- if (paramCount > 0) {
- System.arraycopy(parameters, 0, components, 2, paramCount);
- }
- return new DataSourceId(components);
+ return new DataSourceId(FunctionSignature.getDataverseName(fid), fid.getName(), parameters);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
index 3460a46..e1ad67e 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
@@ -26,6 +26,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
@@ -50,6 +51,9 @@
public class LoadableDataSource extends DataSource {
+ private static final DataverseName LOADABLE_DV = DataverseName.createSinglePartName("loadable_dv"); // 1-part name
+ private static final String LOADABLE_DS = "loadable_ds";
+
private final Dataset targetDataset;
private final List<List<String>> partitioningKeys;
private final String adapter;
@@ -58,7 +62,7 @@
public LoadableDataSource(Dataset targetDataset, IAType itemType, IAType metaItemType, String adapter,
Map<String, String> properties) throws AlgebricksException, IOException {
- super(new DataSourceId("loadable_dv", "loadable_ds"), itemType, metaItemType, Type.LOADABLE, null);
+ super(new DataSourceId(LOADABLE_DV, LOADABLE_DS), itemType, metaItemType, Type.LOADABLE, null);
this.targetDataset = targetDataset;
this.adapter = adapter;
this.adapterProperties = properties;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
index e819d65..cc96ce6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
@@ -23,6 +23,7 @@
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.entities.Dataset;
@@ -46,34 +47,34 @@
throw new AssertionError("This util class should not be initialized.");
}
- public static IAType findType(MetadataTransactionContext mdTxnCtx, String dataverse, String typeName)
+ public static IAType findType(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String typeName)
throws AlgebricksException {
- if (dataverse == null || typeName == null) {
+ if (dataverseName == null || typeName == null) {
return null;
}
- Datatype type = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverse, typeName);
+ Datatype type = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
if (type == null) {
- throw new AlgebricksException("Type name '" + typeName + "' unknown in dataverse '" + dataverse + "'");
+ throw new AlgebricksException("Type name '" + typeName + "' unknown in dataverse '" + dataverseName + "'");
}
return type.getDatatype();
}
- public static ARecordType findOutputRecordType(MetadataTransactionContext mdTxnCtx, String dataverse,
+ public static ARecordType findOutputRecordType(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String outputRecordType) throws AlgebricksException {
if (outputRecordType == null) {
return null;
}
- if (dataverse == null) {
+ if (dataverseName == null) {
throw new AlgebricksException("Cannot declare output-record-type with no dataverse!");
}
- IAType type = findType(mdTxnCtx, dataverse, outputRecordType);
+ IAType type = findType(mdTxnCtx, dataverseName, outputRecordType);
if (!(type instanceof ARecordType)) {
throw new AlgebricksException("Type " + outputRecordType + " is not a record type!");
}
return (ARecordType) type;
}
- public static DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, String dataverseName,
+ public static DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String adapterName) throws AlgebricksException {
DatasourceAdapter adapter;
// search in default namespace (built-in adapter)
@@ -86,12 +87,12 @@
return adapter;
}
- public static Dataset findDataset(MetadataTransactionContext mdTxnCtx, String dataverse, String dataset)
+ public static Dataset findDataset(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String dataset)
throws AlgebricksException {
- return MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, dataset);
+ return MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, dataset);
}
- public static Dataset findExistingDataset(MetadataTransactionContext mdTxnCtx, String dataverseName,
+ public static Dataset findExistingDataset(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
Dataset dataset = findDataset(mdTxnCtx, dataverseName, datasetName);
if (dataset == null) {
@@ -118,22 +119,22 @@
return MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroupName).getNodeNames();
}
- public static Feed findFeed(MetadataTransactionContext mdTxnCtx, String dataverse, String feedName)
+ public static Feed findFeed(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String feedName)
throws AlgebricksException {
- return MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverse, feedName);
+ return MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, feedName);
}
- public static FeedConnection findFeedConnection(MetadataTransactionContext mdTxnCtx, String dataverse,
+ public static FeedConnection findFeedConnection(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String feedName, String datasetName) throws AlgebricksException {
- return MetadataManager.INSTANCE.getFeedConnection(mdTxnCtx, dataverse, feedName, datasetName);
+ return MetadataManager.INSTANCE.getFeedConnection(mdTxnCtx, dataverseName, feedName, datasetName);
}
- public static FeedPolicyEntity findFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverse,
+ public static FeedPolicyEntity findFeedPolicy(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String policyName) throws AlgebricksException {
- return MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverse, policyName);
+ return MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
}
- public static List<Index> getDatasetIndexes(MetadataTransactionContext mdTxnCtx, String dataverseName,
+ public static List<Index> getDatasetIndexes(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
return MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
}
@@ -144,17 +145,17 @@
}
public static DataSource lookupSourceInMetadata(IClusterStateManager clusterStateManager,
- MetadataTransactionContext mdTxnCtx, DataSourceId aqlId) throws AlgebricksException {
- Dataset dataset = findDataset(mdTxnCtx, aqlId.getDataverseName(), aqlId.getDatasourceName());
+ MetadataTransactionContext mdTxnCtx, DataSourceId id) throws AlgebricksException {
+ Dataset dataset = findDataset(mdTxnCtx, id.getDataverseName(), id.getDatasourceName());
if (dataset == null) {
- throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
+ throw new AlgebricksException("Datasource with id " + id + " was not found.");
}
IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
INodeDomain domain = findNodeDomain(clusterStateManager, mdTxnCtx, dataset.getNodeGroupName());
byte datasourceType = dataset.getDatasetType().equals(DatasetType.EXTERNAL) ? DataSource.Type.EXTERNAL_DATASET
: DataSource.Type.INTERNAL_DATASET;
- return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
- dataset.getDatasetDetails(), domain);
+ return new DatasetDataSource(id, dataset, itemType, metaItemType, datasourceType, dataset.getDatasetDetails(),
+ domain);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 0a72ceb..6f54a0c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -39,6 +39,7 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.LockList;
import org.apache.asterix.common.storage.ICompressionManager;
import org.apache.asterix.common.transactions.ITxnIdFactory;
@@ -227,7 +228,7 @@
return defaultDataverse;
}
- public String getDefaultDataverseName() {
+ public DataverseName getDefaultDataverseName() {
return defaultDataverse.getDataverseName();
}
@@ -325,16 +326,15 @@
getProperty("output-record-type"));
}
- public Dataset findDataset(String dataverse, String dataset) throws AlgebricksException {
- String dv =
- dataverse == null ? (defaultDataverse == null ? null : defaultDataverse.getDataverseName()) : dataverse;
- if (dv == null) {
+ public Dataset findDataset(DataverseName dataverseName, String datasetName) throws AlgebricksException {
+ DataverseName dvName = dataverseName == null
+ ? (defaultDataverse == null ? null : defaultDataverse.getDataverseName()) : dataverseName;
+ if (dvName == null) {
return null;
}
- String fqName = dv + '.' + dataset;
- appCtx.getMetadataLockManager().acquireDataverseReadLock(locks, dv);
- appCtx.getMetadataLockManager().acquireDatasetReadLock(locks, fqName);
- return MetadataManagerUtil.findDataset(mdTxnCtx, dv, dataset);
+ appCtx.getMetadataLockManager().acquireDataverseReadLock(locks, dvName);
+ appCtx.getMetadataLockManager().acquireDatasetReadLock(locks, dvName, datasetName);
+ return MetadataManagerUtil.findDataset(mdTxnCtx, dvName, datasetName);
}
public INodeDomain findNodeDomain(String nodeGroupName) throws AlgebricksException {
@@ -345,8 +345,8 @@
return MetadataManagerUtil.findNodes(mdTxnCtx, nodeGroupName);
}
- public IAType findType(String dataverse, String typeName) throws AlgebricksException {
- return MetadataManagerUtil.findType(mdTxnCtx, dataverse, typeName);
+ public IAType findType(DataverseName dataverseName, String typeName) throws AlgebricksException {
+ return MetadataManagerUtil.findType(mdTxnCtx, dataverseName, typeName);
}
public IAType findType(Dataset dataset) throws AlgebricksException {
@@ -357,17 +357,17 @@
return findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
}
- public Feed findFeed(String dataverse, String feedName) throws AlgebricksException {
- return MetadataManagerUtil.findFeed(mdTxnCtx, dataverse, feedName);
+ public Feed findFeed(DataverseName dataverseName, String feedName) throws AlgebricksException {
+ return MetadataManagerUtil.findFeed(mdTxnCtx, dataverseName, feedName);
}
- public FeedConnection findFeedConnection(String dataverseName, String feedName, String datasetName)
+ public FeedConnection findFeedConnection(DataverseName dataverseName, String feedName, String datasetName)
throws AlgebricksException {
return MetadataManagerUtil.findFeedConnection(mdTxnCtx, dataverseName, feedName, datasetName);
}
- public FeedPolicyEntity findFeedPolicy(String dataverse, String policyName) throws AlgebricksException {
- return MetadataManagerUtil.findFeedPolicy(mdTxnCtx, dataverse, policyName);
+ public FeedPolicyEntity findFeedPolicy(DataverseName dataverseName, String policyName) throws AlgebricksException {
+ return MetadataManagerUtil.findFeedPolicy(mdTxnCtx, dataverseName, policyName);
}
@Override
@@ -390,11 +390,12 @@
: null;
}
- public Index getIndex(String dataverseName, String datasetName, String indexName) throws AlgebricksException {
+ public Index getIndex(DataverseName dataverseName, String datasetName, String indexName)
+ throws AlgebricksException {
return MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
}
- public List<Index> getDatasetIndexes(String dataverseName, String datasetName) throws AlgebricksException {
+ public List<Index> getDatasetIndexes(DataverseName dataverseName, String datasetName) throws AlgebricksException {
return MetadataManagerUtil.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
}
@@ -426,7 +427,7 @@
}
}
- public Dataverse findDataverse(String dataverseName) throws AlgebricksException {
+ public Dataverse findDataverse(DataverseName dataverseName) throws AlgebricksException {
return MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
}
@@ -620,7 +621,7 @@
IDataSource<DataSourceId> dataSource, IOperatorSchema propagatedSchema, List<LogicalVariable> keys,
LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields, JobGenContext context,
JobSpecification spec) throws AlgebricksException {
- String dataverseName = dataSource.getId().getDataverseName();
+ DataverseName dataverseName = dataSource.getId().getDataverseName();
String datasetName = dataSource.getId().getDatasourceName();
Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
int numKeys = keys.size();
@@ -720,7 +721,7 @@
JobGenContext context, JobSpecification spec, boolean bulkload) throws AlgebricksException {
String indexName = dataSourceIndex.getId();
- String dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
+ DataverseName dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
IOperatorSchema inputSchema;
@@ -778,7 +779,7 @@
protected IAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
Map<String, String> configuration, ARecordType itemType, ARecordType metaType) throws AlgebricksException {
try {
- configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
+ configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName().getCanonicalForm());
IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(
getApplicationContext().getServiceContext(), adapterName, configuration, itemType, metaType);
@@ -814,9 +815,9 @@
numKeyFields / 2);
}
- public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitAndConstraints(String dataverse) {
+ public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitAndConstraints(DataverseName dataverseName) {
return SplitsAndConstraintsUtil.getDataverseSplitProviderAndConstraints(appCtx.getClusterStateManager(),
- dataverse);
+ dataverseName);
}
public FileSplit[] splitsForIndex(MetadataTransactionContext mdTxnCtx, Dataset dataset, String indexName)
@@ -824,8 +825,8 @@
return SplitsAndConstraintsUtil.getIndexSplits(dataset, indexName, mdTxnCtx, appCtx.getClusterStateManager());
}
- public DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, String dataverseName, String adapterName)
- throws AlgebricksException {
+ public DatasourceAdapter getAdapter(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName,
+ String adapterName) throws AlgebricksException {
DatasourceAdapter adapter;
// search in default namespace (built-in adapter)
adapter = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME, adapterName);
@@ -1082,7 +1083,7 @@
LogicalVariable upsertIndicatorVar, List<LogicalVariable> prevSecondaryKeys,
LogicalVariable prevAdditionalFilteringKey) throws AlgebricksException {
String indexName = dataSourceIndex.getId();
- String dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
+ DataverseName dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName);
@@ -1119,7 +1120,7 @@
}
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeRuntime(String dataverseName,
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBTreeRuntime(DataverseName dataverseName,
String datasetName, String indexName, IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys,
List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields,
AsterixTupleFilterFactory filterFactory, RecordDescriptor inputRecordDesc, JobGenContext context,
@@ -1206,7 +1207,7 @@
}
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRTreeRuntime(String dataverseName,
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getRTreeRuntime(DataverseName dataverseName,
String datasetName, String indexName, IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys,
List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields,
AsterixTupleFilterFactory filterFactory, RecordDescriptor recordDesc, JobGenContext context,
@@ -1303,13 +1304,14 @@
return new Pair<>(op, splitsAndConstraint.second);
}
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInvertedIndexRuntime(String dataverseName,
- String datasetName, String indexName, IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys,
- List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields,
- AsterixTupleFilterFactory filterFactory, RecordDescriptor recordDesc, JobGenContext context,
- JobSpecification spec, IndexOperation indexOp, IndexType indexType, boolean bulkload,
- LogicalVariable upsertIndicatorVar, List<LogicalVariable> prevSecondaryKeys,
- List<LogicalVariable> prevAdditionalFilteringKeys) throws AlgebricksException {
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInvertedIndexRuntime(
+ DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema propagatedSchema,
+ List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ List<LogicalVariable> additionalNonKeyFields, AsterixTupleFilterFactory filterFactory,
+ RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, IndexOperation indexOp,
+ IndexType indexType, boolean bulkload, LogicalVariable upsertIndicatorVar,
+ List<LogicalVariable> prevSecondaryKeys, List<LogicalVariable> prevAdditionalFilteringKeys)
+ throws AlgebricksException {
// Check the index is length-partitioned or not.
boolean isPartitioned;
if (indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX
@@ -1418,10 +1420,10 @@
}
// Get a Tokenizer for the bulk-loading data into a n-gram or keyword index.
- private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBinaryTokenizerRuntime(String dataverseName,
- String datasetName, String indexName, IOperatorSchema inputSchema, IOperatorSchema propagatedSchema,
- List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys, RecordDescriptor recordDesc,
- JobSpecification spec, IndexType indexType) throws AlgebricksException {
+ private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBinaryTokenizerRuntime(
+ DataverseName dataverseName, String datasetName, String indexName, IOperatorSchema inputSchema,
+ IOperatorSchema propagatedSchema, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
+ RecordDescriptor recordDesc, JobSpecification spec, IndexType indexType) throws AlgebricksException {
// Sanity checks.
if (primaryKeys.size() > 1) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/BuiltinTypeMap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/BuiltinTypeMap.java
index 14f76eb..d46f133 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/BuiltinTypeMap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/BuiltinTypeMap.java
@@ -19,13 +19,12 @@
package org.apache.asterix.metadata.entities;
-import java.rmi.RemoteException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
-import org.apache.asterix.common.exceptions.MetadataException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.metadata.MetadataNode;
import org.apache.asterix.om.types.AUnionType;
@@ -94,16 +93,12 @@
return new HashSet<>(_builtinTypeMap.values());
}
- public static IAType getTypeFromTypeName(MetadataNode metadataNode, TxnId txnId, String dataverseName,
+ public static IAType getTypeFromTypeName(MetadataNode metadataNode, TxnId txnId, DataverseName dataverseName,
String typeName, boolean optional) throws AlgebricksException {
IAType type = _builtinTypeMap.get(typeName);
if (type == null) {
- try {
- Datatype dt = metadataNode.getDatatype(txnId, dataverseName, typeName);
- type = dt.getDatatype();
- } catch (RemoteException e) {
- throw new MetadataException(e);
- }
+ Datatype dt = metadataNode.getDatatype(txnId, dataverseName, typeName);
+ type = dt.getDatatype();
}
if (optional) {
type = AUnionType.createUnknownableType(type);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
index 2d90e6f..1b01d43 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
@@ -19,6 +19,7 @@
package org.apache.asterix.metadata.entities;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -27,20 +28,20 @@
*/
public class CompactionPolicy implements IMetadataEntity<CompactionPolicy> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
- private final String dataverseName;
+ private final DataverseName dataverseName;
// Enforced to be unique within a dataverse.
private final String policyName;
private final String className;
- public CompactionPolicy(String dataverseName, String policyName, String className) {
+ public CompactionPolicy(DataverseName dataverseName, String policyName, String className) {
this.dataverseName = dataverseName;
this.policyName = policyName;
this.className = className;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 2be7070..cc4bb09 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -37,6 +37,7 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.ioopcallbacks.LSMIndexIOOperationCallbackFactory;
import org.apache.asterix.common.ioopcallbacks.LSMIndexPageWriteCallbackFactory;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.IDataset;
import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
import org.apache.asterix.common.utils.JobUtils;
@@ -125,7 +126,7 @@
/*
* Constants
*/
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
private static final Logger LOGGER = LogManager.getLogger();
private static final RTreeResourceFactoryProvider rTreeResourceFactoryProvider =
RTreeResourceFactoryProvider.INSTANCE;
@@ -135,9 +136,9 @@
* Members
*/
private final int datasetId;
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String datasetName;
- private final String recordTypeDataverseName;
+ private final DataverseName recordTypeDataverseName;
private final String recordTypeName;
private final String nodeGroupName;
private final String compactionPolicyFactory;
@@ -145,25 +146,26 @@
private final Map<String, String> compactionPolicyProperties;
private final DatasetType datasetType;
private final IDatasetDetails datasetDetails;
- private final String metaTypeDataverseName;
+ private final DataverseName metaTypeDataverseName;
private final String metaTypeName;
private final long rebalanceCount;
private int pendingOp;
private final String compressionScheme;
- public Dataset(String dataverseName, String datasetName, String recordTypeDataverseName, String recordTypeName,
- String nodeGroupName, String compactionPolicy, Map<String, String> compactionPolicyProperties,
- IDatasetDetails datasetDetails, Map<String, String> hints, DatasetType datasetType, int datasetId,
- int pendingOp) {
+ public Dataset(DataverseName dataverseName, String datasetName, DataverseName recordTypeDataverseName,
+ String recordTypeName, String nodeGroupName, String compactionPolicy,
+ Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails, Map<String, String> hints,
+ DatasetType datasetType, int datasetId, int pendingOp) {
this(dataverseName, datasetName, recordTypeDataverseName, recordTypeName, /*metaTypeDataverseName*/null,
/*metaTypeName*/null, nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails,
hints, datasetType, datasetId, pendingOp, CompressionManager.NONE);
}
- public Dataset(String dataverseName, String datasetName, String itemTypeDataverseName, String itemTypeName,
- String metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName, String compactionPolicy,
- Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails, Map<String, String> hints,
- DatasetType datasetType, int datasetId, int pendingOp, String compressionScheme) {
+ public Dataset(DataverseName dataverseName, String datasetName, DataverseName itemTypeDataverseName,
+ String itemTypeName, DataverseName metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName,
+ String compactionPolicy, Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails,
+ Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp,
+ String compressionScheme) {
this(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName,
metaItemTypeName, nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints,
datasetType, datasetId, pendingOp, 0L, compressionScheme);
@@ -177,10 +179,11 @@
dataset.compressionScheme);
}
- public Dataset(String dataverseName, String datasetName, String itemTypeDataverseName, String itemTypeName,
- String metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName, String compactionPolicy,
- Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails, Map<String, String> hints,
- DatasetType datasetType, int datasetId, int pendingOp, long rebalanceCount, String compressionScheme) {
+ public Dataset(DataverseName dataverseName, String datasetName, DataverseName itemTypeDataverseName,
+ String itemTypeName, DataverseName metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName,
+ String compactionPolicy, Map<String, String> compactionPolicyProperties, IDatasetDetails datasetDetails,
+ Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp, long rebalanceCount,
+ String compressionScheme) {
this.dataverseName = dataverseName;
this.datasetName = datasetName;
this.recordTypeName = itemTypeName;
@@ -200,7 +203,7 @@
}
@Override
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -213,7 +216,7 @@
return recordTypeName;
}
- public String getItemTypeDataverseName() {
+ public DataverseName getItemTypeDataverseName() {
return recordTypeDataverseName;
}
@@ -249,7 +252,7 @@
return pendingOp;
}
- public String getMetaItemTypeDataverseName() {
+ public DataverseName getMetaItemTypeDataverseName() {
return metaTypeDataverseName;
}
@@ -851,10 +854,6 @@
return partitions;
}
- public String getFullyQualifiedName() {
- return dataverseName + '.' + datasetName;
- }
-
public String getCompressionScheme() {
return compressionScheme;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
index 0580756..8e5dbd9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
@@ -19,6 +19,7 @@
package org.apache.asterix.metadata.entities;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
import org.apache.asterix.om.types.IAType;
@@ -28,22 +29,22 @@
*/
public class Datatype implements IMetadataEntity<Datatype> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
- private final String dataverseName;
+ private final DataverseName dataverseName;
// Enforced to be unique within a dataverse.
private final String datatypeName;
private final IAType datatype;
private final boolean isAnonymous;
- public Datatype(String dataverseName, String datatypeName, IAType datatype, boolean isAnonymous) {
+ public Datatype(DataverseName dataverseName, String datatypeName, IAType datatype, boolean isAnonymous) {
this.dataverseName = dataverseName;
this.datatypeName = datatypeName;
this.datatype = datatype;
this.isAnonymous = isAnonymous;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
index 35d21a0..5cde932 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
@@ -19,6 +19,7 @@
package org.apache.asterix.metadata.entities;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -27,19 +28,19 @@
*/
public class Dataverse implements IMetadataEntity<Dataverse> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
// Enforced to be unique within an Asterix cluster..
- private final String dataverseName;
+ private final DataverseName dataverseName;
private final String dataFormat;
private final int pendingOp;
- public Dataverse(String dataverseName, String format, int pendingOp) {
+ public Dataverse(DataverseName dataverseName, String format, int pendingOp) {
this.dataverseName = dataverseName;
this.dataFormat = format;
this.pendingOp = pendingOp;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -79,5 +80,4 @@
public int hashCode() {
return dataverseName.hashCode();
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
index 40c7ede..cd0c0f6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
@@ -22,6 +22,7 @@
import java.util.Map;
import org.apache.asterix.active.EntityId;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.api.IFeed;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -30,7 +31,7 @@
* Feed POJO
*/
public class Feed implements IMetadataEntity<Feed>, IFeed {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
public static final String EXTENSION_NAME = "Feed";
/** A unique identifier for the feed */
@@ -40,7 +41,7 @@
/** Feed configurations */
private Map<String, String> feedConfiguration;
- public Feed(String dataverseName, String feedName, Map<String, String> feedConfiguration) {
+ public Feed(DataverseName dataverseName, String feedName, Map<String, String> feedConfiguration) {
this.feedId = new EntityId(EXTENSION_NAME, dataverseName, feedName);
this.displayName = "(" + feedId + ")";
this.feedConfiguration = feedConfiguration;
@@ -52,8 +53,8 @@
}
@Override
- public String getDataverseName() {
- return feedId.getDataverse();
+ public DataverseName getDataverseName() {
+ return feedId.getDataverseName();
}
@Override
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedConnection.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedConnection.java
index 2029e49..154391a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedConnection.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedConnection.java
@@ -23,6 +23,7 @@
import org.apache.asterix.active.EntityId;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.util.FeedUtils;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -32,11 +33,11 @@
*/
public class FeedConnection implements IMetadataEntity<FeedConnection> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
private EntityId feedId;
private String connectionId;
- private String dataverseName;
+ private DataverseName dataverseName;
private String feedName;
private String datasetName;
private String policyName;
@@ -44,7 +45,7 @@
private String outputType;
private List<FunctionSignature> appliedFunctions;
- public FeedConnection(String dataverseName, String feedName, String datasetName,
+ public FeedConnection(DataverseName dataverseName, String feedName, String datasetName,
List<FunctionSignature> appliedFunctions, String policyName, String whereClauseBody, String outputType) {
this.dataverseName = dataverseName;
this.feedName = feedName;
@@ -87,7 +88,7 @@
return null;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
index 0b0deb9..65076c8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
@@ -21,6 +21,7 @@
import java.util.Map;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.policy.FeedPolicy;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -30,12 +31,12 @@
*/
public class FeedPolicyEntity extends FeedPolicy implements IMetadataEntity<FeedPolicyEntity> {
- public FeedPolicyEntity(String dataverseName, String policyName, String description,
+ public FeedPolicyEntity(DataverseName dataverseName, String policyName, String description,
Map<String, String> properties) {
super(dataverseName, policyName, description, properties);
}
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
@Override
public FeedPolicyEntity addToCache(MetadataCache cache) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
index 128e704..8525985 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
@@ -19,23 +19,25 @@
package org.apache.asterix.metadata.entities;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.hyracks.algebricks.common.utils.Triple;
public class Function implements IMetadataEntity<Function> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
public static final String LANGUAGE_AQL = "AQL";
public static final String LANGUAGE_SQLPP = "SQLPP";
public static final String LANGUAGE_JAVA = "JAVA";
public static final String RETURNTYPE_VOID = "VOID";
- public static final String NOT_APPLICABLE = "N/A";
private final FunctionSignature signature;
- private final List<List<List<String>>> dependencies;
+ private final List<List<Triple<DataverseName, String, String>>> dependencies;
private final List<String> arguments;
private final String body;
private final String returnType;
@@ -43,7 +45,7 @@
private final String kind;
public Function(FunctionSignature signature, List<String> arguments, String returnType, String functionBody,
- String language, String functionKind, List<List<List<String>>> dependencies) {
+ String language, String functionKind, List<List<Triple<DataverseName, String, String>>> dependencies) {
this.signature = signature;
this.arguments = arguments;
this.body = functionBody;
@@ -51,9 +53,9 @@
this.language = language;
this.kind = functionKind;
if (dependencies == null) {
- this.dependencies = new ArrayList<>();
- this.dependencies.add(new ArrayList<>());
- this.dependencies.add(new ArrayList<>());
+ this.dependencies = new ArrayList<>(2);
+ this.dependencies.add(Collections.emptyList());
+ this.dependencies.add(Collections.emptyList());
} else {
this.dependencies = dependencies;
}
@@ -63,8 +65,8 @@
return signature;
}
- public String getDataverseName() {
- return signature.getNamespace();
+ public DataverseName getDataverseName() {
+ return signature.getDataverseName();
}
public String getName() {
@@ -79,7 +81,7 @@
return arguments;
}
- public List<List<List<String>>> getDependencies() {
+ public List<List<Triple<DataverseName, String, String>>> getDependencies() {
return dependencies;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
index 4084824..ee8622c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
@@ -25,6 +25,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
@@ -40,10 +41,10 @@
*/
public class Index implements IMetadataEntity<Index>, Comparable<Index> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
public static final int RECORD_INDICATOR = 0;
- private final String dataverseName;
+ private final DataverseName dataverseName;
// Enforced to be unique within a dataverse.
private final String datasetName;
// Enforced to be unique within a dataverse, dataset combination.
@@ -60,7 +61,7 @@
// Type of pending operations with respect to atomic DDL operation
private int pendingOp;
- public Index(String dataverseName, String datasetName, String indexName, IndexType indexType,
+ public Index(DataverseName dataverseName, String datasetName, String indexName, IndexType indexType,
List<List<String>> keyFieldNames, List<Integer> keyFieldSourceIndicators, List<IAType> keyFieldTypes,
int gramLength, boolean overrideKeyFieldTypes, boolean isEnforced, boolean isPrimaryIndex, int pendingOp) {
this.dataverseName = dataverseName;
@@ -77,14 +78,14 @@
this.pendingOp = pendingOp;
}
- public Index(String dataverseName, String datasetName, String indexName, IndexType indexType,
+ public Index(DataverseName dataverseName, String datasetName, String indexName, IndexType indexType,
List<List<String>> keyFieldNames, List<Integer> keyFieldSourceIndicators, List<IAType> keyFieldTypes,
boolean overrideKeyFieldTypes, boolean isEnforced, boolean isPrimaryIndex, int pendingOp) {
this(dataverseName, datasetName, indexName, indexType, keyFieldNames, keyFieldSourceIndicators, keyFieldTypes,
-1, overrideKeyFieldTypes, isEnforced, isPrimaryIndex, pendingOp);
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverseName;
}
@@ -297,6 +298,6 @@
@Override
public String toString() {
- return dataverseName + '.' + datasetName + '.' + indexName;
+ return dataverseName + "." + datasetName + "." + indexName;
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
index 8ffc266..87ead6a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
@@ -18,22 +18,23 @@
*/
package org.apache.asterix.metadata.entities;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataCache;
import org.apache.asterix.metadata.api.IMetadataEntity;
public class Library implements IMetadataEntity<Library> {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 2L;
- private final String dataverse;
+ private final DataverseName dataverse;
private final String name;
- public Library(String dataverseName, String libraryName) {
+ public Library(DataverseName dataverseName, String libraryName) {
this.dataverse = dataverseName;
this.name = libraryName;
}
- public String getDataverseName() {
+ public DataverseName getDataverseName() {
return dataverse;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
index e9e6366..16137ef 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
@@ -19,53 +19,94 @@
package org.apache.asterix.metadata.entitytupletranslators;
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.RecordBuilder;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
+import org.apache.asterix.metadata.api.IMetadataIndex;
import org.apache.asterix.om.base.ABoolean;
import org.apache.asterix.om.base.AInt32;
+import org.apache.asterix.om.base.AInt64;
import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.BuiltinType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
/**
* Contains common members shared across all concrete implementations of
* IMetadataEntityTupleTranslator.
*/
public abstract class AbstractTupleTranslator<T> implements IMetadataEntityTupleTranslator<T> {
- private static final long serialVersionUID = 1L;
- protected AMutableString aString = new AMutableString("");
+
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde =
+ protected final ISerializerDeserializer<AString> stringSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde =
+ protected final ISerializerDeserializer<ABoolean> booleanSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde =
+ protected final ISerializerDeserializer<AInt32> int32Serde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ @SuppressWarnings("unchecked")
+ protected final ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
- protected final transient IARecordBuilder recordBuilder;
- protected final transient ArrayBackedValueStorage fieldValue;
- protected final transient ArrayTupleBuilder tupleBuilder;
- protected final transient ArrayTupleReference tuple;
+ protected final ISerializerDeserializer<ARecord> recordSerDes;
- public AbstractTupleTranslator(boolean getTuple, int fieldCount) {
+ protected AMutableString aString = new AMutableString("");
+
+ protected final ARecordType payloadRecordType;
+ protected final int payloadTupleFieldIndex;
+ protected final IARecordBuilder recordBuilder;
+ protected final ArrayBackedValueStorage fieldName;
+ protected final ArrayBackedValueStorage fieldValue;
+ protected final ArrayTupleBuilder tupleBuilder;
+ protected final ArrayTupleReference tuple;
+
+ @SuppressWarnings("unchecked")
+ protected AbstractTupleTranslator(boolean getTuple, IMetadataIndex metadataIndex, int payloadTupleFieldIndex) {
+ payloadRecordType = metadataIndex.getPayloadRecordType();
+ recordSerDes = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(payloadRecordType);
+ this.payloadTupleFieldIndex = payloadTupleFieldIndex;
if (getTuple) {
recordBuilder = new RecordBuilder();
+ fieldName = payloadRecordType.isOpen() ? new ArrayBackedValueStorage() : null;
fieldValue = new ArrayBackedValueStorage();
- tupleBuilder = new ArrayTupleBuilder(fieldCount);
+ tupleBuilder = new ArrayTupleBuilder(metadataIndex.getFieldCount());
tuple = new ArrayTupleReference();
} else {
recordBuilder = null;
+ fieldName = null;
fieldValue = null;
tupleBuilder = null;
tuple = null;
}
}
+
+ @Override
+ public final T getMetadataEntityFromTuple(ITupleReference frameTuple)
+ throws HyracksDataException, AlgebricksException {
+ byte[] serRecord = frameTuple.getFieldData(payloadTupleFieldIndex);
+ int recordStartOffset = frameTuple.getFieldStart(payloadTupleFieldIndex);
+ int recordLength = frameTuple.getFieldLength(payloadTupleFieldIndex);
+ ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
+ DataInput in = new DataInputStream(stream);
+ ARecord datasetRecord = recordSerDes.deserialize(in);
+ return createMetadataEntityFromARecord(datasetRecord);
+ }
+
+ protected abstract T createMetadataEntityFromARecord(ARecord aRecord)
+ throws HyracksDataException, AlgebricksException;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
index 38f4206..1d8e408 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/CompactionPolicyTupleTranslator.java
@@ -19,18 +19,12 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.CompactionPolicy;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -38,56 +32,34 @@
* Translates a Dataset metadata entity to an ITupleReference and vice versa.
*/
public class CompactionPolicyTupleTranslator extends AbstractTupleTranslator<CompactionPolicy> {
- private static final long serialVersionUID = 5291424952240239023L;
-
- // Field indexes of serialized CompactionPolicy in a tuple.
- // Key field.
- public static final int COMPACTION_POLICY_DATAVERSE_NAME_FIELD_INDEX = 0;
-
- public static final int COMPACTION_POLICY_NAME_FIELD_INDEX = 1;
// Payload field containing serialized compactionPolicy.
- public static final int COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
-
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.COMPACTION_POLICY_RECORDTYPE);
+ private static final int COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
protected CompactionPolicyTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws HyracksDataException {
- byte[] serRecord = tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord compactionPolicyRecord = recordSerDes.deserialize(in);
- return createCompactionPolicyFromARecord(compactionPolicyRecord);
- }
-
- private CompactionPolicy createCompactionPolicyFromARecord(ARecord compactionPolicyRecord) {
- CompactionPolicy compactionPolicy = null;
- String dataverseName = ((AString) compactionPolicyRecord
+ protected CompactionPolicy createMetadataEntityFromARecord(ARecord compactionPolicyRecord) {
+ String dataverseCanonicalName = ((AString) compactionPolicyRecord
.getValueByPos(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String policyName = ((AString) compactionPolicyRecord
.getValueByPos(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX)).getStringValue();
String className = ((AString) compactionPolicyRecord
.getValueByPos(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_CLASSNAME_FIELD_INDEX)).getStringValue();
- compactionPolicy = new CompactionPolicy(dataverseName, policyName, className);
- return compactionPolicy;
+ return new CompactionPolicy(dataverseName, policyName, className);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(CompactionPolicy compactionPolicy) throws HyracksDataException {
+ String dataverseCanonicalName = compactionPolicy.getDataverseName().getCanonicalForm();
tupleBuilder.reset();
- aString.setValue(compactionPolicy.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -99,7 +71,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(compactionPolicy.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.COMPACTION_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 7f8b9bf6..64b703b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -19,9 +19,6 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.io.DataOutput;
import java.util.ArrayList;
import java.util.Calendar;
@@ -37,7 +34,7 @@
import org.apache.asterix.builders.UnorderedListBuilder;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.TransactionState;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.IDatasetDetails;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -67,8 +64,6 @@
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.runtime.compression.CompressionManager;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -77,51 +72,36 @@
* Translates a Dataset metadata entity to an ITupleReference and vice versa.
*/
public class DatasetTupleTranslator extends AbstractTupleTranslator<Dataset> {
- private static final long serialVersionUID = 1L;
+
// Payload field containing serialized Dataset.
- public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+ private static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
- @SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
- protected final transient AMutableInt32 aInt32;
- protected final transient ISerializerDeserializer<AInt32> aInt32Serde;
- protected final transient AMutableInt64 aBigInt;
- protected final transient ISerializerDeserializer<AInt64> aBigIntSerde;
- protected final transient ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage();
+ protected AMutableInt32 aInt32;
+ protected AMutableInt64 aInt64;
- @SuppressWarnings("unchecked")
protected DatasetTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.DATASET_DATASET.getFieldCount());
- aInt32 = new AMutableInt32(-1);
- aBigInt = new AMutableInt64(-1);
- aInt32Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
- aBigIntSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
+ super(getTuple, MetadataPrimaryIndexes.DATASET_DATASET, DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ aInt32 = new AMutableInt32(-1);
+ aInt64 = new AMutableInt64(-1);
+ }
}
@Override
- public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord datasetRecord = recordSerDes.deserialize(in);
- return createDatasetFromARecord(datasetRecord);
- }
-
- protected Dataset createDatasetFromARecord(ARecord datasetRecord) throws HyracksDataException {
- String dataverseName =
+ protected Dataset createMetadataEntityFromARecord(ARecord datasetRecord) {
+ String dataverseCanonicalName =
((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String datasetName =
((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETNAME_FIELD_INDEX))
.getStringValue();
String typeName =
((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPENAME_FIELD_INDEX))
.getStringValue();
- String typeDataverseName = ((AString) datasetRecord
+ String typeDataverseCanonicalName = ((AString) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue();
+ DataverseName typeDataverseName = DataverseName.createFromCanonicalForm(typeDataverseCanonicalName);
DatasetType datasetType = DatasetType.valueOf(
((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX))
.getStringValue());
@@ -141,12 +121,12 @@
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
.getCursor();
Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
- String key;
- String value;
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ String key =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ String value =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
compactionPolicyProperties.put(key, value);
}
switch (datasetType) {
@@ -165,9 +145,8 @@
List<List<String>> partitioningKey = new ArrayList<>();
List<IAType> partitioningKeyType = new ArrayList<>();
- AOrderedList fieldNameList;
while (cursor.next()) {
- fieldNameList = (AOrderedList) cursor.get();
+ AOrderedList fieldNameList = (AOrderedList) cursor.get();
IACursor nestedFieldNameCursor = (fieldNameList.getCursor());
List<String> nestedFieldName = new ArrayList<>();
while (nestedFieldNameCursor.next()) {
@@ -226,9 +205,9 @@
Map<String, String> properties = new HashMap<>();
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
+ String key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
.getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
+ String value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
.getStringValue();
properties.put(key, value);
}
@@ -247,13 +226,14 @@
Map<String, String> hints = getDatasetHints(datasetRecord);
- String metaTypeDataverseName = null;
+ DataverseName metaTypeDataverseName = null;
String metaTypeName = null;
int metaTypeDataverseNameIndex =
- datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+ datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_DATAVERSE_NAME);
if (metaTypeDataverseNameIndex >= 0) {
- metaTypeDataverseName =
+ String metaTypeDataverseCanonicalName =
((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
+ metaTypeDataverseName = DataverseName.createFromCanonicalForm(metaTypeDataverseCanonicalName);
int metaTypeNameIndex = datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_NAME);
metaTypeName = ((AString) datasetRecord.getValueByPos(metaTypeNameIndex)).getStringValue();
}
@@ -289,13 +269,14 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataset dataset)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Dataset dataset) throws HyracksDataException {
OrderedListBuilder listBuilder = new OrderedListBuilder();
ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+ String dataverseCanonicalName = dataset.getDataverseName().getCanonicalForm();
+
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
- aString.setValue(dataset.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
aString.setValue(dataset.getDatasetName());
@@ -308,7 +289,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(dataset.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -320,7 +301,7 @@
// write field 2
fieldValue.reset();
- aString.setValue(dataset.getItemTypeDataverseName());
+ aString.setValue(dataset.getItemTypeDataverseName().getCanonicalForm());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -394,13 +375,13 @@
// write field 12
fieldValue.reset();
aInt32.setValue(dataset.getDatasetId());
- aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_DATASETID_FIELD_INDEX, fieldValue);
// write field 13
fieldValue.reset();
aInt32.setValue(dataset.getPendingOp());
- aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
// write open fields
@@ -416,9 +397,6 @@
/**
* Keep protected to allow other extensions to add additional fields
- *
- * @param dataset
- * @throws HyracksDataException
*/
protected void writeOpenFields(Dataset dataset) throws HyracksDataException {
writeMetaPart(dataset);
@@ -430,10 +408,10 @@
if (dataset.hasMetaPart()) {
// write open field 1, the meta item type Dataverse name.
fieldName.reset();
- aString.setValue(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+ aString.setValue(MetadataRecordTypes.FIELD_NAME_METATYPE_DATAVERSE_NAME);
stringSerde.serialize(aString, fieldName.getDataOutput());
fieldValue.reset();
- aString.setValue(dataset.getMetaItemTypeDataverseName());
+ aString.setValue(dataset.getMetaItemTypeDataverseName().getCanonicalForm());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(fieldName, fieldValue);
@@ -477,16 +455,15 @@
aString.setValue(MetadataRecordTypes.DATASET_ARECORD_REBALANCE_FIELD_NAME);
stringSerde.serialize(aString, fieldName.getDataOutput());
fieldValue.reset();
- aBigInt.setValue(dataset.getRebalanceCount());
- aBigIntSerde.serialize(aBigInt, fieldValue.getDataOutput());
+ aInt64.setValue(dataset.getRebalanceCount());
+ int64Serde.serialize(aInt64, fieldValue.getDataOutput());
recordBuilder.addField(fieldName, fieldValue);
}
}
protected void writeDatasetDetailsRecordType(IARecordBuilder recordBuilder, Dataset dataset, DataOutput dataOutput)
throws HyracksDataException {
-
- dataset.getDatasetDetails().writeDatasetDetailsRecordType(fieldValue.getDataOutput());
+ dataset.getDatasetDetails().writeDatasetDetailsRecordType(dataOutput);
switch (dataset.getDatasetType()) {
case INTERNAL:
recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX, fieldValue);
@@ -514,14 +491,11 @@
return hints;
}
- @SuppressWarnings("unchecked")
protected void writeDatasetHintRecord(String name, String value, DataOutput out) throws HyracksDataException {
IARecordBuilder propertyRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
propertyRecordBuilder.reset(MetadataRecordTypes.DATASET_HINTS_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
@@ -537,5 +511,4 @@
propertyRecordBuilder.write(out, true);
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 8f630cf..a35ef8a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -19,60 +19,34 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.Calendar;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.DatasourceAdapter;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<DatasourceAdapter> {
- private static final long serialVersionUID = 6183434454125673504L;
-
- // Field indexes of serialized Adapter in a tuple.
- // First key field.
- public static final int ADAPTER_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int ADAPTER_NAME_TUPLE_FIELD_INDEX = 1;
// Payload field containing serialized Adapter.
- public static final int ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX = 2;
-
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.DATASOURCE_ADAPTER_RECORDTYPE);
+ private static final int ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX = 2;
protected DatasourceAdapterTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public DatasourceAdapter getMetadataEntityFromTuple(ITupleReference tuple)
- throws AlgebricksException, HyracksDataException {
- byte[] serRecord = tuple.getFieldData(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = tuple.getFieldStart(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = tuple.getFieldLength(ADAPTER_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord adapterRecord = recordSerDes.deserialize(in);
- return createAdapterFromARecord(adapterRecord);
- }
-
- private DatasourceAdapter createAdapterFromARecord(ARecord adapterRecord) {
- String dataverseName = ((AString) adapterRecord
+ protected DatasourceAdapter createMetadataEntityFromARecord(ARecord adapterRecord) {
+ String dataverseCanonicalName = ((AString) adapterRecord
.getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String adapterName =
((AString) adapterRecord.getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX))
.getStringValue();
@@ -86,14 +60,17 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(DatasourceAdapter adapter) throws HyracksDataException {
+ AdapterIdentifier adapterIdentifier = adapter.getAdapterIdentifier();
+ String dataverseCanonicalName = adapterIdentifier.getDataverseName().getCanonicalForm();
+
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
- aString.setValue(adapter.getAdapterIdentifier().getNamespace());
+
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
- aString.setValue(adapter.getAdapterIdentifier().getName());
+ aString.setValue(adapterIdentifier.getName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -103,13 +80,13 @@
// write field 0
fieldValue.reset();
- aString.setValue(adapter.getAdapterIdentifier().getNamespace());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
// write field 1
fieldValue.reset();
- aString.setValue(adapter.getAdapterIdentifier().getName());
+ aString.setValue(adapterIdentifier.getName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX, fieldValue);
@@ -138,5 +115,4 @@
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index 32951d5..5009f98 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -19,18 +19,14 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.io.DataOutput;
-import java.rmi.RemoteException;
import java.util.Calendar;
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.OrderedListBuilder;
import org.apache.asterix.builders.RecordBuilder;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.MetadataNode;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
@@ -51,7 +47,6 @@
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -61,15 +56,9 @@
* Translates a Datatype metadata entity to an ITupleReference and vice versa.
*/
public class DatatypeTupleTranslator extends AbstractTupleTranslator<Datatype> {
- private static final long serialVersionUID = -2324433490801381399L;
- // Field indexes of serialized Dataset in a tuple.
- // First key field.
- public static final int DATATYPE_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int DATATYPE_DATATYPE_TUPLE_FIELD_INDEX = 1;
// Payload field containing serialized Datatype.
- public static final int DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+ private static final int DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX = 2;
public enum DerivedTypeTag {
RECORD,
@@ -77,34 +66,21 @@
ORDEREDLIST
}
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATATYPE_RECORDTYPE);
- private final MetadataNode metadataNode;
- private final TxnId txnId;
+ protected final MetadataNode metadataNode;
+ protected final TxnId txnId;
protected DatatypeTupleTranslator(TxnId txnId, MetadataNode metadataNode, boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.DATATYPE_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.DATATYPE_DATASET, DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
this.txnId = txnId;
this.metadataNode = metadataNode;
}
@Override
- public Datatype getMetadataEntityFromTuple(ITupleReference frameTuple)
- throws AlgebricksException, HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(DATATYPE_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord datatypeRecord = recordSerDes.deserialize(in);
- return createDataTypeFromARecord(datatypeRecord);
- }
-
- private Datatype createDataTypeFromARecord(ARecord datatypeRecord) throws AlgebricksException {
- String dataverseName =
+ protected Datatype createMetadataEntityFromARecord(ARecord datatypeRecord) throws AlgebricksException {
+ String dataverseCanonicalName =
((AString) datatypeRecord.getValueByPos(MetadataRecordTypes.DATATYPE_ARECORD_DATAVERSENAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String datatypeName =
((AString) datatypeRecord.getValueByPos(MetadataRecordTypes.DATATYPE_ARECORD_DATATYPENAME_FIELD_INDEX))
.getStringValue();
@@ -123,8 +99,7 @@
ARecord recordType = (ARecord) derivedTypeRecord
.getValueByPos(MetadataRecordTypes.DERIVEDTYPE_ARECORD_RECORD_FIELD_INDEX);
boolean isOpen = ((ABoolean) recordType
- .getValueByPos(MetadataRecordTypes.RECORDTYPE_ARECORD_ISOPEN_FIELD_INDEX)).getBoolean()
- .booleanValue();
+ .getValueByPos(MetadataRecordTypes.RECORDTYPE_ARECORD_ISOPEN_FIELD_INDEX)).getBoolean();
int numberOfFields = ((AOrderedList) recordType
.getValueByPos(MetadataRecordTypes.RECORDTYPE_ARECORD_FIELDS_FIELD_INDEX)).size();
IACursor cursor = ((AOrderedList) recordType
@@ -142,8 +117,7 @@
((AString) field.getValueByPos(MetadataRecordTypes.FIELD_ARECORD_FIELDTYPE_FIELD_INDEX))
.getStringValue();
boolean isNullable = ((ABoolean) field
- .getValueByPos(MetadataRecordTypes.FIELD_ARECORD_ISNULLABLE_FIELD_INDEX)).getBoolean()
- .booleanValue();
+ .getValueByPos(MetadataRecordTypes.FIELD_ARECORD_ISNULLABLE_FIELD_INDEX)).getBoolean();
fieldTypes[fieldId] = BuiltinTypeMap.getTypeFromTypeName(metadataNode, txnId, dataverseName,
fieldTypeName, isNullable);
fieldId++;
@@ -177,11 +151,12 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Datatype dataType)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Datatype dataType) throws HyracksDataException {
+ String dataverseCanonicalName = dataType.getDataverseName().getCanonicalForm();
+
// write the key in the first two fields of the tuple
tupleBuilder.reset();
- aString.setValue(dataType.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
aString.setValue(dataType.getDatatypeName());
@@ -193,7 +168,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(dataType.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATATYPE_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -232,7 +207,7 @@
private void writeDerivedTypeRecord(Datatype type, AbstractComplexType derivedDatatype, DataOutput out)
throws HyracksDataException {
- DerivedTypeTag tag = null;
+ DerivedTypeTag tag;
IARecordBuilder derivedRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
switch (derivedDatatype.getTypeTag()) {
@@ -308,7 +283,7 @@
ARecordType recType = (ARecordType) type;
OrderedListBuilder listBuilder = new OrderedListBuilder();
listBuilder.reset(new AOrderedListType(MetadataRecordTypes.FIELD_RECORDTYPE, null));
- IAType fieldType = null;
+ IAType fieldType;
for (int i = 0; i < recType.getFieldNames().length; i++) {
fieldType = recType.getFieldTypes()[i];
@@ -365,7 +340,7 @@
}
private String handleNestedDerivedType(String typeName, AbstractComplexType nestedType, Datatype topLevelType,
- String dataverseName, String datatypeName) throws HyracksDataException {
+ DataverseName dataverseName, String datatypeName) throws HyracksDataException {
try {
metadataNode.addDatatype(txnId, new Datatype(dataverseName, typeName, nestedType, true));
} catch (AlgebricksException e) {
@@ -380,11 +355,6 @@
throw hde;
}
}
- } catch (RemoteException e) {
- // TODO: This should not be a HyracksDataException. Can't
- // fix this currently because of BTree exception model whose
- // fixes must get in.
- throw HyracksDataException.create(e);
}
return typeName;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
index 133ab35..10fe6bf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DataverseTupleTranslator.java
@@ -19,12 +19,9 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.Calendar;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Dataverse;
@@ -32,9 +29,6 @@
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -42,47 +36,36 @@
* Translates a Dataverse metadata entity to an ITupleReference and vice versa.
*/
public class DataverseTupleTranslator extends AbstractTupleTranslator<Dataverse> {
- private static final long serialVersionUID = -3196752600543191613L;
- // Field indexes of serialized Dataverse in a tuple.
- // Key field.
- public static final int DATAVERSE_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
// Payload field containing serialized Dataverse.
- public static final int DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX = 1;
+ private static final int DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX = 1;
- private transient AMutableInt32 aInt32;
- protected ISerializerDeserializer<AInt32> aInt32Serde;
+ protected AMutableInt32 aInt32;
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
-
- @SuppressWarnings("unchecked")
protected DataverseTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.DATAVERSE_DATASET.getFieldCount());
- aInt32 = new AMutableInt32(-1);
- aInt32Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ super(getTuple, MetadataPrimaryIndexes.DATAVERSE_DATASET, DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ aInt32 = new AMutableInt32(-1);
+ }
}
@Override
- public Dataverse getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(DATAVERSE_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord dataverseRecord = recordSerDes.deserialize(in);
- return new Dataverse(((AString) dataverseRecord.getValueByPos(0)).getStringValue(),
- ((AString) dataverseRecord.getValueByPos(1)).getStringValue(),
- ((AInt32) dataverseRecord.getValueByPos(3)).getIntegerValue());
+ protected Dataverse createMetadataEntityFromARecord(ARecord dataverseRecord) {
+ String dataverseCanonicalName = ((AString) dataverseRecord.getValueByPos(0)).getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+ String format = ((AString) dataverseRecord.getValueByPos(1)).getStringValue();
+ int pendingOp = ((AInt32) dataverseRecord.getValueByPos(3)).getIntegerValue();
+
+ return new Dataverse(dataverseName, format, pendingOp);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Dataverse instance)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Dataverse dataverse) throws HyracksDataException {
+ String dataverseCanonicalName = dataverse.getDataverseName().getCanonicalForm();
+
// write the key in the first field of the tuple
tupleBuilder.reset();
- aString.setValue(instance.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -90,13 +73,13 @@
recordBuilder.reset(MetadataRecordTypes.DATAVERSE_RECORDTYPE);
// write field 0
fieldValue.reset();
- aString.setValue(instance.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_NAME_FIELD_INDEX, fieldValue);
// write field 1
fieldValue.reset();
- aString.setValue(instance.getDataFormat());
+ aString.setValue(dataverse.getDataFormat());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_FORMAT_FIELD_INDEX, fieldValue);
@@ -108,10 +91,11 @@
// write field 3
fieldValue.reset();
- aInt32.setValue(instance.getPendingOp());
- aInt32Serde.serialize(aInt32, fieldValue.getDataOutput());
+ aInt32.setValue(dataverse.getPendingOp());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.DATAVERSE_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
+ // write record
recordBuilder.write(tupleBuilder.getDataOutput(), true);
tupleBuilder.addFieldEndOffset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
index a9fada0..abf3719 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/ExternalFileTupleTranslator.java
@@ -18,12 +18,10 @@
*/
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.Date;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.indexing.ExternalFile;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
@@ -37,60 +35,36 @@
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public class ExternalFileTupleTranslator extends AbstractTupleTranslator<ExternalFile> {
- private static final long serialVersionUID = -4966958481117396312L;
- // Field indexes of serialized ExternalFile in a tuple.
- // First key field.
- public static final int EXTERNAL_FILE_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int EXTERNAL_FILE_DATASETNAME_TUPLE_FIELD_INDEX = 1;
- // Third key field
- public static final int EXTERNAL_FILE_NUMBER_TUPLE_FIELD_INDEX = 2;
// Payload field containing serialized ExternalFile.
- public static final int EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX = 3;
+ private static final int EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX = 3;
- protected transient AMutableInt32 aInt32 = new AMutableInt32(0);
- protected transient AMutableDateTime aDateTime = new AMutableDateTime(0);
- protected transient AMutableInt64 aInt64 = new AMutableInt64(0);
+ protected AMutableInt32 aInt32;
+ protected AMutableInt64 aInt64;
+ protected AMutableDateTime aDateTime;
+ protected ISerializerDeserializer<ADateTime> dateTimeSerde;
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> intSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADateTime> dateTimeSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> longSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.EXTERNAL_FILE_RECORDTYPE);
-
protected ExternalFileTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ aInt32 = new AMutableInt32(0);
+ aInt64 = new AMutableInt64(0);
+ aDateTime = new AMutableDateTime(0);
+ dateTimeSerde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
+ }
}
@Override
- public ExternalFile getMetadataEntityFromTuple(ITupleReference tuple)
- throws AlgebricksException, HyracksDataException {
- byte[] serRecord = tuple.getFieldData(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = tuple.getFieldStart(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = tuple.getFieldLength(EXTERNAL_FILE_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord externalFileRecord = recordSerDes.deserialize(in);
- return createExternalFileFromARecord(externalFileRecord);
- }
-
- private ExternalFile createExternalFileFromARecord(ARecord externalFileRecord) {
- String dataverseName = ((AString) externalFileRecord
+ protected ExternalFile createMetadataEntityFromARecord(ARecord externalFileRecord) {
+ String dataverseCanonicalName = ((AString) externalFileRecord
.getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_DATAVERSENAME_FIELD_INDEX)).getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String datasetName = ((AString) externalFileRecord
.getValueByPos(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_DATASET_NAME_FIELD_INDEX)).getStringValue();
int fileNumber = ((AInt32) externalFileRecord
@@ -109,12 +83,13 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile)
- throws AlgebricksException, HyracksDataException {
+ public ITupleReference getTupleFromMetadataEntity(ExternalFile externalFile) throws HyracksDataException {
+ String dataverseCanonicalName = externalFile.getDataverseName().getCanonicalForm();
+
// write the key in the first 3 fields of the tuple
tupleBuilder.reset();
// dataverse name
- aString.setValue(externalFile.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// dataset name
@@ -123,7 +98,7 @@
tupleBuilder.addFieldEndOffset();
// file number
aInt32.setValue(externalFile.getFileNumber());
- intSerde.serialize(aInt32, tupleBuilder.getDataOutput());
+ int32Serde.serialize(aInt32, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// write the pay-load in the fourth field of the tuple
@@ -131,7 +106,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(externalFile.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -144,7 +119,7 @@
// write field 2
fieldValue.reset();
aInt32.setValue(externalFile.getFileNumber());
- intSerde.serialize(aInt32, fieldValue.getDataOutput());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_NUMBER_FIELD_INDEX, fieldValue);
// write field 3
@@ -156,7 +131,7 @@
// write field 4
fieldValue.reset();
aInt64.setValue(externalFile.getSize());
- longSerde.serialize(aInt64, fieldValue.getDataOutput());
+ int64Serde.serialize(aInt64, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_SIZE_FIELD_INDEX, fieldValue);
// write field 5
@@ -168,7 +143,7 @@
// write field 6
fieldValue.reset();
aInt32.setValue(externalFile.getPendingOp().ordinal());
- intSerde.serialize(aInt32, fieldValue.getDataOutput());
+ int32Serde.serialize(aInt32, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.EXTERNAL_FILE_ARECORD_FILE_PENDING_OP_FIELD_INDEX, fieldValue);
// write record
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
index 800e5df..a976e10 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedConnectionTupleTranslator.java
@@ -19,16 +19,13 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.UnorderedListBuilder;
import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.FeedConnection;
@@ -39,136 +36,124 @@
import org.apache.asterix.om.base.AUnorderedList;
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
public class FeedConnectionTupleTranslator extends AbstractTupleTranslator<FeedConnection> {
- private static final long serialVersionUID = -1798961999812829511L;
- public static final int FEED_CONN_DATAVERSE_NAME_FIELD_INDEX = 0;
- public static final int FEED_CONN_FEED_NAME_FIELD_INDEX = 1;
- public static final int FEED_CONN_DATASET_NAME_FIELD_INDEX = 2;
-
- public static final int FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX = 3;
-
- protected final transient ArrayBackedValueStorage fieldName = new ArrayBackedValueStorage();
-
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.FEED_CONNECTION_RECORDTYPE);
+ // Payload field containing serialized FeedConnection.
+ private static final int FEED_CONNECTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
public FeedConnectionTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET, FEED_CONNECTION_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public FeedConnection getMetadataEntityFromTuple(ITupleReference frameTuple)
- throws AlgebricksException, HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(FEED_CONN_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord feedConnRecord = recordSerDes.deserialize(in);
- return createFeedConnFromRecord(feedConnRecord);
- }
-
- private FeedConnection createFeedConnFromRecord(ARecord feedConnRecord) {
- String dataverseName =
- ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX))
+ protected FeedConnection createMetadataEntityFromARecord(ARecord feedConnectionRecord) {
+ String dataverseCanonicalName =
+ ((AString) feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX))
.getStringValue();
- String feedName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_FEED_NAME_FIELD_INDEX))
- .getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+ String feedName =
+ ((AString) feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_FEED_NAME_FIELD_INDEX))
+ .getStringValue();
String datasetName =
- ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX))
+ ((AString) feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX))
.getStringValue();
- String outputType = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_OUTPUT_TYPE_INDEX))
- .getStringValue();
- String policyName = ((AString) feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX))
- .getStringValue();
- ArrayList<FunctionSignature> appliedFunctions = null;
- Object o = feedConnRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_APPLIED_FUNCTIONS_FIELD_INDEX);
+ String outputType =
+ ((AString) feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_OUTPUT_TYPE_INDEX))
+ .getStringValue();
+ String policyName =
+ ((AString) feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX))
+ .getStringValue();
+ List<FunctionSignature> appliedFunctions = null;
+ Object o = feedConnectionRecord.getValueByPos(MetadataRecordTypes.FEED_CONN_APPLIED_FUNCTIONS_FIELD_INDEX);
IACursor cursor;
if (!(o instanceof ANull) && !(o instanceof AMissing)) {
appliedFunctions = new ArrayList<>();
- FunctionSignature functionSignature;
- cursor = ((AUnorderedList) feedConnRecord
- .getValueByPos(MetadataRecordTypes.FEED_CONN_APPLIED_FUNCTIONS_FIELD_INDEX)).getCursor();
+ AUnorderedList afList = (AUnorderedList) feedConnectionRecord
+ .getValueByPos(MetadataRecordTypes.FEED_CONN_APPLIED_FUNCTIONS_FIELD_INDEX);
+ cursor = afList.getCursor();
while (cursor.next()) {
- String[] functionFullName = ((AString) cursor.get()).getStringValue().split("\\.");
- functionSignature = new FunctionSignature(functionFullName[0], functionFullName[1], 1);
+ String afValue = ((AString) cursor.get()).getStringValue();
+ int pos = afValue.lastIndexOf('.'); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
+ String afDataverseCanonicalName = afValue.substring(0, pos);
+ String afName = afValue.substring(pos + 1);
+ DataverseName afDataverseName = DataverseName.createFromCanonicalForm(afDataverseCanonicalName);
+ FunctionSignature functionSignature = new FunctionSignature(afDataverseName, afName, 1);
appliedFunctions.add(functionSignature);
}
}
- int whereClauseIdx = feedConnRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_WHERE_CLAUSE);
- String whereClauseBody =
- whereClauseIdx >= 0 ? ((AString) feedConnRecord.getValueByPos(whereClauseIdx)).getStringValue() : "";
+ int whereClauseIdx = feedConnectionRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_WHERE_CLAUSE);
+ String whereClauseBody = whereClauseIdx >= 0
+ ? ((AString) feedConnectionRecord.getValueByPos(whereClauseIdx)).getStringValue() : "";
return new FeedConnection(dataverseName, feedName, datasetName, appliedFunctions, policyName, whereClauseBody,
outputType);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(FeedConnection me)
- throws AlgebricksException, HyracksDataException {
+ public ITupleReference getTupleFromMetadataEntity(FeedConnection feedConnection) throws HyracksDataException {
+ String dataverseCanonicalName = feedConnection.getDataverseName().getCanonicalForm();
+
tupleBuilder.reset();
// key: dataverse
- aString.setValue(me.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// key: feedName
- aString.setValue(me.getFeedName());
+ aString.setValue(feedConnection.getFeedName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// key: dataset
- aString.setValue(me.getDatasetName());
+ aString.setValue(feedConnection.getDatasetName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
recordBuilder.reset(MetadataRecordTypes.FEED_CONNECTION_RECORDTYPE);
+
// field dataverse
fieldValue.reset();
- aString.setValue(me.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_CONN_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
// field: feedId
fieldValue.reset();
- aString.setValue(me.getFeedName());
+ aString.setValue(feedConnection.getFeedName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_CONN_FEED_NAME_FIELD_INDEX, fieldValue);
// field: dataset
fieldValue.reset();
- aString.setValue(me.getDatasetName());
+ aString.setValue(feedConnection.getDatasetName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_CONN_DATASET_NAME_FIELD_INDEX, fieldValue);
// field: outputType
fieldValue.reset();
- aString.setValue(me.getOutputType());
+ aString.setValue(feedConnection.getOutputType());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_CONN_OUTPUT_TYPE_INDEX, fieldValue);
// field: appliedFunctions
fieldValue.reset();
- writeAppliedFunctionsField(recordBuilder, me, fieldValue);
+ writeAppliedFunctionsField(recordBuilder, feedConnection, fieldValue);
// field: policyName
fieldValue.reset();
- aString.setValue(me.getPolicyName());
+ aString.setValue(feedConnection.getPolicyName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_CONN_POLICY_FIELD_INDEX, fieldValue);
- // field: whereClauseBody
- writeOpenPart(me);
+ // write open fields
+ writeOpenFields(feedConnection);
recordBuilder.write(tupleBuilder.getDataOutput(), true);
tupleBuilder.addFieldEndOffset();
@@ -177,13 +162,18 @@
return tuple;
}
- protected void writeOpenPart(FeedConnection fc) throws HyracksDataException {
- if (fc.getWhereClauseBody() != null && fc.getWhereClauseBody().length() > 0) {
+ protected void writeOpenFields(FeedConnection feedConnection) throws HyracksDataException {
+ writeWhereClauseBody(feedConnection);
+ }
+
+ private void writeWhereClauseBody(FeedConnection feedConnection) throws HyracksDataException {
+ // field: whereClauseBody
+ if (feedConnection.getWhereClauseBody() != null && feedConnection.getWhereClauseBody().length() > 0) {
fieldName.reset();
aString.setValue(MetadataRecordTypes.FIELD_NAME_WHERE_CLAUSE);
stringSerde.serialize(aString, fieldName.getDataOutput());
fieldValue.reset();
- aString.setValue(fc.getWhereClauseBody());
+ aString.setValue(feedConnection.getWhereClauseBody());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(fieldName, fieldValue);
}
@@ -200,7 +190,7 @@
List<FunctionSignature> appliedFunctions = fc.getAppliedFunctions();
for (FunctionSignature af : appliedFunctions) {
listEleBuffer.reset();
- aString.setValue(af.getNamespace() + "." + af.getName());
+ aString.setValue(af.getDataverseName().getCanonicalForm() + '.' + af.getName()); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
stringSerde.serialize(aString, listEleBuffer.getDataOutput());
listBuilder.addItem(listEleBuffer);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index a7fc822..2407d22 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -19,9 +19,6 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.io.DataOutput;
import java.util.HashMap;
import java.util.Map;
@@ -29,20 +26,16 @@
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.RecordBuilder;
import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.FeedPolicyEntity;
-import org.apache.asterix.om.base.AInt32;
import org.apache.asterix.om.base.AMutableString;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
import org.apache.asterix.om.base.AUnorderedList;
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -51,73 +44,46 @@
* Translates a Dataset metadata entity to an ITupleReference and vice versa.
*/
public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolicyEntity> {
- private static final long serialVersionUID = 826298425589924684L;
-
- // Field indexes of serialized FeedPolicy in a tuple.
- // Key field.
- public static final int FEED_POLICY_DATAVERSE_NAME_FIELD_INDEX = 0;
-
- public static final int FEED_POLICY_POLICY_NAME_FIELD_INDEX = 1;
// Payload field containing serialized feedPolicy.
- public static final int FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
+ private static final int FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(MetadataRecordTypes.FEED_POLICY_RECORDTYPE);
- protected ISerializerDeserializer<AInt32> aInt32Serde;
-
- @SuppressWarnings("unchecked")
protected FeedPolicyTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.FEED_POLICY_DATASET.getFieldCount());
- aInt32Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
+ super(getTuple, MetadataPrimaryIndexes.FEED_POLICY_DATASET, FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord feedPolicyRecord = recordSerDes.deserialize(in);
- return createFeedPolicyFromARecord(feedPolicyRecord);
- }
-
- private FeedPolicyEntity createFeedPolicyFromARecord(ARecord feedPolicyRecord) {
- FeedPolicyEntity feedPolicy = null;
- String dataverseName = ((AString) feedPolicyRecord
+ protected FeedPolicyEntity createMetadataEntityFromARecord(ARecord feedPolicyRecord) {
+ String dataverseCanonicalName = ((AString) feedPolicyRecord
.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String policyName = ((AString) feedPolicyRecord
.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_POLICY_NAME_FIELD_INDEX)).getStringValue();
-
String description = ((AString) feedPolicyRecord
.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DESCRIPTION_FIELD_INDEX)).getStringValue();
IACursor cursor = ((AUnorderedList) feedPolicyRecord
.getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
Map<String, String> policyParamters = new HashMap<>();
- String key;
- String value;
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ String key =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ String value =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
policyParamters.put(key, value);
}
- feedPolicy = new FeedPolicyEntity(dataverseName, policyName, description, policyParamters);
- return feedPolicy;
+ return new FeedPolicyEntity(dataverseName, policyName, description, policyParamters);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy)
- throws HyracksDataException, AlgebricksException {
- // write the key in the first three fields of the tuple
- ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+ public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy) throws HyracksDataException {
+ String dataverseCanonicalName = feedPolicy.getDataverseName().getCanonicalForm();
+ // write the key in the first three fields of the tuple
tupleBuilder.reset();
- aString.setValue(feedPolicy.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -129,7 +95,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(feedPolicy.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
@@ -150,6 +116,7 @@
UnorderedListBuilder listBuilder = new UnorderedListBuilder();
listBuilder.reset((AUnorderedListType) MetadataRecordTypes.FEED_POLICY_RECORDTYPE
.getFieldTypes()[MetadataRecordTypes.FEED_POLICY_ARECORD_PROPERTIES_FIELD_INDEX]);
+ ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
for (Map.Entry<String, String> property : properties.entrySet()) {
String name = property.getKey();
String value = property.getValue();
@@ -169,13 +136,11 @@
return tuple;
}
- public void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
+ private void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
IARecordBuilder propertyRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
propertyRecordBuilder.reset(MetadataRecordTypes.POLICY_PARAMS_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
index 8cc801d..77a3c92 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -19,9 +19,6 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.io.DataOutput;
import java.util.Calendar;
import java.util.HashMap;
@@ -30,7 +27,7 @@
import org.apache.asterix.builders.IARecordBuilder;
import org.apache.asterix.builders.RecordBuilder;
import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Feed;
@@ -40,9 +37,6 @@
import org.apache.asterix.om.base.AUnorderedList;
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -51,68 +45,47 @@
* Translates a Feed metadata entity to an ITupleReference and vice versa.
*/
public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
- private static final long serialVersionUID = -5967081194106401387L;
-
- // Field indexes of serialized Feed in a tuple.
- // Key field.
- public static final int FEED_DATAVERSE_NAME_FIELD_INDEX = 0;
-
- public static final int FEED_NAME_FIELD_INDEX = 1;
// Payload field containing serialized feed.
- public static final int FEED_PAYLOAD_TUPLE_FIELD_INDEX = 2;
-
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FEED_RECORDTYPE);
+ private static final int FEED_PAYLOAD_TUPLE_FIELD_INDEX = 2;
protected FeedTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.FEED_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.FEED_DATASET, FEED_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord feedRecord = recordSerDes.deserialize(in);
- return createFeedFromARecord(feedRecord);
- }
-
- private Feed createFeedFromARecord(ARecord feedRecord) {
- Feed feed;
- String dataverseName =
+ protected Feed createMetadataEntityFromARecord(ARecord feedRecord) {
+ String dataverseCanonicalName =
((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String feedName = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_NAME_FIELD_INDEX))
.getStringValue();
AUnorderedList feedConfig =
(AUnorderedList) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX);
-
IACursor cursor = feedConfig.getCursor();
-
// restore configurations
- String key;
- String value;
Map<String, String> adaptorConfiguration = new HashMap<>();
while (cursor.next()) {
ARecord field = (ARecord) cursor.get();
- key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
- value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
+ String key =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
+ String value =
+ ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
adaptorConfiguration.put(key, value);
}
- feed = new Feed(dataverseName, feedName, adaptorConfiguration);
- return feed;
+
+ return new Feed(dataverseName, feedName, adaptorConfiguration);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Feed feed) throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Feed feed) throws HyracksDataException {
+ String dataverseCanonicalName = feed.getDataverseName().getCanonicalForm();
+
// write the key in the first two fields of the tuple
tupleBuilder.reset();
- aString.setValue(feed.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -124,7 +97,7 @@
// write dataverse name
fieldValue.reset();
- aString.setValue(feed.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX, fieldValue);
@@ -170,14 +143,11 @@
recordBuilder.addField(MetadataRecordTypes.FEED_ARECORD_ADAPTOR_CONFIG_INDEX, fieldValueBuffer);
}
- @SuppressWarnings("unchecked")
- public void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
+ private void writePropertyTypeRecord(String name, String value, DataOutput out) throws HyracksDataException {
IARecordBuilder propertyRecordBuilder = new RecordBuilder();
ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
// write field 0
fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
index c4e42d1b..ff7c675 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FunctionTupleTranslator.java
@@ -19,15 +19,12 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.asterix.builders.OrderedListBuilder;
import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Function;
@@ -38,7 +35,7 @@
import org.apache.asterix.om.types.AOrderedListType;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -47,48 +44,35 @@
* Translates a Function metadata entity to an ITupleReference and vice versa.
*/
public class FunctionTupleTranslator extends AbstractTupleTranslator<Function> {
- private static final long serialVersionUID = 1147594449575992161L;
-
- // Field indexes of serialized Function in a tuple.
- // First key field.
- public static final int FUNCTION_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int FUNCTION_FUNCTIONNAME_TUPLE_FIELD_INDEX = 1;
- // Third key field.
- public static final int FUNCTION_FUNCTIONARITY_TUPLE_FIELD_INDEX = 2;
// Payload field containing serialized Function.
- public static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
+ private static final int FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX = 3;
- private transient OrderedListBuilder dependenciesListBuilder = new OrderedListBuilder();
- private transient OrderedListBuilder dependencyListBuilder = new OrderedListBuilder();
- private transient OrderedListBuilder dependencyNameListBuilder = new OrderedListBuilder();
- private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
- private transient AOrderedListType ListofLists =
- new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
-
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.FUNCTION_RECORDTYPE);
+ protected OrderedListBuilder dependenciesListBuilder;
+ protected OrderedListBuilder dependencyListBuilder;
+ protected OrderedListBuilder dependencyNameListBuilder;
+ protected List<String> dependencySubnames;
+ protected AOrderedListType stringList;
+ protected AOrderedListType listOfLists;
protected FunctionTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.FUNCTION_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.FUNCTION_DATASET, FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ dependenciesListBuilder = new OrderedListBuilder();
+ dependencyListBuilder = new OrderedListBuilder();
+ dependencyNameListBuilder = new OrderedListBuilder();
+ dependencySubnames = new ArrayList<>(3);
+ stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+ listOfLists = new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
+ }
}
@Override
- public Function getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(FUNCTION_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord functionRecord = recordSerDes.deserialize(in);
- return createFunctionFromARecord(functionRecord);
- }
-
- private Function createFunctionFromARecord(ARecord functionRecord) {
- String dataverseName =
+ protected Function createMetadataEntityFromARecord(ARecord functionRecord) throws AlgebricksException {
+ String dataverseCanonicalName =
((AString) functionRecord.getValueByPos(MetadataRecordTypes.FUNCTION_ARECORD_DATAVERSENAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String functionName =
((AString) functionRecord.getValueByPos(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTIONNAME_FIELD_INDEX))
.getStringValue();
@@ -117,44 +101,49 @@
IACursor dependenciesCursor = ((AOrderedList) functionRecord
.getValueByPos(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_DEPENDENCIES_FIELD_INDEX)).getCursor();
- List<List<List<String>>> dependencies = new ArrayList<>();
- AOrderedList dependencyList;
- AOrderedList qualifiedList;
- int i = 0;
+ List<List<Triple<DataverseName, String, String>>> dependencies = new ArrayList<>();
while (dependenciesCursor.next()) {
- dependencies.add(new ArrayList<>());
- dependencyList = (AOrderedList) dependenciesCursor.get();
- IACursor qualifiedDependencyCursor = dependencyList.getCursor();
- int j = 0;
+ List<Triple<DataverseName, String, String>> dependencyList = new ArrayList<>();
+ IACursor qualifiedDependencyCursor = ((AOrderedList) dependenciesCursor.get()).getCursor();
while (qualifiedDependencyCursor.next()) {
- qualifiedList = (AOrderedList) qualifiedDependencyCursor.get();
- IACursor qualifiedNameCursor = qualifiedList.getCursor();
- dependencies.get(i).add(new ArrayList<>());
- while (qualifiedNameCursor.next()) {
- dependencies.get(i).get(j).add(((AString) qualifiedNameCursor.get()).getStringValue());
- }
- j++;
+ Triple<DataverseName, String, String> dependency =
+ getDependency((AOrderedList) qualifiedDependencyCursor.get());
+ dependencyList.add(dependency);
}
- i++;
-
+ dependencies.add(dependencyList);
}
FunctionSignature signature = new FunctionSignature(dataverseName, functionName, Integer.parseInt(arity));
return new Function(signature, params, returnType, definition, language, functionKind, dependencies);
}
+ private Triple<DataverseName, String, String> getDependency(AOrderedList dependencySubnames) {
+ String dataverseCanonicalName = ((AString) dependencySubnames.getItem(0)).getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+ String second = null, third = null;
+ int ln = dependencySubnames.size();
+ if (ln > 1) {
+ second = ((AString) dependencySubnames.getItem(1)).getStringValue();
+ if (ln > 2) {
+ third = ((AString) dependencySubnames.getItem(2)).getStringValue();
+ }
+ }
+ return new Triple<>(dataverseName, second, third);
+ }
+
@Override
- public ITupleReference getTupleFromMetadataEntity(Function function)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Function function) throws HyracksDataException {
+ String dataverseCanonicalName = function.getDataverseName().getCanonicalForm();
+
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
- aString.setValue(function.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
aString.setValue(function.getName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
- aString.setValue(function.getArity() + "");
+ aString.setValue(String.valueOf(function.getArity()));
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
@@ -164,7 +153,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(function.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FUNCTION_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -176,7 +165,7 @@
// write field 2
fieldValue.reset();
- aString.setValue(function.getArity() + "");
+ aString.setValue(String.valueOf(function.getArity()));
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_ARITY_FIELD_INDEX, fieldValue);
@@ -222,12 +211,12 @@
// write field 8
dependenciesListBuilder.reset((AOrderedListType) MetadataRecordTypes.FUNCTION_RECORDTYPE
.getFieldTypes()[MetadataRecordTypes.FUNCTION_ARECORD_FUNCTION_DEPENDENCIES_FIELD_INDEX]);
- List<List<List<String>>> dependenciesList = function.getDependencies();
- for (List<List<String>> dependencies : dependenciesList) {
- dependencyListBuilder.reset(ListofLists);
- for (List<String> dependency : dependencies) {
+ List<List<Triple<DataverseName, String, String>>> dependenciesList = function.getDependencies();
+ for (List<Triple<DataverseName, String, String>> dependencies : dependenciesList) {
+ dependencyListBuilder.reset(listOfLists);
+ for (Triple<DataverseName, String, String> dependency : dependencies) {
dependencyNameListBuilder.reset(stringList);
- for (String subName : dependency) {
+ for (String subName : getDependencySubNames(dependency)) {
itemValue.reset();
aString.setValue(subName);
stringSerde.serialize(aString, itemValue.getDataOutput());
@@ -254,4 +243,15 @@
return tuple;
}
+ private List<String> getDependencySubNames(Triple<DataverseName, String, String> dependency) {
+ dependencySubnames.clear();
+ dependencySubnames.add(dependency.first.getCanonicalForm());
+ if (dependency.second != null) {
+ dependencySubnames.add(dependency.second);
+ }
+ if (dependency.third != null) {
+ dependencySubnames.add(dependency.third);
+ }
+ return dependencySubnames;
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index a154d7f..5d16f56 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -19,16 +19,13 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import org.apache.asterix.builders.OrderedListBuilder;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.MetadataNode;
@@ -61,67 +58,63 @@
* Translates an Index metadata entity to an ITupleReference and vice versa.
*/
public class IndexTupleTranslator extends AbstractTupleTranslator<Index> {
- private static final long serialVersionUID = 1L;
- // Field indexes of serialized Index in a tuple.
- // First key field.
- public static final int INDEX_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int INDEX_DATASETNAME_TUPLE_FIELD_INDEX = 1;
- // Third key field.
- public static final int INDEX_INDEXNAME_TUPLE_FIELD_INDEX = 2;
+
// Payload field containing serialized Index.
- public static final int INDEX_PAYLOAD_TUPLE_FIELD_INDEX = 3;
+ private static final int INDEX_PAYLOAD_TUPLE_FIELD_INDEX = 3;
+
// Field name of open field.
public static final String GRAM_LENGTH_FIELD_NAME = "GramLength";
public static final String INDEX_SEARCHKEY_TYPE_FIELD_NAME = "SearchKeyType";
public static final String INDEX_ISENFORCED_FIELD_NAME = "IsEnforced";
public static final String INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME = "SearchKeySourceIndicator";
- private transient OrderedListBuilder listBuilder = new OrderedListBuilder();
- private transient OrderedListBuilder primaryKeyListBuilder = new OrderedListBuilder();
- private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
- private transient AOrderedListType int8List = new AOrderedListType(BuiltinType.AINT8, null);
- private transient ArrayBackedValueStorage nameValue = new ArrayBackedValueStorage();
- private transient ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
- private transient AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt32> intSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt8> int8Serde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.INDEX_RECORDTYPE);
- private final MetadataNode metadataNode;
- private final TxnId txnId;
+ protected final TxnId txnId;
+ protected final MetadataNode metadataNode;
+ protected OrderedListBuilder listBuilder;
+ protected OrderedListBuilder primaryKeyListBuilder;
+ protected AOrderedListType stringList;
+ protected AOrderedListType int8List;
+ protected ArrayBackedValueStorage nameValue;
+ protected ArrayBackedValueStorage itemValue;
+ protected AMutableInt8 aInt8;
+ protected ISerializerDeserializer<AInt8> int8Serde;
+
+ @SuppressWarnings("unchecked")
protected IndexTupleTranslator(TxnId txnId, MetadataNode metadataNode, boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.INDEX_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.INDEX_DATASET, INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
this.txnId = txnId;
this.metadataNode = metadataNode;
+ if (getTuple) {
+ listBuilder = new OrderedListBuilder();
+ primaryKeyListBuilder = new OrderedListBuilder();
+ stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+ int8List = new AOrderedListType(BuiltinType.AINT8, null);
+ nameValue = new ArrayBackedValueStorage();
+ itemValue = new ArrayBackedValueStorage();
+ aInt8 = new AMutableInt8((byte) 0);
+ int8Serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
+ }
}
@Override
- public Index getMetadataEntityFromTuple(ITupleReference frameTuple)
- throws AlgebricksException, HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(INDEX_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord rec = recordSerde.deserialize(in);
- String dvName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX))
- .getStringValue();
- String dsName = ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX))
- .getStringValue();
+ protected Index createMetadataEntityFromARecord(ARecord indexRecord) throws AlgebricksException {
+ String dataverseCanonicalName =
+ ((AString) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX))
+ .getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+ String datasetName =
+ ((AString) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX))
+ .getStringValue();
String indexName =
- ((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX)).getStringValue();
- IndexType indexStructure = IndexType
- .valueOf(((AString) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX))
+ ((AString) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX))
+ .getStringValue();
+ IndexType indexStructure = IndexType.valueOf(
+ ((AString) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX))
.getStringValue());
IACursor fieldNameCursor =
- ((AOrderedList) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX)).getCursor();
+ ((AOrderedList) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX))
+ .getCursor();
List<List<String>> searchKey = new ArrayList<>();
AOrderedList fieldNameList;
while (fieldNameCursor.next()) {
@@ -133,40 +126,42 @@
}
searchKey.add(nestedFieldName);
}
- int indexKeyTypeFieldPos = rec.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
+ int indexKeyTypeFieldPos = indexRecord.getType().getFieldIndex(INDEX_SEARCHKEY_TYPE_FIELD_NAME);
IACursor fieldTypeCursor = new ACollectionCursor();
if (indexKeyTypeFieldPos > 0) {
- fieldTypeCursor = ((AOrderedList) rec.getValueByPos(indexKeyTypeFieldPos)).getCursor();
+ fieldTypeCursor = ((AOrderedList) indexRecord.getValueByPos(indexKeyTypeFieldPos)).getCursor();
}
List<IAType> searchKeyType = new ArrayList<>(searchKey.size());
while (fieldTypeCursor.next()) {
String typeName = ((AString) fieldTypeCursor.get()).getStringValue();
- IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, txnId, dvName, typeName, false);
+ IAType fieldType = BuiltinTypeMap.getTypeFromTypeName(metadataNode, txnId, dataverseName, typeName, false);
searchKeyType.add(fieldType);
}
boolean isOverridingKeyTypes = !searchKeyType.isEmpty();
- int isEnforcedFieldPos = rec.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
+ int isEnforcedFieldPos = indexRecord.getType().getFieldIndex(INDEX_ISENFORCED_FIELD_NAME);
Boolean isEnforcingKeys = false;
if (isEnforcedFieldPos > 0) {
- isEnforcingKeys = ((ABoolean) rec.getValueByPos(isEnforcedFieldPos)).getBoolean();
+ isEnforcingKeys = ((ABoolean) indexRecord.getValueByPos(isEnforcedFieldPos)).getBoolean();
}
Boolean isPrimaryIndex =
- ((ABoolean) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX)).getBoolean();
- int pendingOp =
- ((AInt32) rec.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX)).getIntegerValue();
+ ((ABoolean) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_ISPRIMARY_FIELD_INDEX))
+ .getBoolean();
+ int pendingOp = ((AInt32) indexRecord.getValueByPos(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX))
+ .getIntegerValue();
// Check if there is a gram length as well.
int gramLength = -1;
- int gramLenPos = rec.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
+ int gramLenPos = indexRecord.getType().getFieldIndex(GRAM_LENGTH_FIELD_NAME);
if (gramLenPos >= 0) {
- gramLength = ((AInt32) rec.getValueByPos(gramLenPos)).getIntegerValue();
+ gramLength = ((AInt32) indexRecord.getValueByPos(gramLenPos)).getIntegerValue();
}
// Read a field-source-indicator field.
List<Integer> keyFieldSourceIndicator = new ArrayList<>();
- int keyFieldSourceIndicatorIndex = rec.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
+ int keyFieldSourceIndicatorIndex =
+ indexRecord.getType().getFieldIndex(INDEX_SEARCHKEY_SOURCE_INDICATOR_FIELD_NAME);
if (keyFieldSourceIndicatorIndex >= 0) {
- IACursor cursor = ((AOrderedList) rec.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
+ IACursor cursor = ((AOrderedList) indexRecord.getValueByPos(keyFieldSourceIndicatorIndex)).getCursor();
while (cursor.next()) {
keyFieldSourceIndicator.add((int) ((AInt8) cursor.get()).getByteValue());
}
@@ -179,72 +174,72 @@
// index key type information is not persisted, thus we extract type information
// from the record metadata
if (searchKeyType.isEmpty()) {
- try {
- Dataset dSet = metadataNode.getDataset(txnId, dvName, dsName);
- String datatypeName = dSet.getItemTypeName();
- String datatypeDataverseName = dSet.getItemTypeDataverseName();
- ARecordType recordDt = (ARecordType) metadataNode
- .getDatatype(txnId, datatypeDataverseName, datatypeName).getDatatype();
- String metatypeName = dSet.getMetaItemTypeName();
- String metatypeDataverseName = dSet.getMetaItemTypeDataverseName();
- ARecordType metaDt = null;
- if (metatypeName != null && metatypeDataverseName != null) {
- metaDt = (ARecordType) metadataNode.getDatatype(txnId, metatypeDataverseName, metatypeName)
- .getDatatype();
- }
- searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
- } catch (RemoteException re) {
- throw HyracksDataException.create(re);
+ Dataset dataset = metadataNode.getDataset(txnId, dataverseName, datasetName);
+ String datatypeName = dataset.getItemTypeName();
+ DataverseName datatypeDataverseName = dataset.getItemTypeDataverseName();
+ ARecordType recordDt =
+ (ARecordType) metadataNode.getDatatype(txnId, datatypeDataverseName, datatypeName).getDatatype();
+ String metatypeName = dataset.getMetaItemTypeName();
+ DataverseName metatypeDataverseName = dataset.getMetaItemTypeDataverseName();
+ ARecordType metaDt = null;
+ if (metatypeName != null && metatypeDataverseName != null) {
+ metaDt = (ARecordType) metadataNode.getDatatype(txnId, metatypeDataverseName, metatypeName)
+ .getDatatype();
}
+ searchKeyType = KeyFieldTypeUtil.getKeyTypes(recordDt, metaDt, searchKey, keyFieldSourceIndicator);
}
- return new Index(dvName, dsName, indexName, indexStructure, searchKey, keyFieldSourceIndicator, searchKeyType,
- gramLength, isOverridingKeyTypes, isEnforcingKeys, isPrimaryIndex, pendingOp);
+
+ return new Index(dataverseName, datasetName, indexName, indexStructure, searchKey, keyFieldSourceIndicator,
+ searchKeyType, gramLength, isOverridingKeyTypes, isEnforcingKeys, isPrimaryIndex, pendingOp);
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Index instance) throws HyracksDataException {
+ public ITupleReference getTupleFromMetadataEntity(Index index) throws HyracksDataException {
+ String dataverseCanonicalName = index.getDataverseName().getCanonicalForm();
+
// write the key in the first 3 fields of the tuple
tupleBuilder.reset();
- aString.setValue(instance.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
- aString.setValue(instance.getDatasetName());
+ aString.setValue(index.getDatasetName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
- aString.setValue(instance.getIndexName());
+ aString.setValue(index.getIndexName());
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
// write the payload in the fourth field of the tuple
recordBuilder.reset(MetadataRecordTypes.INDEX_RECORDTYPE);
+
// write field 0
fieldValue.reset();
- aString.setValue(instance.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.INDEX_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
// write field 1
fieldValue.reset();
- aString.setValue(instance.getDatasetName());
+ aString.setValue(index.getDatasetName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.INDEX_ARECORD_DATASETNAME_FIELD_INDEX, fieldValue);
// write field 2
fieldValue.reset();
- aString.setValue(instance.getIndexName());
+ aString.setValue(index.getIndexName());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.INDEX_ARECORD_INDEXNAME_FIELD_INDEX, fieldValue);
// write field 3
fieldValue.reset();
- aString.setValue(instance.getIndexType().toString());
+ aString.setValue(index.getIndexType().toString());
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.INDEX_ARECORD_INDEXSTRUCTURE_FIELD_INDEX, fieldValue);
// write field 4
primaryKeyListBuilder.reset((AOrderedListType) MetadataRecordTypes.INDEX_RECORDTYPE
.getFieldTypes()[MetadataRecordTypes.INDEX_ARECORD_SEARCHKEY_FIELD_INDEX]);
- List<List<String>> searchKey = instance.getKeyFieldNames();
+ List<List<String>> searchKey = index.getKeyFieldNames();
for (List<String> field : searchKey) {
listBuilder.reset(stringList);
for (String subField : field) {
@@ -263,7 +258,7 @@
// write field 5
fieldValue.reset();
- if (instance.isPrimaryIndex()) {
+ if (index.isPrimaryIndex()) {
booleanSerde.serialize(ABoolean.TRUE, fieldValue.getDataOutput());
} else {
booleanSerde.serialize(ABoolean.FALSE, fieldValue.getDataOutput());
@@ -278,21 +273,43 @@
// write field 7
fieldValue.reset();
- intSerde.serialize(new AInt32(instance.getPendingOp()), fieldValue.getDataOutput());
+ int32Serde.serialize(new AInt32(index.getPendingOp()), fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.INDEX_ARECORD_PENDINGOP_FIELD_INDEX, fieldValue);
- // write optional field 8
- if (instance.getGramLength() > 0) {
+ // write open fields
+ writeOpenFields(index);
+
+ // write record
+ recordBuilder.write(tupleBuilder.getDataOutput(), true);
+ tupleBuilder.addFieldEndOffset();
+
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ return tuple;
+ }
+
+ /**
+ * Keep protected to allow other extensions to add additional fields
+ */
+ protected void writeOpenFields(Index index) throws HyracksDataException {
+ writeGramLength(index);
+ writeSearchKeyType(index);
+ writeEnforced(index);
+ writeSearchKeySourceIndicator(index);
+ }
+
+ private void writeGramLength(Index index) throws HyracksDataException {
+ if (index.getGramLength() > 0) {
fieldValue.reset();
nameValue.reset();
aString.setValue(GRAM_LENGTH_FIELD_NAME);
stringSerde.serialize(aString, nameValue.getDataOutput());
- intSerde.serialize(new AInt32(instance.getGramLength()), fieldValue.getDataOutput());
+ int32Serde.serialize(new AInt32(index.getGramLength()), fieldValue.getDataOutput());
recordBuilder.addField(nameValue, fieldValue);
}
+ }
- if (instance.isOverridingKeyFieldTypes()) {
- // write optional field 9
+ private void writeSearchKeyType(Index index) throws HyracksDataException {
+ if (index.isOverridingKeyFieldTypes()) {
OrderedListBuilder typeListBuilder = new OrderedListBuilder();
typeListBuilder.reset(new AOrderedListType(BuiltinType.ANY, null));
nameValue.reset();
@@ -300,7 +317,7 @@
stringSerde.serialize(aString, nameValue.getDataOutput());
- List<IAType> searchKeyType = instance.getKeyFieldTypes();
+ List<IAType> searchKeyType = index.getKeyFieldTypes();
for (IAType type : searchKeyType) {
itemValue.reset();
aString.setValue(type.getTypeName());
@@ -311,21 +328,21 @@
typeListBuilder.write(fieldValue.getDataOutput(), true);
recordBuilder.addField(nameValue, fieldValue);
}
+ }
- if (instance.isEnforced()) {
- // write optional field 10
+ private void writeEnforced(Index index) throws HyracksDataException {
+ if (index.isEnforced()) {
fieldValue.reset();
nameValue.reset();
aString.setValue(INDEX_ISENFORCED_FIELD_NAME);
-
stringSerde.serialize(aString, nameValue.getDataOutput());
-
booleanSerde.serialize(ABoolean.TRUE, fieldValue.getDataOutput());
-
recordBuilder.addField(nameValue, fieldValue);
}
+ }
- List<Integer> keySourceIndicator = instance.getKeyFieldSourceIndicators();
+ private void writeSearchKeySourceIndicator(Index index) throws HyracksDataException {
+ List<Integer> keySourceIndicator = index.getKeyFieldSourceIndicators();
boolean needSerialization = false;
if (keySourceIndicator != null) {
for (int source : keySourceIndicator) {
@@ -350,12 +367,5 @@
listBuilder.write(fieldValue.getDataOutput(), true);
recordBuilder.addField(nameValue, fieldValue);
}
-
- // write record
- recordBuilder.write(tupleBuilder.getDataOutput(), true);
- tupleBuilder.addFieldEndOffset();
-
- tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
- return tuple;
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
index 8d216ff..f521001 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/LibraryTupleTranslator.java
@@ -19,19 +19,14 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.Calendar;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Library;
import org.apache.asterix.om.base.ARecord;
import org.apache.asterix.om.base.AString;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -39,40 +34,20 @@
* Translates a Library metadata entity to an ITupleReference and vice versa.
*/
public class LibraryTupleTranslator extends AbstractTupleTranslator<Library> {
- private static final long serialVersionUID = -7574173417999340281L;
-
- // Field indexes of serialized Library in a tuple.
- // First key field.
- public static final int LIBRARY_DATAVERSENAME_TUPLE_FIELD_INDEX = 0;
- // Second key field.
- public static final int LIBRARY_NAME_TUPLE_FIELD_INDEX = 1;
// Payload field containing serialized Library.
- public static final int LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
-
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.LIBRARY_RECORDTYPE);
+ private static final int LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX = 2;
protected LibraryTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.LIBRARY_DATASET, LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
}
@Override
- public Library getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(LIBRARY_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord libraryRecord = recordSerDes.deserialize(in);
- return createLibraryFromARecord(libraryRecord);
- }
-
- private Library createLibraryFromARecord(ARecord libraryRecord) {
- String dataverseName =
+ protected Library createMetadataEntityFromARecord(ARecord libraryRecord) {
+ String dataverseCanonicalName =
((AString) libraryRecord.getValueByPos(MetadataRecordTypes.LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX))
.getStringValue();
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
String libraryName =
((AString) libraryRecord.getValueByPos(MetadataRecordTypes.LIBRARY_ARECORD_NAME_FIELD_INDEX))
.getStringValue();
@@ -81,11 +56,12 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Library library)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Library library) throws HyracksDataException {
+ String dataverseCanonicalName = library.getDataverseName().getCanonicalForm();
+
// write the key in the first 2 fields of the tuple
tupleBuilder.reset();
- aString.setValue(library.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, tupleBuilder.getDataOutput());
tupleBuilder.addFieldEndOffset();
aString.setValue(library.getName());
@@ -98,7 +74,7 @@
// write field 0
fieldValue.reset();
- aString.setValue(library.getDataverseName());
+ aString.setValue(dataverseCanonicalName);
stringSerde.serialize(aString, fieldValue.getDataOutput());
recordBuilder.addField(MetadataRecordTypes.LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX, fieldValue);
@@ -117,8 +93,8 @@
// write record
recordBuilder.write(tupleBuilder.getDataOutput(), true);
tupleBuilder.addFieldEndOffset();
+
tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
return tuple;
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/MetadataTupleTranslatorProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/MetadataTupleTranslatorProvider.java
index 0625fc4..ef5256b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/MetadataTupleTranslatorProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/MetadataTupleTranslatorProvider.java
@@ -56,6 +56,10 @@
return new FeedTupleTranslator(getTuple);
}
+ public FeedConnectionTupleTranslator getFeedConnectionTupleTranslator(boolean getTuple) {
+ return new FeedConnectionTupleTranslator(getTuple);
+ }
+
public FunctionTupleTranslator getFunctionTupleTranslator(boolean getTuple) {
return new FunctionTupleTranslator(getTuple);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
index 5733de9..4d6b1b2 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeGroupTupleTranslator.java
@@ -19,15 +19,11 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.NodeGroup;
@@ -36,8 +32,6 @@
import org.apache.asterix.om.base.AUnorderedList;
import org.apache.asterix.om.base.IACursor;
import org.apache.asterix.om.types.AUnorderedListType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -47,31 +41,22 @@
*/
public class NodeGroupTupleTranslator extends AbstractTupleTranslator<NodeGroup> {
- private static final long serialVersionUID = 1L;
- // Field indexes of serialized NodeGroup in a tuple.
- // First key field.
- public static final int NODEGROUP_NODEGROUPNAME_TUPLE_FIELD_INDEX = 0;
// Payload field containing serialized NodeGroup.
- public static final int NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX = 1;
+ private static final int NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX = 1;
- private transient UnorderedListBuilder listBuilder = new UnorderedListBuilder();
- private transient ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ARecord> recordSerDes =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(MetadataRecordTypes.NODEGROUP_RECORDTYPE);
+ protected UnorderedListBuilder listBuilder;
+ protected ArrayBackedValueStorage itemValue;
protected NodeGroupTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.NODEGROUP_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.NODEGROUP_DATASET, NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ listBuilder = new UnorderedListBuilder();
+ itemValue = new ArrayBackedValueStorage();
+ }
}
@Override
- public NodeGroup getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
- byte[] serRecord = frameTuple.getFieldData(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordStartOffset = frameTuple.getFieldStart(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
- int recordLength = frameTuple.getFieldLength(NODEGROUP_PAYLOAD_TUPLE_FIELD_INDEX);
- ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
- DataInput in = new DataInputStream(stream);
- ARecord nodeGroupRecord = recordSerDes.deserialize(in);
+ protected NodeGroup createMetadataEntityFromARecord(ARecord nodeGroupRecord) {
String gpName =
((AString) nodeGroupRecord.getValueByPos(MetadataRecordTypes.NODEGROUP_ARECORD_GROUPNAME_FIELD_INDEX))
.getStringValue();
@@ -85,8 +70,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(NodeGroup instance)
- throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(NodeGroup instance) throws HyracksDataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeGroupName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
index 8510535..f0fbc4d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/NodeTupleTranslator.java
@@ -19,16 +19,12 @@
package org.apache.asterix.metadata.entitytupletranslators;
-import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
import org.apache.asterix.metadata.entities.Node;
-import org.apache.asterix.om.base.AInt64;
import org.apache.asterix.om.base.AMutableInt64;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.asterix.om.base.ARecord;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -36,30 +32,21 @@
* Translates a Node metadata entity to an ITupleReference and vice versa.
*/
public class NodeTupleTranslator extends AbstractTupleTranslator<Node> {
- private static final long serialVersionUID = -5257435809246039182L;
- // Field indexes of serialized Node in a tuple.
- // First key field.
- public static final int NODE_NODENAME_TUPLE_FIELD_INDEX = 0;
// Payload field containing serialized Node.
- public static final int NODE_PAYLOAD_TUPLE_FIELD_INDEX = 1;
+ private static final int NODE_PAYLOAD_TUPLE_FIELD_INDEX = 1;
- private transient AMutableInt64 aInt64 = new AMutableInt64(-1);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
-
- // @SuppressWarnings("unchecked")
- // private ISerializerDeserializer<ARecord> recordSerDes =
- // NonTaggedSerializerDeserializerProvider.INSTANCE
- // .getSerializerDeserializer(recordType);
+ protected AMutableInt64 aInt64;
protected NodeTupleTranslator(boolean getTuple) {
- super(getTuple, MetadataPrimaryIndexes.NODE_DATASET.getFieldCount());
+ super(getTuple, MetadataPrimaryIndexes.NODE_DATASET, NODE_PAYLOAD_TUPLE_FIELD_INDEX);
+ if (getTuple) {
+ aInt64 = new AMutableInt64(-1);
+ }
}
@Override
- public Node getMetadataEntityFromTuple(ITupleReference frameTuple) {
+ protected Node createMetadataEntityFromARecord(ARecord nodeRecord) {
throw new NotImplementedException();
// TODO: Implement this.
// try {
@@ -85,7 +72,7 @@
}
@Override
- public ITupleReference getTupleFromMetadataEntity(Node instance) throws HyracksDataException, AlgebricksException {
+ public ITupleReference getTupleFromMetadataEntity(Node instance) throws HyracksDataException {
// write the key in the first field of the tuple
tupleBuilder.reset();
aString.setValue(instance.getNodeName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index 3ae0fec..9635479 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -28,6 +28,7 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.MetadataException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
@@ -62,9 +63,9 @@
private FeedMetadataUtil() {
}
- public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, String dataverse,
+ public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, DataverseName dataverseName,
String datasetName) throws AlgebricksException {
- Dataset dataset = metadataProvider.findDataset(dataverse, datasetName);
+ Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new CompilationException("Unknown target dataset :" + datasetName);
}
@@ -76,18 +77,18 @@
return dataset;
}
- public static Feed validateIfFeedExists(String dataverse, String feedName, MetadataTransactionContext ctx)
- throws AlgebricksException {
- Feed feed = MetadataManager.INSTANCE.getFeed(ctx, dataverse, feedName);
+ public static Feed validateIfFeedExists(DataverseName dataverseName, String feedName,
+ MetadataTransactionContext ctx) throws AlgebricksException {
+ Feed feed = MetadataManager.INSTANCE.getFeed(ctx, dataverseName, feedName);
if (feed == null) {
throw new CompilationException("Unknown source feed: " + feedName);
}
return feed;
}
- public static FeedPolicyEntity validateIfPolicyExists(String dataverse, String policyName,
+ public static FeedPolicyEntity validateIfPolicyExists(DataverseName dataverseName, String policyName,
MetadataTransactionContext ctx) throws AlgebricksException {
- FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, dataverse, policyName);
+ FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, dataverseName, policyName);
if (feedPolicy == null) {
feedPolicy =
MetadataManager.INSTANCE.getFeedPolicy(ctx, MetadataConstants.METADATA_DATAVERSE_NAME, policyName);
@@ -276,31 +277,17 @@
}
public static ARecordType getOutputType(IFeed feed, String fqOutputType) throws AlgebricksException {
- ARecordType outputType = null;
-
if (fqOutputType == null) {
return null;
}
- String[] dataverseAndType = fqOutputType.split("[.]");
- String dataverseName;
- String datatypeName;
- if (dataverseAndType.length == 1) {
- datatypeName = dataverseAndType[0];
- dataverseName = feed.getDataverseName();
- } else if (dataverseAndType.length == 2) {
- dataverseName = dataverseAndType[0];
- datatypeName = dataverseAndType[1];
- } else {
- throw new IllegalArgumentException("Invalid parameter value " + fqOutputType);
- }
-
+ ARecordType outputType = null;
MetadataTransactionContext ctx = null;
try {
ctx = MetadataManager.INSTANCE.beginTransaction();
- Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, dataverseName, datatypeName);
+ Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, feed.getDataverseName(), fqOutputType);
if (t == null || t.getDatatype().getTypeTag() != ATypeTag.OBJECT) {
- throw new MetadataException(ErrorCode.FEED_METADATA_UTIL_UNEXPECTED_FEED_DATATYPE, datatypeName);
+ throw new MetadataException(ErrorCode.FEED_METADATA_UTIL_UNEXPECTED_FEED_DATATYPE, fqOutputType);
}
outputType = (ARecordType) t.getDatatype();
MetadataManager.INSTANCE.commitTransaction(ctx);
@@ -311,7 +298,7 @@
} catch (ACIDException | RemoteException e2) {
e.addSuppressed(e2);
}
- throw new MetadataException(ErrorCode.FEED_CREATE_FEED_DATATYPE_ERROR, e, datatypeName);
+ throw new MetadataException(ErrorCode.FEED_CREATE_FEED_DATATYPE_ERROR, e, fqOutputType);
}
}
return outputType;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
index e203c36..ca54e32 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtil.java
@@ -35,7 +35,6 @@
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
-import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
public class ExternalFunctionCompilerUtil {
@@ -66,8 +65,6 @@
private static IFunctionInfo getScalarFunctionInfo(MetadataTransactionContext txnCtx, Function function)
throws AlgebricksException {
- FunctionIdentifier fid =
- new FunctionIdentifier(function.getDataverseName(), function.getName(), function.getArity());
List<IAType> argumentTypes = new ArrayList<>();
IAType returnType = getTypeInfo(function.getReturnType(), txnCtx, function);;
for (String argumentType : function.getArguments()) {
@@ -75,7 +72,7 @@
}
IResultTypeComputer typeComputer = new ExternalTypeComputer(returnType, argumentTypes);
- return new ExternalScalarFunctionInfo(fid.getNamespace(), fid.getName(), fid.getArity(), returnType,
+ return new ExternalScalarFunctionInfo(function.getSignature().createFunctionIdentifier(), returnType,
function.getFunctionBody(), function.getLanguage(), argumentTypes, typeComputer);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
index 93165dc..3bfcc2e 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
@@ -24,6 +24,7 @@
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
public class ExternalScalarFunctionInfo extends ExternalFunctionInfo {
@@ -31,7 +32,11 @@
public ExternalScalarFunctionInfo(String namespace, String name, int arity, IAType returnType, String body,
String language, List<IAType> argumentTypes, IResultTypeComputer rtc) {
- super(namespace, name, arity, FunctionKind.SCALAR, argumentTypes, returnType, rtc, body, language);
+ this(new FunctionIdentifier(namespace, name, arity), returnType, body, language, argumentTypes, rtc);
}
+ public ExternalScalarFunctionInfo(FunctionIdentifier fid, IAType returnType, String body, String language,
+ List<IAType> argumentTypes, IResultTypeComputer rtc) {
+ super(fid, FunctionKind.SCALAR, argumentTypes, returnType, rtc, body, language);
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/DatasetLock.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/DatasetLock.java
index 56da9d5..e0a6725 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/DatasetLock.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/DatasetLock.java
@@ -30,7 +30,7 @@
public class DatasetLock implements IMetadataLock {
- private final String key;
+ private final MetadataLockKey key;
// The lock
private final ReentrantReadWriteLock lock;
// Used for lock upgrade operation
@@ -42,7 +42,7 @@
private final MutableInt indexBuildCounter;
private final MutableInt dsModifyCounter;
- public DatasetLock(String key) {
+ public DatasetLock(MetadataLockKey key) {
this.key = key;
lock = new ReentrantReadWriteLock(true);
upgradeLock = new ReentrantReadWriteLock(true);
@@ -244,7 +244,7 @@
}
@Override
- public String getKey() {
+ public MetadataLockKey getKey() {
return key;
}
@@ -266,6 +266,6 @@
@Override
public String toString() {
- return key;
+ return String.valueOf(key);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLock.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLock.java
index 48f315b..74d2232 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLock.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLock.java
@@ -24,11 +24,11 @@
import org.apache.asterix.common.metadata.IMetadataLock;
public class MetadataLock implements IMetadataLock {
- private final String key;
+ private final MetadataLockKey key;
private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(true);
- public MetadataLock(String key) {
- this.key = key;
+ public MetadataLock(MetadataLockKey key) {
+ this.key = Objects.requireNonNull(key);
}
@Override
@@ -56,7 +56,7 @@
}
@Override
- public String getKey() {
+ public MetadataLockKey getKey() {
return key;
}
@@ -78,6 +78,6 @@
@Override
public String toString() {
- return key;
+ return String.valueOf(key);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockKey.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockKey.java
new file mode 100644
index 0000000..0df1c2c
--- /dev/null
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockKey.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.metadata.lock;
+
+import java.util.Objects;
+
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.common.metadata.IMetadataLock;
+
+/**
+ * Immutable, value-based key identifying a metadata entity for locking purposes.
+ * Replaces the previous string-concatenation keys ("Dataverse:" + name, etc.) so that
+ * multipart dataverse names cannot collide with entity-name separators.
+ * Instances are created only through the static factory methods below and are used as
+ * {@link java.util.concurrent.ConcurrentHashMap} keys in MetadataLockManager, hence the
+ * strict {@code equals}/{@code hashCode} contract.
+ */
+final class MetadataLockKey implements IMetadataLock.LockKey {
+
+    // Kind of metadata entity this key locks; part of the key's identity.
+    enum EntityKind {
+        ACTIVE,
+        DATASET,
+        DATATYPE,
+        DATAVERSE,
+        EXTENSION,
+        FEED_POLICY,
+        FUNCTION,
+        MERGE_POLICY,
+        NODE_GROUP
+    }
+
+    private final EntityKind entityKind;
+
+    // Extra discriminator used only by EXTENSION keys; null for all other kinds.
+    private final String entityKindExtension;
+
+    // Null for entities that are not dataverse-scoped (NODE_GROUP, MERGE_POLICY).
+    private final DataverseName dataverseName;
+
+    // Null for DATAVERSE keys, where the dataverse name itself is the identity.
+    private final String entityName;
+
+    /**
+     * @param entityKind kind of locked entity, never null
+     * @param entityKindExtension extension discriminator, may be null
+     * @param dataverseName dataverse of the entity; may be null only if entityName is non-null
+     * @param entityName entity name; may be null only if dataverseName is non-null
+     * @throws NullPointerException if entityKind is null, or both names are null
+     */
+    private MetadataLockKey(EntityKind entityKind, String entityKindExtension, DataverseName dataverseName,
+            String entityName) {
+        if (entityKind == null) {
+            throw new NullPointerException("entityKind");
+        }
+        if (dataverseName == null && entityName == null) {
+            // at least one component is required, otherwise all keys of a kind would be equal
+            throw new NullPointerException("dataverseName and entityName are both null");
+        }
+        this.entityKind = entityKind;
+        this.entityKindExtension = entityKindExtension;
+        this.dataverseName = dataverseName;
+        this.entityName = entityName;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        MetadataLockKey that = (MetadataLockKey) o;
+        return entityKind == that.entityKind && Objects.equals(entityKindExtension, that.entityKindExtension)
+                && Objects.equals(dataverseName, that.dataverseName) && Objects.equals(entityName, that.entityName);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(entityKind, entityKindExtension, dataverseName, entityName);
+    }
+
+    /** Diagnostic form only (lock dumps / toString of locks); not used as a map key. */
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder(64);
+        sb.append(entityKind);
+        if (entityKindExtension != null) {
+            sb.append(':').append(entityKindExtension);
+        }
+        if (dataverseName != null) {
+            // canonical form is unambiguous for multipart dataverse names
+            sb.append(':').append(dataverseName.getCanonicalForm());
+        }
+        sb.append(':').append(entityName);
+        return sb.toString();
+    }
+
+    static MetadataLockKey createDataverseLockKey(DataverseName dataverseName) {
+        return new MetadataLockKey(EntityKind.DATAVERSE, null, dataverseName, null);
+    }
+
+    static MetadataLockKey createDatasetLockKey(DataverseName dataverseName, String datasetName) {
+        return new MetadataLockKey(EntityKind.DATASET, null, dataverseName, datasetName);
+    }
+
+    static MetadataLockKey createDataTypeLockKey(DataverseName dataverseName, String datatypeName) {
+        return new MetadataLockKey(EntityKind.DATATYPE, null, dataverseName, datatypeName);
+    }
+
+    static MetadataLockKey createFunctionLockKey(DataverseName dataverseName, String functionName) {
+        return new MetadataLockKey(EntityKind.FUNCTION, null, dataverseName, functionName);
+    }
+
+    static MetadataLockKey createActiveEntityLockKey(DataverseName dataverseName, String entityName) {
+        return new MetadataLockKey(EntityKind.ACTIVE, null, dataverseName, entityName);
+    }
+
+    static MetadataLockKey createFeedPolicyLockKey(DataverseName dataverseName, String feedPolicyName) {
+        return new MetadataLockKey(EntityKind.FEED_POLICY, null, dataverseName, feedPolicyName);
+    }
+
+    static MetadataLockKey createExtensionEntityLockKey(String extension, DataverseName dataverseName,
+            String entityName) {
+        return new MetadataLockKey(EntityKind.EXTENSION, extension, dataverseName, entityName);
+    }
+
+    // NODE_GROUP and MERGE_POLICY entities are global (not dataverse-scoped), so dataverseName is null.
+    static MetadataLockKey createNodeGroupLockKey(String nodeGroupName) {
+        return new MetadataLockKey(EntityKind.NODE_GROUP, null, null, nodeGroupName);
+    }
+
+    static MetadataLockKey createMergePolicyLockKey(String mergePolicyName) {
+        return new MetadataLockKey(EntityKind.MERGE_POLICY, null, null, mergePolicyName);
+    }
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockManager.java
index 61bcbc7..dd2517a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/lock/MetadataLockManager.java
@@ -19,195 +19,202 @@
package org.apache.asterix.metadata.lock;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import org.apache.asterix.common.api.IMetadataLockManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.IMetadataLock;
import org.apache.asterix.common.metadata.LockList;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
public class MetadataLockManager implements IMetadataLockManager {
- private static final Function<String, MetadataLock> LOCK_FUNCTION = MetadataLock::new;
- private static final Function<String, DatasetLock> DATASET_LOCK_FUNCTION = DatasetLock::new;
+ private static final Function<MetadataLockKey, MetadataLock> LOCK_FUNCTION = MetadataLock::new;
+ private static final Function<MetadataLockKey, DatasetLock> DATASET_LOCK_FUNCTION = DatasetLock::new;
- private final ConcurrentHashMap<String, IMetadataLock> mdlocks;
-
- private static final String DATAVERSE_PREFIX = "Dataverse:";
- private static final String DATASET_PREFIX = "Dataset:";
- private static final String FUNCTION_PREFIX = "Function:";
- private static final String NODE_GROUP_PREFIX = "NodeGroup:";
- private static final String ACTIVE_PREFIX = "Active:";
- private static final String FEED_POLICY_PREFIX = "FeedPolicy:";
- private static final String MERGE_POLICY_PREFIX = "MergePolicy:";
- private static final String DATATYPE_PREFIX = "DataType:";
- private static final String EXTENSION_PREFIX = "Extension:";
+ private final ConcurrentMap<MetadataLockKey, IMetadataLock> mdlocks;
public MetadataLockManager() {
mdlocks = new ConcurrentHashMap<>();
}
@Override
- public void acquireDataverseReadLock(LockList locks, String dataverseName) throws AlgebricksException {
- String key = DATAVERSE_PREFIX + dataverseName;
+ public void acquireDataverseReadLock(LockList locks, DataverseName dataverseName) throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDataverseLockKey(dataverseName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireDataverseWriteLock(LockList locks, String dataverseName) throws AlgebricksException {
- String key = DATAVERSE_PREFIX + dataverseName;
+ public void acquireDataverseWriteLock(LockList locks, DataverseName dataverseName) throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDataverseLockKey(dataverseName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireDatasetReadLock(LockList locks, String datasetName) throws AlgebricksException {
- String key = DATASET_PREFIX + datasetName;
+ public void acquireDatasetReadLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireDatasetWriteLock(LockList locks, String datasetName) throws AlgebricksException {
- String key = DATASET_PREFIX + datasetName;
+ public void acquireDatasetWriteLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireDatasetModifyLock(LockList locks, String datasetName) throws AlgebricksException {
- String key = DATASET_PREFIX + datasetName;
+ public void acquireDatasetModifyLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.MODIFY, lock);
}
@Override
- public void acquireDatasetCreateIndexLock(LockList locks, String datasetName) throws AlgebricksException {
- String dsKey = DATASET_PREFIX + datasetName;
- DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(dsKey, DATASET_LOCK_FUNCTION);
+ public void acquireDatasetCreateIndexLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
+ DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.INDEX_BUILD, lock);
}
@Override
- public void acquireDatasetExclusiveModificationLock(LockList locks, String datasetName) throws AlgebricksException {
- String key = DATASET_PREFIX + datasetName;
+ public void acquireDatasetExclusiveModificationLock(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.EXCLUSIVE_MODIFY, lock);
}
@Override
- public void acquireFunctionReadLock(LockList locks, String functionName) throws AlgebricksException {
- String key = FUNCTION_PREFIX + functionName;
+ public void acquireFunctionReadLock(LockList locks, DataverseName dataverseName, String functionName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createFunctionLockKey(dataverseName, functionName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireFunctionWriteLock(LockList locks, String functionName) throws AlgebricksException {
- String key = FUNCTION_PREFIX + functionName;
+ public void acquireFunctionWriteLock(LockList locks, DataverseName dataverseName, String functionName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createFunctionLockKey(dataverseName, functionName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
public void acquireNodeGroupReadLock(LockList locks, String nodeGroupName) throws AlgebricksException {
- String key = NODE_GROUP_PREFIX + nodeGroupName;
+ MetadataLockKey key = MetadataLockKey.createNodeGroupLockKey(nodeGroupName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
public void acquireNodeGroupWriteLock(LockList locks, String nodeGroupName) throws AlgebricksException {
- String key = NODE_GROUP_PREFIX + nodeGroupName;
+ MetadataLockKey key = MetadataLockKey.createNodeGroupLockKey(nodeGroupName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireActiveEntityReadLock(LockList locks, String entityName) throws AlgebricksException {
- String key = ACTIVE_PREFIX + entityName;
+ public void acquireActiveEntityReadLock(LockList locks, DataverseName dataverseName, String entityName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createActiveEntityLockKey(dataverseName, entityName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireActiveEntityWriteLock(LockList locks, String entityName) throws AlgebricksException {
- String key = ACTIVE_PREFIX + entityName;
+ public void acquireActiveEntityWriteLock(LockList locks, DataverseName dataverseName, String entityName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createActiveEntityLockKey(dataverseName, entityName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireFeedPolicyWriteLock(LockList locks, String feedPolicyName) throws AlgebricksException {
- String key = FEED_POLICY_PREFIX + feedPolicyName;
+ public void acquireFeedPolicyWriteLock(LockList locks, DataverseName dataverseName, String feedPolicyName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createFeedPolicyLockKey(dataverseName, feedPolicyName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireFeedPolicyReadLock(LockList locks, String feedPolicyName) throws AlgebricksException {
- String key = FEED_POLICY_PREFIX + feedPolicyName;
+ public void acquireFeedPolicyReadLock(LockList locks, DataverseName dataverseName, String feedPolicyName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createFeedPolicyLockKey(dataverseName, feedPolicyName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
public void acquireMergePolicyReadLock(LockList locks, String mergePolicyName) throws AlgebricksException {
- String key = MERGE_POLICY_PREFIX + mergePolicyName;
+ MetadataLockKey key = MetadataLockKey.createMergePolicyLockKey(mergePolicyName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
public void acquireMergePolicyWriteLock(LockList locks, String mergePolicyName) throws AlgebricksException {
- String key = MERGE_POLICY_PREFIX + mergePolicyName;
+ MetadataLockKey key = MetadataLockKey.createMergePolicyLockKey(mergePolicyName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireDataTypeReadLock(LockList locks, String datatypeName) throws AlgebricksException {
- String key = DATATYPE_PREFIX + datatypeName;
+ public void acquireDataTypeReadLock(LockList locks, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDataTypeLockKey(dataverseName, datatypeName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireDataTypeWriteLock(LockList locks, String datatypeName) throws AlgebricksException {
- String key = DATATYPE_PREFIX + datatypeName;
+ public void acquireDataTypeWriteLock(LockList locks, DataverseName dataverseName, String datatypeName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDataTypeLockKey(dataverseName, datatypeName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void acquireExtensionReadLock(LockList locks, String extension, String entityName)
- throws AlgebricksException {
- String key = EXTENSION_PREFIX + extension + entityName;
+ public void acquireExtensionEntityReadLock(LockList locks, String extension, DataverseName dataverseName,
+ String entityName) throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createExtensionEntityLockKey(extension, dataverseName, entityName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.READ, lock);
}
@Override
- public void acquireExtensionWriteLock(LockList locks, String extension, String entityName)
- throws AlgebricksException {
- String key = EXTENSION_PREFIX + extension + entityName;
+ public void acquireExtensionEntityWriteLock(LockList locks, String extension, DataverseName dataverseName,
+ String entityName) throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createExtensionEntityLockKey(extension, dataverseName, entityName);
IMetadataLock lock = mdlocks.computeIfAbsent(key, LOCK_FUNCTION);
locks.add(IMetadataLock.Mode.WRITE, lock);
}
@Override
- public void upgradeDatasetLockToWrite(LockList locks, String fullyQualifiedName) throws AlgebricksException {
- String key = DATASET_PREFIX + fullyQualifiedName;
+ public void upgradeDatasetLockToWrite(LockList locks, DataverseName dataverseName, String datasetName)
+ throws AlgebricksException {
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.upgrade(IMetadataLock.Mode.UPGRADED_WRITE, lock);
}
@Override
- public void downgradeDatasetLockToExclusiveModify(LockList locks, String fullyQualifiedName)
+ public void downgradeDatasetLockToExclusiveModify(LockList locks, DataverseName dataverseName, String datasetName)
throws AlgebricksException {
- String key = DATASET_PREFIX + fullyQualifiedName;
+ MetadataLockKey key = MetadataLockKey.createDatasetLockKey(dataverseName, datasetName);
DatasetLock lock = (DatasetLock) mdlocks.computeIfAbsent(key, DATASET_LOCK_FUNCTION);
locks.downgrade(IMetadataLock.Mode.EXCLUSIVE_MODIFY, lock);
}
+
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 4b7d359..84166ce 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -35,6 +35,7 @@
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.IRecoveryManager;
import org.apache.asterix.external.indexing.IndexingConstants;
import org.apache.asterix.formats.base.IDataFormat;
@@ -342,7 +343,7 @@
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
MetadataProvider metadataProvider) throws AlgebricksException {
- String dataverseName = dataverse.getDataverseName();
+ DataverseName dataverseName = dataverse.getDataverseName();
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
if (dataset == null) {
throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
@@ -531,12 +532,12 @@
return keyProviderOp;
}
- public static boolean isFullyQualifiedName(String datasetName) {
- return datasetName.indexOf('.') > 0; // NOSONAR a fully qualified name can't start with a .
+ public static String getFullyQualifiedDisplayName(Dataset dataset) {
+ return getFullyQualifiedDisplayName(dataset.getDataverseName(), dataset.getDatasetName());
}
- public static String getFullyQualifiedName(Dataset dataset) {
- return dataset.getDataverseName() + '.' + dataset.getDatasetName();
+ public static String getFullyQualifiedDisplayName(DataverseName dataverseName, String datasetName) {
+ return dataverseName + "." + datasetName;
}
/***
@@ -553,8 +554,8 @@
* @return the name of the created node group.
* @throws Exception
*/
- public static String createNodeGroupForNewDataset(String dataverseName, String datasetName, Set<String> ncNames,
- MetadataProvider metadataProvider) throws Exception {
+ public static String createNodeGroupForNewDataset(DataverseName dataverseName, String datasetName,
+ Set<String> ncNames, MetadataProvider metadataProvider) throws Exception {
return createNodeGroupForNewDataset(dataverseName, datasetName, 0L, ncNames, metadataProvider);
}
@@ -574,8 +575,8 @@
* @return the name of the created node group.
* @throws Exception
*/
- public static String createNodeGroupForNewDataset(String dataverseName, String datasetName, long rebalanceCount,
- Set<String> ncNames, MetadataProvider metadataProvider) throws Exception {
+ public static String createNodeGroupForNewDataset(DataverseName dataverseName, String datasetName,
+ long rebalanceCount, Set<String> ncNames, MetadataProvider metadataProvider) throws Exception {
ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
String nodeGroup = dataverseName + "." + datasetName + (rebalanceCount == 0L ? "" : "_" + rebalanceCount);
MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
@@ -589,18 +590,4 @@
return nodeGroup;
}
- // This doesn't work if the dataset or the dataverse name contains a '.'
- public static Pair<String, String> getDatasetInfo(MetadataProvider metadata, String datasetArg) {
- String first;
- String second;
- int i = datasetArg.indexOf('.');
- if (i > 0 && i < datasetArg.length() - 1) {
- first = datasetArg.substring(0, i);
- second = datasetArg.substring(i + 1);
- } else {
- first = metadata.getDefaultDataverse() == null ? null : metadata.getDefaultDataverse().getDataverseName();
- second = datasetArg;
- }
- return new Pair<>(first, second);
- }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
index b763954..f1fe625 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/InvertedIndexResourceFactoryProvider.java
@@ -35,7 +35,6 @@
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
-import org.apache.asterix.om.utils.RecordUtil;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -151,7 +150,7 @@
// Sanity checks.
if (numPrimaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
- indexType, RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ indexType, DatasetUtil.getFullyQualifiedDisplayName(dataset));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, numSecondaryKeys,
@@ -187,7 +186,7 @@
// Sanity checks.
if (numPrimaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
- indexType, RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ indexType, DatasetUtil.getFullyQualifiedDisplayName(dataset));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, numSecondaryKeys,
@@ -224,7 +223,7 @@
// Sanity checks.
if (numPrimaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
- indexType, RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ indexType, DatasetUtil.getFullyQualifiedDisplayName(dataset));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, numSecondaryKeys,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataConstants.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataConstants.java
index 8446154..c263ee6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataConstants.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataConstants.java
@@ -19,13 +19,15 @@
package org.apache.asterix.metadata.utils;
+import org.apache.asterix.common.metadata.DataverseName;
+
/**
* Contains metadata constants
*/
public class MetadataConstants {
// Name of the dataverse the metadata lives in.
- public static final String METADATA_DATAVERSE_NAME = "Metadata";
+ public static final DataverseName METADATA_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Metadata");
// Name of the node group where metadata is stored on.
public static final String METADATA_NODEGROUP_NAME = "MetadataGroup";
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
index baac712..e6ab9c8 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataLockUtil.java
@@ -19,6 +19,7 @@
package org.apache.asterix.metadata.utils;
import org.apache.asterix.common.api.IMetadataLockManager;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.LockList;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -27,10 +28,10 @@
private MetadataLockUtil() {
}
- public static void createDatasetBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String itemTypeDataverseName, String itemTypeFullyQualifiedName, String metaItemTypeDataverseName,
- String metaItemTypeFullyQualifiedName, String nodeGroupName, String compactionPolicyName,
- String datasetFullyQualifiedName, boolean isDefaultCompactionPolicy) throws AlgebricksException {
+ public static void createDatasetBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName, DataverseName itemTypeDataverseName, String itemTypeName,
+ DataverseName metaItemTypeDataverseName, String metaItemTypeName, String nodeGroupName,
+ String compactionPolicyName, boolean isDefaultCompactionPolicy) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
if (!dataverseName.equals(itemTypeDataverseName)) {
lockMgr.acquireDataverseReadLock(locks, itemTypeDataverseName);
@@ -39,10 +40,10 @@
&& !metaItemTypeDataverseName.equals(itemTypeDataverseName)) {
lockMgr.acquireDataverseReadLock(locks, metaItemTypeDataverseName);
}
- lockMgr.acquireDataTypeReadLock(locks, itemTypeFullyQualifiedName);
- if (metaItemTypeFullyQualifiedName != null
- && !metaItemTypeFullyQualifiedName.equals(itemTypeFullyQualifiedName)) {
- lockMgr.acquireDataTypeReadLock(locks, metaItemTypeFullyQualifiedName);
+ lockMgr.acquireDataTypeReadLock(locks, itemTypeDataverseName, itemTypeName);
+ if (metaItemTypeDataverseName != null && !metaItemTypeDataverseName.equals(itemTypeDataverseName)
+ && !metaItemTypeName.equals(itemTypeName)) {
+ lockMgr.acquireDataTypeReadLock(locks, metaItemTypeDataverseName, metaItemTypeName);
}
if (nodeGroupName != null) {
lockMgr.acquireNodeGroupReadLock(locks, nodeGroupName);
@@ -50,119 +51,123 @@
if (!isDefaultCompactionPolicy) {
lockMgr.acquireMergePolicyReadLock(locks, compactionPolicyName);
}
- lockMgr.acquireDatasetWriteLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetWriteLock(locks, dataverseName, datasetName);
}
- public static void createIndexBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void createIndexBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetCreateIndexLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetCreateIndexLock(locks, dataverseName, datasetName);
}
- public static void dropIndexBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void dropIndexBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetWriteLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetWriteLock(locks, dataverseName, datasetName);
}
- public static void createTypeBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String itemTypeFullyQualifiedName) throws AlgebricksException {
+ public static void createTypeBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String typeName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDataTypeWriteLock(locks, itemTypeFullyQualifiedName);
+ lockMgr.acquireDataTypeWriteLock(locks, dataverseName, typeName);
}
- public static void dropDatasetBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void dropDatasetBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetWriteLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetWriteLock(locks, dataverseName, datasetName);
}
- public static void dropTypeBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String dataTypeFullyQualifiedName) throws AlgebricksException {
+ public static void dropTypeBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String typeName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDataTypeWriteLock(locks, dataTypeFullyQualifiedName);
+ lockMgr.acquireDataTypeWriteLock(locks, dataverseName, typeName);
}
- public static void functionStatementBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String functionFullyQualifiedName) throws AlgebricksException {
+ public static void createFunctionBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String functionName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireFunctionWriteLock(locks, functionFullyQualifiedName);
+ lockMgr.acquireFunctionWriteLock(locks, dataverseName, functionName);
}
- public static void modifyDatasetBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void dropFunctionBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String functionName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetModifyLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireFunctionWriteLock(locks, dataverseName, functionName);
+ }
+
+ public static void modifyDatasetBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
+ lockMgr.acquireDataverseReadLock(locks, dataverseName);
+ lockMgr.acquireDatasetModifyLock(locks, dataverseName, datasetName);
}
public static void insertDeleteUpsertBegin(IMetadataLockManager lockMgr, LockList locks,
- String datasetFullyQualifiedName) throws AlgebricksException {
- lockMgr.acquireDataverseReadLock(locks,
- MetadataUtil.getDataverseFromFullyQualifiedName(datasetFullyQualifiedName));
- lockMgr.acquireDatasetModifyLock(locks, datasetFullyQualifiedName);
- }
-
- public static void dropFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String feedFullyQualifiedName) throws AlgebricksException {
+ DataverseName dataverseName, String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityWriteLock(locks, feedFullyQualifiedName);
+ lockMgr.acquireDatasetModifyLock(locks, dataverseName, datasetName);
}
- public static void dropFeedPolicyBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String policyName) throws AlgebricksException {
- lockMgr.acquireActiveEntityWriteLock(locks, policyName);
- lockMgr.acquireDataverseReadLock(locks, dataverseName);
- }
-
- public static void startFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
+ public static void dropFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
String feedName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityReadLock(locks, feedName);
+ lockMgr.acquireActiveEntityWriteLock(locks, dataverseName, feedName);
}
- public static void stopFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
+ public static void dropFeedPolicyBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String policyName) throws AlgebricksException {
+ lockMgr.acquireDataverseReadLock(locks, dataverseName);
+ lockMgr.acquireActiveEntityWriteLock(locks, dataverseName, policyName);
+ }
+
+ public static void startFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String feedName) throws AlgebricksException {
+ lockMgr.acquireDataverseReadLock(locks, dataverseName);
+ lockMgr.acquireActiveEntityReadLock(locks, dataverseName, feedName);
+ }
+
+ public static void stopFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
String feedName) throws AlgebricksException {
// TODO: dataset lock?
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityReadLock(locks, feedName);
+ lockMgr.acquireActiveEntityReadLock(locks, dataverseName, feedName);
}
- public static void createFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String feedFullyQualifiedName) throws AlgebricksException {
+ public static void createFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String feedName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityWriteLock(locks, feedFullyQualifiedName);
+ lockMgr.acquireActiveEntityWriteLock(locks, dataverseName, feedName);
}
- public static void connectFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName, String feedFullyQualifiedName) throws AlgebricksException {
+ public static void connectFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName, String feedName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityReadLock(locks, feedFullyQualifiedName);
- lockMgr.acquireDatasetReadLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireActiveEntityReadLock(locks, dataverseName, feedName);
+ lockMgr.acquireDatasetReadLock(locks, dataverseName, datasetName);
}
- public static void createFeedPolicyBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
+ public static void createFeedPolicyBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
String policyName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireFeedPolicyWriteLock(locks, policyName);
+ lockMgr.acquireFeedPolicyWriteLock(locks, dataverseName, policyName);
}
- public static void disconnectFeedBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName, String feedFullyQualifiedName) throws AlgebricksException {
+ public static void disconnectFeedBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName, String feedName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireActiveEntityReadLock(locks, feedFullyQualifiedName);
- lockMgr.acquireDatasetReadLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireActiveEntityReadLock(locks, dataverseName, feedName);
+ lockMgr.acquireDatasetReadLock(locks, dataverseName, datasetName);
}
- public static void compactBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void compactBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetReadLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetReadLock(locks, dataverseName, datasetName);
}
- public static void refreshDatasetBegin(IMetadataLockManager lockMgr, LockList locks, String dataverseName,
- String datasetFullyQualifiedName) throws AlgebricksException {
+ public static void refreshDatasetBegin(IMetadataLockManager lockMgr, LockList locks, DataverseName dataverseName,
+ String datasetName) throws AlgebricksException {
lockMgr.acquireDataverseReadLock(locks, dataverseName);
- lockMgr.acquireDatasetExclusiveModificationLock(locks, datasetFullyQualifiedName);
+ lockMgr.acquireDatasetExclusiveModificationLock(locks, dataverseName, datasetName);
}
-
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
index e5d4721..3133aba 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
@@ -38,9 +38,4 @@
return "Unknown Pending Operation";
}
}
-
- public static String getDataverseFromFullyQualifiedName(String datasetName) {
- int idx = datasetName.indexOf('.');
- return datasetName.substring(0, idx);
- }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
index 25071bc..734f913 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedInvertedIndexOperationsHelper.java
@@ -27,7 +27,6 @@
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
-import org.apache.asterix.om.utils.RecordUtil;
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -85,8 +84,7 @@
// Sanity checks.
if (numPrimaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
- sourceLoc, indexType,
- RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ sourceLoc, indexType, DatasetUtil.getFullyQualifiedDisplayName(dataset));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, sourceLoc,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index 30ebb3e..2ac2048 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -28,7 +28,6 @@
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
-import org.apache.asterix.om.utils.RecordUtil;
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -88,8 +87,7 @@
// Sanity checks.
if (numPrimaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
- sourceLoc, indexType,
- RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ sourceLoc, indexType, DatasetUtil.getFullyQualifiedDisplayName(dataset));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, sourceLoc,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
index 8ac69e4..b93674c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
@@ -27,6 +27,7 @@
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.exceptions.MetadataException;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.utils.StoragePathUtil;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -43,13 +44,14 @@
private SplitsAndConstraintsUtil() {
}
- private static FileSplit[] getDataverseSplits(IClusterStateManager clusterStateManager, String dataverseName) {
+ private static FileSplit[] getDataverseSplits(IClusterStateManager clusterStateManager,
+ DataverseName dataverseName) {
List<FileSplit> splits = new ArrayList<>();
// get all partitions
ClusterPartition[] clusterPartition = clusterStateManager.getClusterPartitons();
for (int j = 0; j < clusterPartition.length; j++) {
File f = new File(StoragePathUtil.prepareStoragePartitionPath(clusterPartition[j].getPartitionId()),
- dataverseName);
+ dataverseName.getCanonicalForm()); //TODO(MULTI_PART_DATAVERSE_NAME):REVISIT
splits.add(StoragePathUtil.getFileSplitForClusterPartition(clusterPartition[j], f.getPath()));
}
return splits.toArray(new FileSplit[] {});
@@ -83,8 +85,8 @@
}
public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> getDataverseSplitProviderAndConstraints(
- IClusterStateManager clusterStateManager, String dataverse) {
- FileSplit[] splits = getDataverseSplits(clusterStateManager, dataverse);
+ IClusterStateManager clusterStateManager, DataverseName dataverseName) {
+ FileSplit[] splits = getDataverseSplits(clusterStateManager, dataverseName);
return StoragePathUtil.splitProviderAndPartitionConstraints(splits);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
index 54d94a9..5346ffd 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/valueextractors/MetadataEntityValueExtractor.java
@@ -19,8 +19,6 @@
package org.apache.asterix.metadata.valueextractors;
-import java.rmi.RemoteException;
-
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.metadata.api.IMetadataEntityTupleTranslator;
import org.apache.asterix.metadata.api.IValueExtractor;
@@ -39,8 +37,7 @@
}
@Override
- public T getValue(TxnId txnId, ITupleReference tuple)
- throws AlgebricksException, HyracksDataException, RemoteException {
+ public T getValue(TxnId txnId, ITupleReference tuple) throws AlgebricksException, HyracksDataException {
return tupleReaderWriter.getMetadataEntityFromTuple(tuple);
}
}
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
index 902ee41..58f03af 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslatorTest.java
@@ -24,6 +24,7 @@
import java.util.Map;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
@@ -51,9 +52,10 @@
indicator == null ? null : Collections.singletonList(indicator),
Collections.singletonList(BuiltinType.AINT64), false, Collections.emptyList());
- Dataset dataset = new Dataset("test", "log", "foo", "LogType", "CB", "MetaType", "DEFAULT_NG_ALL_NODES",
- "prefix", compactionPolicyProperties, details, Collections.emptyMap(), DatasetType.INTERNAL, 115, 0,
- CompressionManager.NONE);
+ Dataset dataset = new Dataset(DataverseName.createSinglePartName("test"), "log",
+ DataverseName.createSinglePartName("foo"), "LogType", DataverseName.createSinglePartName("CB"),
+ "MetaType", "DEFAULT_NG_ALL_NODES", "prefix", compactionPolicyProperties, details,
+ Collections.emptyMap(), DatasetType.INTERNAL, 115, 0, CompressionManager.NONE);
DatasetTupleTranslator dtTranslator = new DatasetTupleTranslator(true);
ITupleReference tuple = dtTranslator.getTupleFromMetadataEntity(dataset);
Dataset deserializedDataset = dtTranslator.getMetadataEntityFromTuple(tuple);
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
index 7080dee..cc02c49 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
@@ -30,6 +30,7 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.metadata.MetadataNode;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Datatype;
@@ -62,19 +63,23 @@
indicator == null ? null : Collections.singletonList(indicator),
Collections.singletonList(BuiltinType.AINT64), false, Collections.emptyList());
- Dataset dataset = new Dataset("test", "d1", "foo", "LogType", "CB", "MetaType", "DEFAULT_NG_ALL_NODES",
+ DataverseName dvTest = DataverseName.createSinglePartName("test");
+ DataverseName dvFoo = DataverseName.createSinglePartName("foo");
+ DataverseName dvCB = DataverseName.createSinglePartName("CB");
+ Dataset dataset = new Dataset(dvTest, "d1", dvFoo, "LogType", dvCB, "MetaType", "DEFAULT_NG_ALL_NODES",
"prefix", compactionPolicyProperties, details, Collections.emptyMap(), DatasetType.INTERNAL, 115, 0,
CompressionManager.NONE);
- Index index = new Index("test", "d1", "i1", IndexType.BTREE,
+ Index index = new Index(dvTest, "d1", "i1", IndexType.BTREE,
Collections.singletonList(Collections.singletonList("row_id")),
indicator == null ? null : Collections.singletonList(indicator),
Collections.singletonList(BuiltinType.AINT64), -1, false, false, false, 0);
MetadataNode mockMetadataNode = mock(MetadataNode.class);
- when(mockMetadataNode.getDatatype(any(), anyString(), anyString())).thenReturn(new Datatype("test", "d1",
+ when(mockMetadataNode.getDatatype(any(), any(DataverseName.class), anyString())).thenReturn(new Datatype(
+ dvTest, "d1",
new ARecordType("", new String[] { "row_id" }, new IAType[] { BuiltinType.AINT64 }, true), true));
- when(mockMetadataNode.getDataset(any(), anyString(), anyString())).thenReturn(dataset);
+ when(mockMetadataNode.getDataset(any(), any(DataverseName.class), anyString())).thenReturn(dataset);
IndexTupleTranslator idxTranslator = new IndexTupleTranslator(null, mockMetadataNode, true);
ITupleReference tuple = idxTranslator.getTupleFromMetadataEntity(index);
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtilTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtilTest.java
index 78145c1..85778f0 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtilTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/functions/ExternalFunctionCompilerUtilTest.java
@@ -21,6 +21,7 @@
import java.util.LinkedList;
import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.transactions.TxnId;
import org.apache.asterix.metadata.MetadataTransactionContext;
import org.apache.asterix.metadata.entities.Function;
@@ -36,7 +37,7 @@
public void test() throws AlgebricksException {
// given
MetadataTransactionContext txnCtx = new MetadataTransactionContext(new TxnId(1));
- FunctionSignature signature = new FunctionSignature("test", "test", 0);
+ FunctionSignature signature = new FunctionSignature(DataverseName.createSinglePartName("test"), "test", 0);
Function function = new Function(signature, new LinkedList<>(), "{{ASTRING}}", "", "JAVA", "SCALAR", null);
// when
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/lock/MetadataLockManagerTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/lock/MetadataLockManagerTest.java
index 4382860..9fbc1c2 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/lock/MetadataLockManagerTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/lock/MetadataLockManagerTest.java
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.concurrent.Semaphore;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.common.metadata.LockList;
import org.apache.hyracks.api.util.SingleThreadEventProcessor;
import org.junit.Assert;
@@ -49,13 +50,15 @@
}
private final Statement statement;
- private final String dataset;
+ private final DataverseName dataverseName;
+ private final String datasetName;
private boolean done;
private int step = 0;
- public Request(Statement statement, String dataset) {
+ public Request(Statement statement, DataverseName dataverseName, String datasetName) {
this.statement = statement;
- this.dataset = dataset;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
done = false;
}
@@ -63,8 +66,12 @@
return statement;
}
+ DataverseName dataverse() {
+ return dataverseName;
+ }
+
String dataset() {
- return dataset;
+ return datasetName;
}
synchronized void complete() {
@@ -115,28 +122,28 @@
step.acquire();
switch (req.statement()) {
case INDEX:
- lockManager.acquireDatasetCreateIndexLock(locks, req.dataset());
+ lockManager.acquireDatasetCreateIndexLock(locks, req.dataverse(), req.dataset());
break;
case MODIFY:
- lockManager.acquireDatasetModifyLock(locks, req.dataset());
+ lockManager.acquireDatasetModifyLock(locks, req.dataverse(), req.dataset());
break;
case EXCLUSIVE_MODIFY:
- lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataset());
+ lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataverse(), req.dataset());
break;
case EXCLUSIVE_MODIFY_UPGRADE:
- lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataset());
+ lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataverse(), req.dataset());
req.step();
step.acquire();
- lockManager.upgradeDatasetLockToWrite(locks, req.dataset());
+ lockManager.upgradeDatasetLockToWrite(locks, req.dataverse(), req.dataset());
break;
case EXCLUSIVE_MODIFY_UPGRADE_DOWNGRADE:
- lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataset());
+ lockManager.acquireDatasetExclusiveModificationLock(locks, req.dataverse(), req.dataset());
req.step();
step.acquire();
- lockManager.upgradeDatasetLockToWrite(locks, req.dataset());
+ lockManager.upgradeDatasetLockToWrite(locks, req.dataverse(), req.dataset());
req.step();
step.acquire();
- lockManager.downgradeDatasetLockToExclusiveModify(locks, req.dataset());
+ lockManager.downgradeDatasetLockToExclusiveModify(locks, req.dataverse(), req.dataset());
break;
default:
break;
@@ -155,13 +162,14 @@
@Test
public void testDatasetLockMultipleIndexBuildsSingleModifier() throws Exception {
MetadataLockManager lockManager = new MetadataLockManager();
- String dataset = "Dataset";
+ DataverseName dataverseName = DataverseName.createSinglePartName("Dataverse");
+ String datasetName = "Dataset";
User till = new User("till", lockManager);
- Request tReq = new Request(Request.Statement.INDEX, dataset);
+ Request tReq = new Request(Request.Statement.INDEX, dataverseName, datasetName);
User dmitry = new User("dmitry", lockManager);
- Request dReq = new Request(Request.Statement.INDEX, dataset);
+ Request dReq = new Request(Request.Statement.INDEX, dataverseName, datasetName);
User mike = new User("mike", lockManager);
- Request mReq = new Request(Request.Statement.MODIFY, dataset);
+ Request mReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
// Till builds an index
till.add(tReq);
// Dmitry builds an index
@@ -202,13 +210,14 @@
@Test
public void testDatasetLockMultipleModifiersSingleIndexBuilder() throws Exception {
MetadataLockManager lockManager = new MetadataLockManager();
- String dataset = "Dataset";
+ DataverseName dataverseName = DataverseName.createSinglePartName("Dataverse");
+ String datasetName = "Dataset";
User till = new User("till", lockManager);
- Request tReq = new Request(Request.Statement.MODIFY, dataset);
+ Request tReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
User dmitry = new User("dmitry", lockManager);
- Request dReq = new Request(Request.Statement.MODIFY, dataset);
+ Request dReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
User mike = new User("mike", lockManager);
- Request mReq = new Request(Request.Statement.INDEX, dataset);
+ Request mReq = new Request(Request.Statement.INDEX, dataverseName, datasetName);
// Till modifies
till.add(tReq);
// Dmitry modifies
@@ -249,13 +258,14 @@
@Test
public void testDatasetLockMultipleModifiersSingleExclusiveModifier() throws Exception {
MetadataLockManager lockManager = new MetadataLockManager();
- String dataset = "Dataset";
+ DataverseName dataverseName = DataverseName.createSinglePartName("Dataverse");
+ String datasetName = "Dataset";
User till = new User("till", lockManager);
- Request tReq = new Request(Request.Statement.MODIFY, dataset);
+ Request tReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
User dmitry = new User("dmitry", lockManager);
- Request dReq = new Request(Request.Statement.MODIFY, dataset);
+ Request dReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
User mike = new User("mike", lockManager);
- Request mReq = new Request(Request.Statement.EXCLUSIVE_MODIFY, dataset);
+ Request mReq = new Request(Request.Statement.EXCLUSIVE_MODIFY, dataverseName, datasetName);
// Till starts
till.add(tReq);
till.step();
@@ -280,7 +290,7 @@
// Ensure that Mike got the lock
mReq.await(1);
// Till submits another request
- tReq = new Request(Request.Statement.MODIFY, dataset);
+ tReq = new Request(Request.Statement.MODIFY, dataverseName, datasetName);
till.add(tReq);
till.step();
// Ensure that Till didn't get the lock
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
index 953abf0..0459195 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
@@ -508,7 +508,8 @@
public static final FunctionIdentifier STRING_SPLIT =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "split", 2);
- public static final FunctionIdentifier DATASET = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "dataset", 1);
+ public static final FunctionIdentifier DATASET =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "dataset", FunctionIdentifier.VARARGS); // 1 or 2
public static final FunctionIdentifier FEED_COLLECT =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "feed-collect", 6);
public static final FunctionIdentifier FEED_INTERCEPT =
@@ -1563,6 +1564,9 @@
public static final FunctionIdentifier META_KEY =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta-key", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier DECODE_DATAVERSE_NAME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "decode-dataverse-name", 1);
+
public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
return registeredFunctions.get(fid);
}
@@ -2314,6 +2318,8 @@
addFunction(META, OpenARecordTypeComputer.INSTANCE, true);
addPrivateFunction(META_KEY, AnyTypeComputer.INSTANCE, false);
+ addFunction(DECODE_DATAVERSE_NAME, OrderedListOfAStringTypeComputer.INSTANCE, true);
+
addPrivateFunction(COLLECTION_TO_SEQUENCE, CollectionToSequenceTypeComputer.INSTANCE, true);
// external lookup
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
index c1ad794..8938094 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
@@ -23,6 +23,7 @@
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
public class ExternalFunctionInfo extends FunctionInfo implements IExternalFunctionInfo {
@@ -37,8 +38,13 @@
public ExternalFunctionInfo(String namespace, String name, int arity, FunctionKind kind, List<IAType> argumentTypes,
IAType returnType, IResultTypeComputer rtc, String body, String language) {
+ this(new FunctionIdentifier(namespace, name, arity), kind, argumentTypes, returnType, rtc, body, language);
+ }
+
+ public ExternalFunctionInfo(FunctionIdentifier fid, FunctionKind kind, List<IAType> argumentTypes,
+ IAType returnType, IResultTypeComputer rtc, String body, String language) {
// TODO: fix CheckNonFunctionalExpressionVisitor once we have non-functional external functions
- super(namespace, name, arity, true);
+ super(fid, true);
this.rtc = rtc;
this.argumentTypes = argumentTypes;
this.body = body;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/FunctionInfoRepository.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/FunctionInfoRepository.java
index 20839f9..e9634b5 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/FunctionInfoRepository.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/FunctionInfoRepository.java
@@ -32,17 +32,19 @@
functionMap = new ConcurrentHashMap<>();
}
- public IFunctionInfo get(String namespace, String name, int arity) {
- FunctionSignature functionSignature = new FunctionSignature(namespace, name, arity);
+ private IFunctionInfo get(FunctionSignature functionSignature) {
return functionMap.get(functionSignature);
}
+ private void put(FunctionSignature functionSignature, IFunctionInfo fInfo) {
+ functionMap.put(functionSignature, fInfo);
+ }
+
public IFunctionInfo get(FunctionIdentifier fid) {
- return get(fid.getNamespace(), fid.getName(), fid.getArity());
+ return get(new FunctionSignature(fid));
}
public void put(FunctionIdentifier fid, IFunctionInfo fInfo) {
- FunctionSignature functionSignature = new FunctionSignature(fid);
- functionMap.put(functionSignature, fInfo);
+ put(new FunctionSignature(fid), fInfo);
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/TypeSignature.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/TypeSignature.java
index f4c7869..ac6af0a 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/TypeSignature.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/TypeSignature.java
@@ -18,16 +18,20 @@
*/
package org.apache.asterix.om.types;
+import java.util.Objects;
+
+import org.apache.asterix.common.metadata.DataverseName;
+
public class TypeSignature {
- private final String dataverse;
+ private final DataverseName dataverseName;
private final String name;
private final String alias;
- public TypeSignature(String namespace, String name) {
- this.dataverse = namespace;
+ public TypeSignature(DataverseName dataverseName, String name) {
+ this.dataverseName = dataverseName;
this.name = name;
- this.alias = dataverse + "@" + name;
+ this.alias = (dataverseName != null ? dataverseName.getCanonicalForm() : null) + "@" + name;
}
@Override
@@ -36,7 +40,7 @@
return false;
} else {
TypeSignature f = ((TypeSignature) o);
- return dataverse.equals(f.getNamespace()) && name.equals(f.getName());
+ return Objects.equals(dataverseName, f.getDataverseName()) && name.equals(f.getName());
}
}
@@ -50,12 +54,11 @@
return alias.hashCode();
}
- public String getNamespace() {
- return dataverse;
+ public DataverseName getDataverseName() {
+ return dataverseName;
}
public String getName() {
return name;
}
-
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/AdmNodeUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/AdmNodeUtils.java
index f8ce905..03ec540 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/AdmNodeUtils.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/AdmNodeUtils.java
@@ -31,6 +31,13 @@
import org.apache.asterix.object.base.AdmObjectNode;
import org.apache.asterix.object.base.AdmStringNode;
import org.apache.asterix.object.base.IAdmNode;
+import org.apache.asterix.om.base.ABoolean;
+import org.apache.asterix.om.base.ADouble;
+import org.apache.asterix.om.base.AInt64;
+import org.apache.asterix.om.base.AOrderedList;
+import org.apache.asterix.om.base.ARecord;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.IAObject;
import org.apache.asterix.om.pointables.nonvisitor.AListPointable;
import org.apache.asterix.om.pointables.nonvisitor.ARecordPointable;
import org.apache.asterix.om.types.AOrderedListType;
@@ -57,6 +64,49 @@
return map;
}
+ public static Map<String, IAdmNode> getOpenFields(ARecord record, ARecordType recordType) {
+ Map<String, IAdmNode> map = Collections.emptyMap();
+ for (String fieldName : record.getType().getFieldNames()) {
+ if (recordType.isClosedField(fieldName)) {
+ continue;
+ }
+ if (map.isEmpty()) {
+ map = new HashMap<>();
+ }
+ IAObject value = record.getValueByPos(record.getType().getFieldIndex(fieldName));
+ map.put(fieldName, getAsAdmNode(value));
+ }
+ return map;
+ }
+
+ public static IAdmNode getAsAdmNode(IAObject value) {
+ ATypeTag tag = value.getType().getTypeTag();
+ switch (tag) {
+ case ARRAY:
+ AOrderedList inList = (AOrderedList) value;
+ int ln = inList.size();
+ AdmArrayNode outList = new AdmArrayNode(ln);
+ for (int i = 0; i < ln; i++) {
+ outList.add(getAsAdmNode(inList.getItem(i)));
+ }
+ return outList;
+ case BIGINT:
+ return new AdmBigIntNode(((AInt64) value).getLongValue());
+ case BOOLEAN:
+ return AdmBooleanNode.get(((ABoolean) value).getBoolean());
+ case DOUBLE:
+ return new AdmDoubleNode(((ADouble) value).getDoubleValue());
+ case NULL:
+ return AdmNullNode.INSTANCE;
+ case OBJECT:
+ return new AdmObjectNode(getOpenFields((ARecord) value, RecordUtil.FULLY_OPEN_RECORD_TYPE));
+ case STRING:
+ return new AdmStringNode(((AString) value).getStringValue());
+ default:
+ throw new UnsupportedOperationException("Unsupported item type: " + tag);
+ }
+ }
+
public static IAdmNode getAsAdmNode(IPointable pointable) throws IOException {
byte[] bytes = pointable.getByteArray();
int offset = pointable.getStartOffset();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DecodeDataverseNameDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DecodeDataverseNameDescriptor.java
new file mode 100644
index 0000000..3274926
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DecodeDataverseNameDescriptor.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import java.io.DataOutput;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.builders.OrderedListBuilder;
+import org.apache.asterix.common.annotations.MissingNullInOutFunction;
+import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.exceptions.ExceptionUtil;
+import org.apache.asterix.om.functions.BuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IPointable;
+import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+@MissingNullInOutFunction
+public final class DecodeDataverseNameDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+ private static final long serialVersionUID = 1L;
+
+ public static final IFunctionDescriptorFactory FACTORY = DecodeDataverseNameDescriptor::new;
+
+ @Override
+ public IScalarEvaluatorFactory createEvaluatorFactory(IScalarEvaluatorFactory[] args) {
+ return new IScalarEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public IScalarEvaluator createScalarEvaluator(IEvaluatorContext ctx) throws HyracksDataException {
+ return new AbstractScalarEval(sourceLoc, getIdentifier()) {
+ private final IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
+
+ private final VoidPointable arg0 = VoidPointable.FACTORY.createPointable();
+ private final UTF8StringPointable strPtr = new UTF8StringPointable();
+ private final List<String> dataverseNameParts = new ArrayList<>();
+
+ private final AOrderedListType listType = new AOrderedListType(BuiltinType.ASTRING, null);
+ private final OrderedListBuilder listBuilder = new OrderedListBuilder();
+ private final ArrayBackedValueStorage itemStorage = new ArrayBackedValueStorage();
+ private final DataOutput itemOut = itemStorage.getDataOutput();
+ private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
+ private final DataOutput resultOut = resultStorage.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ private final ISerializerDeserializer stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+ private final AMutableString aString = new AMutableString("");
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
+ eval0.evaluate(tuple, arg0);
+ if (PointableHelper.checkAndSetMissingOrNull(result, arg0)) {
+ return;
+ }
+
+ byte[] bytes = arg0.getByteArray();
+ int offset = arg0.getStartOffset();
+ int len = arg0.getLength();
+
+ // Type check.
+ if (bytes[offset] != ATypeTag.SERIALIZED_STRING_TYPE_TAG) {
+ PointableHelper.setNull(result);
+ ExceptionUtil.warnTypeMismatch(ctx, sourceLoc, getIdentifier(), bytes[offset], 0,
+ ATypeTag.STRING);
+ return;
+ }
+
+ strPtr.set(bytes, offset + 1, len - 1);
+ String dataverseCanonicalName = strPtr.toString();
+
+ DataverseName dataverseName = DataverseName.createFromCanonicalForm(dataverseCanonicalName);
+
+ dataverseNameParts.clear();
+ dataverseName.getParts(dataverseNameParts);
+
+ resultStorage.reset();
+ listBuilder.reset(listType);
+ for (String part : dataverseNameParts) {
+ itemStorage.reset();
+ aString.setValue(part);
+ stringSerde.serialize(aString, itemOut);
+ listBuilder.addItem(itemStorage);
+ }
+ listBuilder.write(resultOut, true);
+ result.set(resultStorage);
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return BuiltinFunctions.DECODE_DATAVERSE_NAME;
+ }
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
index 13f8d7c..769f853 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/functions/FunctionCollection.java
@@ -331,6 +331,7 @@
import org.apache.asterix.runtime.evaluators.functions.CreateQueryUIDDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateRectangleDescriptor;
import org.apache.asterix.runtime.evaluators.functions.CreateUUIDDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.DecodeDataverseNameDescriptor;
import org.apache.asterix.runtime.evaluators.functions.DeepEqualityDescriptor;
import org.apache.asterix.runtime.evaluators.functions.FullTextContainsDescriptor;
import org.apache.asterix.runtime.evaluators.functions.FullTextContainsWithoutOptionDescriptor;
@@ -1176,6 +1177,7 @@
fc.add(RecordPairsDescriptor.FACTORY);
// Other functions
+ fc.add(DecodeDataverseNameDescriptor.FACTORY);
fc.add(RandomWithSeedDescriptor.FACTORY);
ServiceLoader.load(IFunctionRegistrant.class).iterator().forEachRemaining(c -> c.register(fc));
diff --git a/asterixdb/asterix-test-framework/src/main/resources/Catalog.xsd b/asterixdb/asterix-test-framework/src/main/resources/Catalog.xsd
index 5212244..d96ed27 100644
--- a/asterixdb/asterix-test-framework/src/main/resources/Catalog.xsd
+++ b/asterixdb/asterix-test-framework/src/main/resources/Catalog.xsd
@@ -104,17 +104,7 @@
<xs:complexType>
<xs:sequence>
- <xs:element name="test-case" type="test:test-case" minOccurs="0" maxOccurs="unbounded">
- <xs:unique name="unique-expected-error">
- <xs:selector xpath=".//test:expected-error"/>
- <xs:field xpath="."/>
- </xs:unique>
- <xs:unique name="unique-expected-warn">
- <xs:selector xpath=".//test:expected-warn"/>
- <xs:field xpath="."/>
- </xs:unique>
- </xs:element>
-
+ <xs:element name="test-case" type="test:test-case" minOccurs="0" maxOccurs="unbounded" />
<xs:element ref="test:test-group" minOccurs="0" maxOccurs="unbounded"/>
</xs:sequence>
<xs:attribute name="name" type="xs:string" use="required"/>
@@ -173,6 +163,16 @@
</xs:annotation>
</xs:element>
+ <!-- Zero or more expected warnings for this test -->
+
+ <xs:element name="expected-warn" type="xs:string" minOccurs="0" maxOccurs="unbounded">
+ <xs:annotation>
+ <xs:documentation>
+ Zero or more expected warnings for this query.
+ </xs:documentation>
+ </xs:annotation>
+ </xs:element>
+
<!-- Whether the source location is expected in the error message -->
<xs:element name="source-location" type="xs:boolean" minOccurs="0">
@@ -183,16 +183,6 @@
</xs:documentation>
</xs:annotation>
</xs:element>
-
- <!-- Zero or more expected warnings for this test -->
-
- <xs:element name="expected-warn" type="xs:string" minOccurs="0" maxOccurs="unbounded">
- <xs:annotation>
- <xs:documentation>
- Zero or more expected warnings for this query.
- </xs:documentation>
- </xs:annotation>
- </xs:element>
</xs:sequence>
<!-- This name is always equal to the name of the test case -->
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
index 9f886b5..1bb7728 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/translator/ADGenDmlTranslator.java
@@ -23,6 +23,7 @@
import java.util.Map;
import org.apache.asterix.common.annotations.TypeDataGen;
+import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.statement.DataverseDecl;
import org.apache.asterix.lang.common.statement.TypeDecl;
@@ -46,15 +47,14 @@
}
public void translate() throws AlgebricksException {
- String defaultDataverse = getDefaultDataverse();
+ DataverseName defaultDataverse = getDefaultDataverse();
types = new HashMap<>();
typeDataGenMap = new HashMap<>();
for (Statement stmt : statements) {
if (stmt.getKind() == Statement.Kind.TYPE_DECL) {
TypeDecl td = (TypeDecl) stmt;
- String typeDataverse =
- td.getDataverseName() == null ? defaultDataverse : td.getDataverseName().getValue();
+ DataverseName typeDataverse = td.getDataverseName() == null ? defaultDataverse : td.getDataverseName();
Map<TypeSignature, IAType> typeInStmt = TypeTranslator.computeTypes(mdTxnCtx, td.getTypeDef(),
td.getIdent().getValue(), typeDataverse, types);
@@ -69,10 +69,10 @@
}
}
- private String getDefaultDataverse() {
+ private DataverseName getDefaultDataverse() {
for (Statement stmt : statements) {
if (stmt.getKind() == Statement.Kind.DATAVERSE_DECL) {
- return ((DataverseDecl) stmt).getDataverseName().getValue();
+ return ((DataverseDecl) stmt).getDataverseName();
}
}
return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/utils/Triple.java b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/utils/Triple.java
index a30ee12..f6591e0 100644
--- a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/utils/Triple.java
+++ b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/utils/Triple.java
@@ -18,10 +18,13 @@
*/
package org.apache.hyracks.algebricks.common.utils;
+import java.io.Serializable;
import java.util.Objects;
//TODO: Remove and use apache commons lang3 instead
-public class Triple<T1, T2, T3> {
+public class Triple<T1, T2, T3> implements Serializable {
+
+ private static final long serialVersionUID = 1L;
public T1 first;
public T2 second;
public T3 third;
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/api/IServletRequest.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/api/IServletRequest.java
index 8af9f23..ce1734a 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/api/IServletRequest.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/api/IServletRequest.java
@@ -19,6 +19,7 @@
package org.apache.hyracks.http.api;
import java.net.InetSocketAddress;
+import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -43,6 +44,14 @@
String getParameter(CharSequence name);
/**
+ * Get all values of a request parameter
+ *
+ * @param name the name of the request parameter
+ * @return the parameter values or null if not found
+ */
+ List<String> getParameterValues(CharSequence name);
+
+ /**
* Get the names of all request parameters
*
* @return the list of parameter names
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/BaseRequest.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/BaseRequest.java
index 69f7c5f..bd56eb3 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/BaseRequest.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/BaseRequest.java
@@ -65,6 +65,12 @@
}
@Override
+ public List<String> getParameterValues(CharSequence name) {
+ List<String> values = parameters.get(String.valueOf(name));
+ return values != null ? Collections.unmodifiableList(values) : null;
+ }
+
+ @Override
public Set<String> getParameterNames() {
return Collections.unmodifiableSet(parameters.keySet());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
index 7decbe0..b66d31d 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/JSONUtil.java
@@ -287,6 +287,23 @@
elements.forEach(o.putArray(name)::add);
}
+ public static void putArrayOrScalar(ObjectNode o, String name, List<String> elements) {
+ switch (elements.size()) {
+ case 0:
+ o.putNull(name);
+ break;
+ case 1:
+ o.put(name, elements.get(0));
+ break;
+ default:
+ ArrayNode arrayNode = o.putArray(name);
+ for (String item : elements) {
+ arrayNode.add(item);
+ }
+ break;
+ }
+ }
+
public static ObjectNode createObject() {
return OBJECT_MAPPER.createObjectNode();
}