[ASTERIXDB-3259][MTD] Change metadata node APIs to accept 'database'

- user model changes: no
- storage format changes: no
- interface changes: yes

Details:
Change metadata node APIs to accept 'database'.
The 'null' that is passed for 'database' from the metadata manager
will be changed in subsequent patches when 'database' is enabled.

Change-Id: Idc7f3fe1770869a71618082d76e66d889289cfb7
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/17782
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Ali Alsuliman <ali.al.solaiman@gmail.com>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index 0ab5c7b..83c374f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -181,7 +181,7 @@
     @Override
     public void dropDataverse(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException {
         try {
-            metadataNode.dropDataverse(ctx.getTxnId(), dataverseName);
+            metadataNode.dropDataverse(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -192,7 +192,7 @@
     public boolean isDataverseNotEmpty(MetadataTransactionContext ctx, DataverseName dataverseName)
             throws AlgebricksException {
         try {
-            return metadataNode.isDataverseNotEmpty(ctx.getTxnId(), dataverseName);
+            return metadataNode.isDataverseNotEmpty(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -229,7 +229,7 @@
             return dataverse;
         }
         try {
-            dataverse = metadataNode.getDataverse(ctx.getTxnId(), dataverseName);
+            dataverse = metadataNode.getDataverse(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -247,7 +247,7 @@
         List<Dataset> dataverseDatasets;
         try {
             // Assuming that the transaction can read its own writes on the metadata node.
-            dataverseDatasets = metadataNode.getDataverseDatasets(ctx.getTxnId(), dataverseName);
+            dataverseDatasets = metadataNode.getDataverseDatasets(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -272,7 +272,7 @@
     public void dropDataset(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName,
             boolean force) throws AlgebricksException {
         try {
-            metadataNode.dropDataset(ctx.getTxnId(), dataverseName, datasetName, force);
+            metadataNode.dropDataset(ctx.getTxnId(), null, dataverseName, datasetName, force);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -304,7 +304,7 @@
             return dataset;
         }
         try {
-            dataset = metadataNode.getDataset(ctx.getTxnId(), dataverseName, datasetName);
+            dataset = metadataNode.getDataset(ctx.getTxnId(), null, dataverseName, datasetName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -325,7 +325,7 @@
         }
         List<Index> datasetIndexes;
         try {
-            datasetIndexes = metadataNode.getDatasetIndexes(ctx.getTxnId(), dataverseName, datasetName);
+            datasetIndexes = metadataNode.getDatasetIndexes(ctx.getTxnId(), null, dataverseName, datasetName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -348,7 +348,7 @@
             String policyName) throws AlgebricksException {
         CompactionPolicy compactionPolicy;
         try {
-            compactionPolicy = metadataNode.getCompactionPolicy(ctx.getTxnId(), dataverse, policyName);
+            compactionPolicy = metadataNode.getCompactionPolicy(ctx.getTxnId(), null, dataverse, policyName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -363,8 +363,8 @@
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
         try {
-            ctx.addDatatype(
-                    metadataNode.getDatatype(ctx.getTxnId(), datatype.getDataverseName(), datatype.getDatatypeName()));
+            ctx.addDatatype(metadataNode.getDatatype(ctx.getTxnId(), datatype.getDatabaseName(),
+                    datatype.getDataverseName(), datatype.getDatatypeName()));
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -374,7 +374,7 @@
     public void dropDatatype(MetadataTransactionContext ctx, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException {
         try {
-            metadataNode.dropDatatype(ctx.getTxnId(), dataverseName, datatypeName);
+            metadataNode.dropDatatype(ctx.getTxnId(), null, dataverseName, datatypeName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -404,7 +404,7 @@
             return datatype;
         }
         try {
-            datatype = metadataNode.getDatatype(ctx.getTxnId(), dataverseName, datatypeName);
+            datatype = metadataNode.getDatatype(ctx.getTxnId(), null, dataverseName, datatypeName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -440,7 +440,7 @@
     public void dropIndex(MetadataTransactionContext ctx, DataverseName dataverseName, String datasetName,
             String indexName) throws AlgebricksException {
         try {
-            metadataNode.dropIndex(ctx.getTxnId(), dataverseName, datasetName, indexName);
+            metadataNode.dropIndex(ctx.getTxnId(), null, dataverseName, datasetName, indexName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -472,7 +472,7 @@
             return index;
         }
         try {
-            index = metadataNode.getIndex(ctx.getTxnId(), dataverseName, datasetName, indexName);
+            index = metadataNode.getIndex(ctx.getTxnId(), null, dataverseName, datasetName, indexName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -624,7 +624,7 @@
     public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, DataverseName dataverseName)
             throws AlgebricksException {
         try {
-            return metadataNode.getDataverseFunctions(ctx.getTxnId(), dataverseName);
+            return metadataNode.getDataverseFunctions(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -645,7 +645,7 @@
     public void dropFullTextFilter(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String filterName)
             throws AlgebricksException {
         try {
-            metadataNode.dropFullTextFilter(mdTxnCtx.getTxnId(), dataverseName, filterName);
+            metadataNode.dropFullTextFilter(mdTxnCtx.getTxnId(), null, dataverseName, filterName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -683,7 +683,7 @@
         }
 
         try {
-            filter = metadataNode.getFullTextFilter(ctx.getTxnId(), dataverseName, filterName);
+            filter = metadataNode.getFullTextFilter(ctx.getTxnId(), null, dataverseName, filterName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -741,7 +741,7 @@
         }
 
         try {
-            configMetadataEntity = metadataNode.getFullTextConfig(ctx.getTxnId(), dataverseName, configName);
+            configMetadataEntity = metadataNode.getFullTextConfig(ctx.getTxnId(), null, dataverseName, configName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -758,7 +758,7 @@
     public void dropFullTextConfig(MetadataTransactionContext mdTxnCtx, DataverseName dataverseName, String configName)
             throws AlgebricksException {
         try {
-            metadataNode.dropFullTextConfig(mdTxnCtx.getTxnId(), dataverseName, configName);
+            metadataNode.dropFullTextConfig(mdTxnCtx.getTxnId(), null, dataverseName, configName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -798,7 +798,7 @@
     public void dropAdapter(MetadataTransactionContext ctx, DataverseName dataverseName, String name)
             throws AlgebricksException {
         try {
-            metadataNode.dropAdapter(ctx.getTxnId(), dataverseName, name);
+            metadataNode.dropAdapter(ctx.getTxnId(), null, dataverseName, name);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -810,7 +810,7 @@
             throws AlgebricksException {
         DatasourceAdapter adapter;
         try {
-            adapter = metadataNode.getAdapter(ctx.getTxnId(), dataverseName, name);
+            adapter = metadataNode.getAdapter(ctx.getTxnId(), null, dataverseName, name);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -821,7 +821,7 @@
     public void dropLibrary(MetadataTransactionContext ctx, DataverseName dataverseName, String libraryName)
             throws AlgebricksException {
         try {
-            metadataNode.dropLibrary(ctx.getTxnId(), dataverseName, libraryName);
+            metadataNode.dropLibrary(ctx.getTxnId(), null, dataverseName, libraryName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -835,7 +835,7 @@
         try {
             // Assuming that the transaction can read its own writes on the
             // metadata node.
-            dataverseLibaries = metadataNode.getDataverseLibraries(ctx.getTxnId(), dataverseName);
+            dataverseLibaries = metadataNode.getDataverseLibraries(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -859,7 +859,7 @@
             throws AlgebricksException {
         Library library;
         try {
-            library = metadataNode.getLibrary(ctx.getTxnId(), dataverseName, libraryName);
+            library = metadataNode.getLibrary(ctx.getTxnId(), null, dataverseName, libraryName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -871,7 +871,7 @@
             String policyName) throws AlgebricksException {
         FeedPolicyEntity feedPolicy;
         try {
-            feedPolicy = metadataNode.getFeedPolicy(ctx.getTxnId(), dataverseName, policyName);
+            feedPolicy = metadataNode.getFeedPolicy(ctx.getTxnId(), null, dataverseName, policyName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -883,7 +883,7 @@
             throws AlgebricksException {
         Feed feed;
         try {
-            feed = metadataNode.getFeed(ctx.getTxnId(), dataverseName, feedName);
+            feed = metadataNode.getFeed(ctx.getTxnId(), null, dataverseName, feedName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -894,7 +894,7 @@
     public List<Feed> getFeeds(MetadataTransactionContext ctx, DataverseName dataverseName) throws AlgebricksException {
         List<Feed> feeds;
         try {
-            feeds = metadataNode.getFeeds(ctx.getTxnId(), dataverseName);
+            feeds = metadataNode.getFeeds(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -907,11 +907,11 @@
         Feed feed;
         List<FeedConnection> feedConnections;
         try {
-            feed = metadataNode.getFeed(ctx.getTxnId(), dataverseName, feedName);
-            feedConnections = metadataNode.getFeedConnections(ctx.getTxnId(), dataverseName, feedName);
-            metadataNode.dropFeed(ctx.getTxnId(), dataverseName, feedName);
+            feed = metadataNode.getFeed(ctx.getTxnId(), null, dataverseName, feedName);
+            feedConnections = metadataNode.getFeedConnections(ctx.getTxnId(), null, dataverseName, feedName);
+            metadataNode.dropFeed(ctx.getTxnId(), null, dataverseName, feedName);
             for (FeedConnection feedConnection : feedConnections) {
-                metadataNode.dropFeedConnection(ctx.getTxnId(), dataverseName, feedName,
+                metadataNode.dropFeedConnection(ctx.getTxnId(), null, dataverseName, feedName,
                         feedConnection.getDatasetName());
                 ctx.dropFeedConnection(dataverseName, feedName, feedConnection.getDatasetName());
             }
@@ -946,7 +946,7 @@
     public void dropFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName, String feedName,
             String datasetName) throws AlgebricksException {
         try {
-            metadataNode.dropFeedConnection(ctx.getTxnId(), dataverseName, feedName, datasetName);
+            metadataNode.dropFeedConnection(ctx.getTxnId(), null, dataverseName, feedName, datasetName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -957,7 +957,7 @@
     public FeedConnection getFeedConnection(MetadataTransactionContext ctx, DataverseName dataverseName,
             String feedName, String datasetName) throws AlgebricksException {
         try {
-            return metadataNode.getFeedConnection(ctx.getTxnId(), dataverseName, feedName, datasetName);
+            return metadataNode.getFeedConnection(ctx.getTxnId(), null, dataverseName, feedName, datasetName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -967,7 +967,7 @@
     public List<FeedConnection> getFeedConections(MetadataTransactionContext ctx, DataverseName dataverseName,
             String feedName) throws AlgebricksException {
         try {
-            return metadataNode.getFeedConnections(ctx.getTxnId(), dataverseName, feedName);
+            return metadataNode.getFeedConnections(ctx.getTxnId(), null, dataverseName, feedName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -978,7 +978,7 @@
             DataverseName dataverseName) throws AlgebricksException {
         List<DatasourceAdapter> dataverseAdapters;
         try {
-            dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getTxnId(), dataverseName);
+            dataverseAdapters = metadataNode.getDataverseAdapters(mdTxnCtx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -990,8 +990,8 @@
             throws AlgebricksException {
         FeedPolicyEntity feedPolicy;
         try {
-            feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getTxnId(), dataverseName, policyName);
-            metadataNode.dropFeedPolicy(mdTxnCtx.getTxnId(), dataverseName, policyName);
+            feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getTxnId(), null, dataverseName, policyName);
+            metadataNode.dropFeedPolicy(mdTxnCtx.getTxnId(), null, dataverseName, policyName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -1003,7 +1003,7 @@
             DataverseName dataverseName) throws AlgebricksException {
         List<FeedPolicyEntity> dataverseFeedPolicies;
         try {
-            dataverseFeedPolicies = metadataNode.getDataverseFeedPolicies(mdTxnCtx.getTxnId(), dataverseName);
+            dataverseFeedPolicies = metadataNode.getDataverseFeedPolicies(mdTxnCtx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -1034,7 +1034,7 @@
     @Override
     public void dropExternalFile(MetadataTransactionContext ctx, ExternalFile externalFile) throws AlgebricksException {
         try {
-            metadataNode.dropExternalFile(ctx.getTxnId(), externalFile.getDataverseName(),
+            metadataNode.dropExternalFile(ctx.getTxnId(), null, externalFile.getDataverseName(),
                     externalFile.getDatasetName(), externalFile.getFileNumber());
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
@@ -1046,7 +1046,7 @@
             Integer fileNumber) throws AlgebricksException {
         ExternalFile file;
         try {
-            file = metadataNode.getExternalFile(ctx.getTxnId(), dataverseName, datasetName, fileNumber);
+            file = metadataNode.getExternalFile(ctx.getTxnId(), null, dataverseName, datasetName, fileNumber);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -1066,7 +1066,7 @@
     public void dropSynonym(MetadataTransactionContext ctx, DataverseName dataverseName, String synonymName)
             throws AlgebricksException {
         try {
-            metadataNode.dropSynonym(ctx.getTxnId(), dataverseName, synonymName);
+            metadataNode.dropSynonym(ctx.getTxnId(), null, dataverseName, synonymName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -1076,7 +1076,7 @@
     public Synonym getSynonym(MetadataTransactionContext ctx, DataverseName dataverseName, String synonymName)
             throws AlgebricksException {
         try {
-            return metadataNode.getSynonym(ctx.getTxnId(), dataverseName, synonymName);
+            return metadataNode.getSynonym(ctx.getTxnId(), null, dataverseName, synonymName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
@@ -1086,7 +1086,7 @@
     public List<Synonym> getDataverseSynonyms(MetadataTransactionContext ctx, DataverseName dataverseName)
             throws AlgebricksException {
         try {
-            return metadataNode.getDataverseSynonyms(ctx.getTxnId(), dataverseName);
+            return metadataNode.getDataverseSynonyms(ctx.getTxnId(), null, dataverseName);
         } catch (RemoteException e) {
             throw new MetadataException(ErrorCode.REMOTE_EXCEPTION_WHEN_CALLING_METADATA_NODE, e);
         }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index c0075ec..519a29d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -477,16 +477,15 @@
     public void addFullTextFilter(TxnId txnId, FullTextFilterMetadataEntity filterMetadataEntity)
             throws RemoteException, AlgebricksException {
         insertFullTextFilterMetadataEntityToCatalog(txnId, filterMetadataEntity);
-        return;
     }
 
     @Override
-    public FullTextFilterMetadataEntity getFullTextFilter(TxnId txnId, DataverseName dataverseName, String filterName)
-            throws AlgebricksException {
+    public FullTextFilterMetadataEntity getFullTextFilter(TxnId txnId, String database, DataverseName dataverseName,
+            String filterName) throws AlgebricksException {
         try {
             FullTextFilterMetadataEntityTupleTranslator translator =
                     tupleTranslatorProvider.getFullTextFilterTupleTranslator(true);
-            ITupleReference searchKey = createTuple(dataverseName.getCanonicalForm(), filterName);
+            ITupleReference searchKey = createTuple(database, dataverseName, filterName);
             IValueExtractor<FullTextFilterMetadataEntity> valueExtractor =
                     new MetadataEntityValueExtractor<>(translator);
             List<FullTextFilterMetadataEntity> results = new ArrayList<>();
@@ -502,18 +501,18 @@
     }
 
     @Override
-    public void dropFullTextFilter(TxnId txnId, DataverseName dataverseName, String filterName)
+    public void dropFullTextFilter(TxnId txnId, String database, DataverseName dataverseName, String filterName)
             throws AlgebricksException {
-        dropFullTextFilterDescriptor(txnId, dataverseName, filterName, false);
+        dropFullTextFilterDescriptor(txnId, database, dataverseName, filterName, false);
     }
 
-    private void dropFullTextFilterDescriptor(TxnId txnId, DataverseName dataverseName, String filterName,
-            boolean force) throws AlgebricksException {
+    private void dropFullTextFilterDescriptor(TxnId txnId, String database, DataverseName dataverseName,
+            String filterName, boolean force) throws AlgebricksException {
         if (!force) {
-            confirmFullTextFilterCanBeDeleted(txnId, dataverseName, filterName);
+            confirmFullTextFilterCanBeDeleted(txnId, database, dataverseName, filterName);
         }
         try {
-            ITupleReference key = createTuple(dataverseName.getCanonicalForm(), filterName);
+            ITupleReference key = createTuple(database, dataverseName, filterName);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getFullTextFilterEntity().getIndex(), key);
         } catch (HyracksDataException e) {
             throw new AlgebricksException(e);
@@ -555,15 +554,15 @@
     }
 
     @Override
-    public FullTextConfigMetadataEntity getFullTextConfig(TxnId txnId, DataverseName dataverseName, String configName)
-            throws AlgebricksException {
+    public FullTextConfigMetadataEntity getFullTextConfig(TxnId txnId, String database, DataverseName dataverseName,
+            String configName) throws AlgebricksException {
         FullTextConfigMetadataEntityTupleTranslator translator =
                 tupleTranslatorProvider.getFullTextConfigTupleTranslator(true);
 
         ITupleReference searchKey;
         List<FullTextConfigMetadataEntity> results = new ArrayList<>();
         try {
-            searchKey = createTuple(dataverseName.getCanonicalForm(), configName);
+            searchKey = createTuple(database, dataverseName, configName);
             IValueExtractor<FullTextConfigMetadataEntity> valueExtractor =
                     new MetadataEntityValueExtractor<>(translator);
             searchIndex(txnId, mdIndexesProvider.getFullTextConfigEntity().getIndex(), searchKey, valueExtractor,
@@ -576,24 +575,23 @@
             return null;
         }
 
-        FullTextConfigMetadataEntity result = results.get(0);
-        return result;
+        return results.get(0);
     }
 
     @Override
-    public void dropFullTextConfig(TxnId txnId, DataverseName dataverseName, String configName)
+    public void dropFullTextConfig(TxnId txnId, String database, DataverseName dataverseName, String configName)
             throws AlgebricksException {
-        dropFullTextConfigDescriptor(txnId, dataverseName, configName, false);
+        dropFullTextConfigDescriptor(txnId, database, dataverseName, configName, false);
     }
 
-    private void dropFullTextConfigDescriptor(TxnId txnId, DataverseName dataverseName, String configName,
-            boolean force) throws AlgebricksException {
+    private void dropFullTextConfigDescriptor(TxnId txnId, String database, DataverseName dataverseName,
+            String configName, boolean force) throws AlgebricksException {
         if (!force) {
-            confirmFullTextConfigCanBeDeleted(txnId, dataverseName, configName);
+            confirmFullTextConfigCanBeDeleted(txnId, database, dataverseName, configName);
         }
 
         try {
-            ITupleReference key = createTuple(dataverseName.getCanonicalForm(), configName);
+            ITupleReference key = createTuple(database, dataverseName, configName);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getFullTextConfigEntity().getIndex(), key);
         } catch (HyracksDataException e) {
             throw new AlgebricksException(e);
@@ -679,31 +677,33 @@
     }
 
     @Override
-    public void dropDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public void dropDataverse(TxnId txnId, String database, DataverseName dataverseName) throws AlgebricksException {
         try {
-            confirmDataverseCanBeDeleted(txnId, dataverseName);
+            confirmDataverseCanBeDeleted(txnId, database, dataverseName);
 
             // Drop all feeds and connections in this dataverse.
             // Feeds may depend on datatypes and adapters
-            List<Feed> dataverseFeeds = getDataverseFeeds(txnId, dataverseName);
+            List<Feed> dataverseFeeds = getDataverseFeeds(txnId, database, dataverseName);
             for (Feed feed : dataverseFeeds) {
-                List<FeedConnection> feedConnections = getFeedConnections(txnId, dataverseName, feed.getFeedName());
+                List<FeedConnection> feedConnections =
+                        getFeedConnections(txnId, database, dataverseName, feed.getFeedName());
                 for (FeedConnection feedConnection : feedConnections) {
-                    dropFeedConnection(txnId, dataverseName, feed.getFeedName(), feedConnection.getDatasetName());
+                    dropFeedConnection(txnId, database, dataverseName, feed.getFeedName(),
+                            feedConnection.getDatasetName());
                 }
-                dropFeed(txnId, dataverseName, feed.getFeedName());
+                dropFeed(txnId, database, dataverseName, feed.getFeedName());
             }
 
             // Drop all feed ingestion policies in this dataverse.
-            List<FeedPolicyEntity> feedPolicies = getDataverseFeedPolicies(txnId, dataverseName);
+            List<FeedPolicyEntity> feedPolicies = getDataverseFeedPolicies(txnId, database, dataverseName);
             for (FeedPolicyEntity feedPolicy : feedPolicies) {
-                dropFeedPolicy(txnId, dataverseName, feedPolicy.getPolicyName());
+                dropFeedPolicy(txnId, database, dataverseName, feedPolicy.getPolicyName());
             }
 
             // Drop all functions in this dataverse.
             // Functions may depend on libraries, datasets, functions, datatypes, synonyms
             // As a side effect, acquires an S lock on the 'Function' dataset on behalf of txnId.
-            List<Function> dataverseFunctions = getDataverseFunctions(txnId, dataverseName);
+            List<Function> dataverseFunctions = getDataverseFunctions(txnId, database, dataverseName);
             for (Function function : dataverseFunctions) {
                 dropFunction(txnId, function.getSignature(), true);
             }
@@ -711,59 +711,60 @@
             // Drop all adapters in this dataverse.
             // Adapters depend on libraries.
             // As a side effect, acquires an S lock on the 'Adapter' dataset on behalf of txnId.
-            List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(txnId, dataverseName);
+            List<DatasourceAdapter> dataverseAdapters = getDataverseAdapters(txnId, database, dataverseName);
             for (DatasourceAdapter adapter : dataverseAdapters) {
-                dropAdapter(txnId, dataverseName, adapter.getAdapterIdentifier().getName());
+                dropAdapter(txnId, database, dataverseName, adapter.getAdapterIdentifier().getName());
             }
 
             // Drop all libraries in this dataverse.
-            List<Library> dataverseLibraries = getDataverseLibraries(txnId, dataverseName);
+            List<Library> dataverseLibraries = getDataverseLibraries(txnId, database, dataverseName);
             for (Library lib : dataverseLibraries) {
-                dropLibrary(txnId, lib.getDataverseName(), lib.getName());
+                dropLibrary(txnId, lib.getDatabaseName(), lib.getDataverseName(), lib.getName());
             }
 
             // Drop all synonyms in this dataverse.
-            List<Synonym> dataverseSynonyms = getDataverseSynonyms(txnId, dataverseName);
+            List<Synonym> dataverseSynonyms = getDataverseSynonyms(txnId, database, dataverseName);
             for (Synonym synonym : dataverseSynonyms) {
-                dropSynonym(txnId, dataverseName, synonym.getSynonymName(), true);
+                dropSynonym(txnId, database, dataverseName, synonym.getSynonymName(), true);
             }
 
             // Drop all datasets and indexes in this dataverse.
             // Datasets depend on datatypes
-            List<Dataset> dataverseDatasets = getDataverseDatasets(txnId, dataverseName);
+            List<Dataset> dataverseDatasets = getDataverseDatasets(txnId, database, dataverseName);
             for (Dataset ds : dataverseDatasets) {
-                dropDataset(txnId, dataverseName, ds.getDatasetName(), true);
+                dropDataset(txnId, database, dataverseName, ds.getDatasetName(), true);
             }
 
             // Drop full-text configs in this dataverse.
             // Note that full-text configs are utilized by the index, and we need to always drop index first
             // and then full-text config
             List<FullTextConfigMetadataEntity> configMetadataEntities =
-                    getDataverseFullTextConfigs(txnId, dataverseName);
+                    getDataverseFullTextConfigs(txnId, database, dataverseName);
             for (FullTextConfigMetadataEntity configMetadataEntity : configMetadataEntities) {
-                dropFullTextConfigDescriptor(txnId, dataverseName, configMetadataEntity.getFullTextConfig().getName(),
-                        true);
+                dropFullTextConfigDescriptor(txnId, database, dataverseName,
+                        configMetadataEntity.getFullTextConfig().getName(), true);
             }
 
             // Drop full-text filters in this dataverse.
             // Note that full-text filters are utilized by the full-text configs,
             // and we need to always drop full-text configs first
             // and then full-text filter
-            List<FullTextFilterMetadataEntity> filters = getDataverseFullTextFilters(txnId, dataverseName);
+            List<FullTextFilterMetadataEntity> filters = getDataverseFullTextFilters(txnId, database, dataverseName);
             for (FullTextFilterMetadataEntity filter : filters) {
-                dropFullTextFilterDescriptor(txnId, dataverseName, filter.getFullTextFilter().getName(), true);
+                dropFullTextFilterDescriptor(txnId, database, dataverseName, filter.getFullTextFilter().getName(),
+                        true);
             }
 
             // Drop all types in this dataverse.
             // As a side effect, acquires an S lock on the 'datatype' dataset on behalf of txnId.
-            List<Datatype> dataverseDatatypes = getDataverseDatatypes(txnId, dataverseName);
+            List<Datatype> dataverseDatatypes = getDataverseDatatypes(txnId, database, dataverseName);
             for (Datatype dataverseDatatype : dataverseDatatypes) {
-                forceDropDatatype(txnId, dataverseName, dataverseDatatype.getDatatypeName());
+                forceDropDatatype(txnId, database, dataverseName, dataverseDatatype.getDatatypeName());
             }
 
             // Delete the dataverse entry from the 'dataverse' dataset.
             // As a side effect, acquires an S lock on the 'dataverse' dataset on behalf of txnId.
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             ITupleReference tuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getDataverseEntity().getIndex(), searchKey);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getDataverseEntity().getIndex(), tuple);
@@ -778,35 +779,36 @@
     }
 
     @Override
-    public boolean isDataverseNotEmpty(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
-        return !getDataverseDatatypes(txnId, dataverseName).isEmpty()
-                || !getDataverseDatasets(txnId, dataverseName).isEmpty()
-                || !getDataverseLibraries(txnId, dataverseName).isEmpty()
-                || !getDataverseAdapters(txnId, dataverseName).isEmpty()
-                || !getDataverseFunctions(txnId, dataverseName).isEmpty()
-                || !getDataverseFeedPolicies(txnId, dataverseName).isEmpty()
-                || !getDataverseFeeds(txnId, dataverseName).isEmpty()
-                || !getDataverseSynonyms(txnId, dataverseName).isEmpty()
-                || !getDataverseFullTextConfigs(txnId, dataverseName).isEmpty()
-                || !getDataverseFullTextFilters(txnId, dataverseName).isEmpty();
+    public boolean isDataverseNotEmpty(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
+        return !getDataverseDatatypes(txnId, database, dataverseName).isEmpty()
+                || !getDataverseDatasets(txnId, database, dataverseName).isEmpty()
+                || !getDataverseLibraries(txnId, database, dataverseName).isEmpty()
+                || !getDataverseAdapters(txnId, database, dataverseName).isEmpty()
+                || !getDataverseFunctions(txnId, database, dataverseName).isEmpty()
+                || !getDataverseFeedPolicies(txnId, database, dataverseName).isEmpty()
+                || !getDataverseFeeds(txnId, database, dataverseName).isEmpty()
+                || !getDataverseSynonyms(txnId, database, dataverseName).isEmpty()
+                || !getDataverseFullTextConfigs(txnId, database, dataverseName).isEmpty()
+                || !getDataverseFullTextFilters(txnId, database, dataverseName).isEmpty();
     }
 
     @Override
-    public void dropDataset(TxnId txnId, DataverseName dataverseName, String datasetName, boolean force)
-            throws AlgebricksException {
-        Dataset dataset = getDataset(txnId, dataverseName, datasetName);
+    public void dropDataset(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
+            boolean force) throws AlgebricksException {
+        Dataset dataset = getDataset(txnId, database, dataverseName, datasetName);
         if (dataset == null) {
             throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE,
                     datasetName, dataverseName);
         }
         if (!force) {
             String datasetTypeDisplayName = DatasetUtil.getDatasetTypeDisplayName(dataset.getDatasetType());
-            confirmDatasetCanBeDeleted(txnId, datasetTypeDisplayName, dataverseName, datasetName);
+            confirmDatasetCanBeDeleted(txnId, datasetTypeDisplayName, database, dataverseName, datasetName);
         }
 
         try {
             // Delete entry from the 'datasets' dataset.
-            ITupleReference searchKey = createTuple(dataverseName, datasetName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datasetName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'dataset' dataset.
             ITupleReference datasetTuple = null;
@@ -816,19 +818,19 @@
                 switch (dataset.getDatasetType()) {
                     case INTERNAL:
                         // Delete entry(s) from the 'indexes' dataset.
-                        List<Index> datasetIndexes = getDatasetIndexes(txnId, dataverseName, datasetName);
+                        List<Index> datasetIndexes = getDatasetIndexes(txnId, database, dataverseName, datasetName);
                         if (datasetIndexes != null) {
                             for (Index index : datasetIndexes) {
-                                dropIndex(txnId, dataverseName, datasetName, index.getIndexName());
+                                dropIndex(txnId, database, dataverseName, datasetName, index.getIndexName());
                             }
                         }
                         break;
                     case EXTERNAL:
                         // Delete entry(s) from the 'indexes' dataset.
-                        datasetIndexes = getDatasetIndexes(txnId, dataverseName, datasetName);
+                        datasetIndexes = getDatasetIndexes(txnId, database, dataverseName, datasetName);
                         if (datasetIndexes != null) {
                             for (Index index : datasetIndexes) {
-                                dropIndex(txnId, dataverseName, datasetName, index.getIndexName());
+                                dropIndex(txnId, database, dataverseName, datasetName, index.getIndexName());
                             }
                         }
                         // Delete External Files
@@ -838,7 +840,8 @@
                         if (datasetFiles != null && !datasetFiles.isEmpty()) {
                             // Drop all external files in this dataset.
                             for (ExternalFile file : datasetFiles) {
-                                dropExternalFile(txnId, dataverseName, file.getDatasetName(), file.getFileNumber());
+                                dropExternalFile(txnId, database, dataverseName, file.getDatasetName(),
+                                        file.getFileNumber());
                             }
                         }
                         break;
@@ -860,10 +863,10 @@
     }
 
     @Override
-    public void dropIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
-            throws AlgebricksException {
+    public void dropIndex(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
+            String indexName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datasetName, indexName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'index' dataset.
             ITupleReference tuple =
@@ -909,29 +912,31 @@
     }
 
     @Override
-    public void dropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName) throws AlgebricksException {
-        dropDatatype(txnId, dataverseName, datatypeName, false);
+    public void dropDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
+            throws AlgebricksException {
+        dropDatatype(txnId, database, dataverseName, datatypeName, false);
     }
 
-    private void dropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName, boolean force)
-            throws AlgebricksException {
+    private void dropDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName,
+            boolean force) throws AlgebricksException {
         if (!force) {
-            confirmDatatypeIsUnused(txnId, dataverseName, datatypeName);
+            confirmDatatypeIsUnused(txnId, database, dataverseName, datatypeName);
         }
         // Delete the datatype entry, including all it's nested anonymous types.
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datatypeName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datatypeName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'datatype' dataset.
             ITupleReference tuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getDatatypeEntity().getIndex(), searchKey);
             // Get nested types
-            List<String> nestedTypes = getNestedComplexDatatypeNamesForThisDatatype(txnId, dataverseName, datatypeName);
+            List<String> nestedTypes =
+                    getNestedComplexDatatypeNamesForThisDatatype(txnId, database, dataverseName, datatypeName);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getDatatypeEntity().getIndex(), tuple);
             for (String nestedType : nestedTypes) {
-                Datatype dt = getDatatype(txnId, dataverseName, nestedType);
+                Datatype dt = getDatatype(txnId, database, dataverseName, nestedType);
                 if (dt != null && dt.getIsAnonymous()) {
-                    dropDatatype(txnId, dataverseName, dt.getDatatypeName());
+                    dropDatatype(txnId, database, dataverseName, dt.getDatatypeName());
                 }
             }
         } catch (HyracksDataException e) {
@@ -944,10 +949,10 @@
         }
     }
 
-    private void forceDropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+    private void forceDropDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datatypeName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datatypeName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'datatype' dataset.
             ITupleReference tuple =
@@ -982,9 +987,10 @@
     }
 
     @Override
-    public Dataverse getDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public Dataverse getDataverse(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(false);
             IValueExtractor<Dataverse> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Dataverse> results = new ArrayList<>();
@@ -999,9 +1005,10 @@
     }
 
     @Override
-    public List<Dataset> getDataverseDatasets(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public List<Dataset> getDataverseDatasets(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
             IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Dataset> results = new ArrayList<>();
@@ -1013,9 +1020,10 @@
     }
 
     @Override
-    public List<Feed> getDataverseFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public List<Feed> getDataverseFeeds(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
             IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Feed> results = new ArrayList<>();
@@ -1027,9 +1035,10 @@
     }
 
     @Override
-    public List<Library> getDataverseLibraries(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public List<Library> getDataverseLibraries(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(false);
             IValueExtractor<Library> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Library> results = new ArrayList<>();
@@ -1040,9 +1049,10 @@
         }
     }
 
-    private List<Datatype> getDataverseDatatypes(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    private List<Datatype> getDataverseDatatypes(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             DatatypeTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getDataTypeTupleTranslator(txnId, this, false);
             IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1054,9 +1064,9 @@
         }
     }
 
-    private List<FullTextConfigMetadataEntity> getDataverseFullTextConfigs(TxnId txnId, DataverseName dataverseName)
-            throws AlgebricksException {
-        ITupleReference searchKey = createTuple(dataverseName);
+    private List<FullTextConfigMetadataEntity> getDataverseFullTextConfigs(TxnId txnId, String database,
+            DataverseName dataverseName) throws AlgebricksException {
+        ITupleReference searchKey = createTuple(database, dataverseName);
         FullTextConfigMetadataEntityTupleTranslator tupleReaderWriter =
                 tupleTranslatorProvider.getFullTextConfigTupleTranslator(true);
         IValueExtractor<FullTextConfigMetadataEntity> valueExtractor =
@@ -1071,9 +1081,9 @@
         return results;
     }
 
-    private List<FullTextFilterMetadataEntity> getDataverseFullTextFilters(TxnId txnId, DataverseName dataverseName)
-            throws AlgebricksException {
-        ITupleReference searchKey = createTuple(dataverseName);
+    private List<FullTextFilterMetadataEntity> getDataverseFullTextFilters(TxnId txnId, String database,
+            DataverseName dataverseName) throws AlgebricksException {
+        ITupleReference searchKey = createTuple(database, dataverseName);
         FullTextFilterMetadataEntityTupleTranslator tupleReaderWriter =
                 tupleTranslatorProvider.getFullTextFilterTupleTranslator(true);
         IValueExtractor<FullTextFilterMetadataEntity> valueExtractor =
@@ -1089,9 +1099,10 @@
     }
 
     @Override
-    public Dataset getDataset(TxnId txnId, DataverseName dataverseName, String datasetName) throws AlgebricksException {
+    public Dataset getDataset(TxnId txnId, String database, DataverseName dataverseName, String datasetName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datasetName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datasetName);
             DatasetTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDatasetTupleTranslator(false);
             List<Dataset> results = new ArrayList<>();
             IValueExtractor<Dataset> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1161,7 +1172,8 @@
         }
     }
 
-    private void confirmDataverseCanBeDeleted(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    private void confirmDataverseCanBeDeleted(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         // If a dataset from a DIFFERENT dataverse
         // uses a type from this dataverse
         // throw an error
@@ -1260,28 +1272,28 @@
     }
 
     private void confirmFunctionIsUnusedByViews(TxnId txnId, FunctionSignature signature) throws AlgebricksException {
-        confirmObjectIsUnusedByViews(txnId, "function", DependencyKind.FUNCTION, signature.getDataverseName(),
+        confirmObjectIsUnusedByViews(txnId, "function", DependencyKind.FUNCTION, null, signature.getDataverseName(),
                 signature.getName(), Integer.toString(signature.getArity()));
     }
 
     private void confirmFunctionIsUnusedByFunctions(TxnId txnId, FunctionSignature signature)
             throws AlgebricksException {
-        confirmObjectIsUnusedByFunctions(txnId, "function", DependencyKind.FUNCTION, signature.getDataverseName(),
+        confirmObjectIsUnusedByFunctions(txnId, "function", DependencyKind.FUNCTION, null, signature.getDataverseName(),
                 signature.getName(), Integer.toString(signature.getArity()));
     }
 
     private void confirmObjectIsUnusedByFunctions(TxnId txnId, String objectKindDisplayName,
-            DependencyKind dependencyKind, DataverseName dataverseName, String objectName, String objectArg)
-            throws AlgebricksException {
+            DependencyKind dependencyKind, String database, DataverseName dataverseName, String objectName,
+            String objectArg) throws AlgebricksException {
         // If any function uses this object, throw an error
         List<Function> functions = getAllFunctions(txnId);
-        confirmObjectIsUnusedByFunctionsImpl(functions, objectKindDisplayName, dependencyKind, dataverseName,
+        confirmObjectIsUnusedByFunctionsImpl(functions, objectKindDisplayName, dependencyKind, database, dataverseName,
                 objectName, objectArg);
     }
 
     private void confirmObjectIsUnusedByFunctionsImpl(List<Function> allFunctions, String objectKindDisplayName,
-            DependencyKind dependencyKind, DataverseName dataverseName, String objectName, String objectArg)
-            throws AlgebricksException {
+            DependencyKind dependencyKind, String database, DataverseName dataverseName, String objectName,
+            String objectArg) throws AlgebricksException {
         int functionDependencyIdx = Function.DEPENDENCIES_SCHEMA.indexOf(dependencyKind);
         if (functionDependencyIdx < 0) {
             throw new AlgebricksException(ErrorCode.ILLEGAL_STATE);
@@ -1307,16 +1319,17 @@
     }
 
     private void confirmObjectIsUnusedByViews(TxnId txnId, String objectKindDisplayName, DependencyKind dependencyKind,
-            DataverseName dataverseName, String objectName, String objectArg) throws AlgebricksException {
+            String database, DataverseName dataverseName, String objectName, String objectArg)
+            throws AlgebricksException {
         // If any function uses this object, throw an error
         List<Dataset> datasets = getAllDatasets(txnId);
-        confirmObjectIsUnusedByViewsImpl(datasets, objectKindDisplayName, dependencyKind, dataverseName, objectName,
-                objectArg);
+        confirmObjectIsUnusedByViewsImpl(datasets, objectKindDisplayName, dependencyKind, database, dataverseName,
+                objectName, objectArg);
     }
 
     private void confirmObjectIsUnusedByViewsImpl(List<Dataset> allDatasets, String objectKindDisplayName,
-            DependencyKind dependencyKind, DataverseName dataverseName, String objectName, String objectArg)
-            throws AlgebricksException {
+            DependencyKind dependencyKind, String database, DataverseName dataverseName, String objectName,
+            String objectArg) throws AlgebricksException {
         int viewDependencyIdx = ViewDetails.DEPENDENCIES_SCHEMA.indexOf(dependencyKind);
         if (viewDependencyIdx < 0) {
             throw new AlgebricksException(ErrorCode.ILLEGAL_STATE);
@@ -1344,8 +1357,8 @@
         }
     }
 
-    private void confirmFullTextConfigCanBeDeleted(TxnId txnId, DataverseName dataverseNameFullTextConfig,
-            String configName) throws AlgebricksException {
+    private void confirmFullTextConfigCanBeDeleted(TxnId txnId, String database,
+            DataverseName dataverseNameFullTextConfig, String configName) throws AlgebricksException {
         if (Strings.isNullOrEmpty(configName)) {
             throw new MetadataException(
                     org.apache.asterix.common.exceptions.ErrorCode.FULL_TEXT_DEFAULT_CONFIG_CANNOT_BE_DELETED_OR_CREATED);
@@ -1354,7 +1367,8 @@
         // If any index uses this full-text config, throw an error
         List<Dataset> datasets = getAllDatasets(txnId);
         for (Dataset dataset : datasets) {
-            List<Index> indexes = getDatasetIndexes(txnId, dataset.getDataverseName(), dataset.getDatasetName());
+            List<Index> indexes = getDatasetIndexes(txnId, dataset.getDatabaseName(), dataset.getDataverseName(),
+                    dataset.getDatasetName());
             for (Index index : indexes) {
                 // ToDo: to support index to access full-text config in another dataverse,
                 //   we may need to include the dataverse of the full-text config in the index.getFullTextConfigDataverse()
@@ -1376,32 +1390,32 @@
         }
     }
 
-    private void confirmDatasetCanBeDeleted(TxnId txnId, String datasetTypeDisplayName, DataverseName dataverseName,
-            String datasetName) throws AlgebricksException {
-        confirmDatasetIsUnusedByFunctions(txnId, datasetTypeDisplayName, dataverseName, datasetName);
-        confirmDatasetIsUnusedByViews(txnId, datasetTypeDisplayName, dataverseName, datasetName);
+    private void confirmDatasetCanBeDeleted(TxnId txnId, String datasetTypeDisplayName, String database,
+            DataverseName dataverseName, String datasetName) throws AlgebricksException {
+        confirmDatasetIsUnusedByFunctions(txnId, datasetTypeDisplayName, database, dataverseName, datasetName);
+        confirmDatasetIsUnusedByViews(txnId, datasetTypeDisplayName, database, dataverseName, datasetName);
     }
 
-    private void confirmDatasetIsUnusedByFunctions(TxnId txnId, String datasetKindDisplayName,
+    private void confirmDatasetIsUnusedByFunctions(TxnId txnId, String datasetKindDisplayName, String database,
             DataverseName dataverseName, String datasetName) throws AlgebricksException {
-        confirmObjectIsUnusedByFunctions(txnId, datasetKindDisplayName, DependencyKind.DATASET, dataverseName,
+        confirmObjectIsUnusedByFunctions(txnId, datasetKindDisplayName, DependencyKind.DATASET, database, dataverseName,
                 datasetName, null);
     }
 
-    private void confirmDatasetIsUnusedByViews(TxnId txnId, String datasetKindDisplayName, DataverseName dataverseName,
-            String datasetName) throws AlgebricksException {
-        confirmObjectIsUnusedByViews(txnId, datasetKindDisplayName, DependencyKind.DATASET, dataverseName, datasetName,
-                null);
+    private void confirmDatasetIsUnusedByViews(TxnId txnId, String datasetKindDisplayName, String database,
+            DataverseName dataverseName, String datasetName) throws AlgebricksException {
+        confirmObjectIsUnusedByViews(txnId, datasetKindDisplayName, DependencyKind.DATASET, database, dataverseName,
+                datasetName, null);
     }
 
-    private void confirmLibraryCanBeDeleted(TxnId txnId, DataverseName dataverseName, String libraryName)
-            throws AlgebricksException {
-        confirmLibraryIsUnusedByFunctions(txnId, dataverseName, libraryName);
-        confirmLibraryIsUnusedByAdapters(txnId, dataverseName, libraryName);
+    private void confirmLibraryCanBeDeleted(TxnId txnId, String database, DataverseName dataverseName,
+            String libraryName) throws AlgebricksException {
+        confirmLibraryIsUnusedByFunctions(txnId, database, dataverseName, libraryName);
+        confirmLibraryIsUnusedByAdapters(txnId, database, dataverseName, libraryName);
     }
 
-    private void confirmLibraryIsUnusedByFunctions(TxnId txnId, DataverseName dataverseName, String libraryName)
-            throws AlgebricksException {
+    private void confirmLibraryIsUnusedByFunctions(TxnId txnId, String database, DataverseName dataverseName,
+            String libraryName) throws AlgebricksException {
         List<Function> functions = getAllFunctions(txnId);
         for (Function function : functions) {
             if (libraryName.equals(function.getLibraryName())
@@ -1414,8 +1428,8 @@
         }
     }
 
-    private void confirmLibraryIsUnusedByAdapters(TxnId txnId, DataverseName dataverseName, String libraryName)
-            throws AlgebricksException {
+    private void confirmLibraryIsUnusedByAdapters(TxnId txnId, String database, DataverseName dataverseName,
+            String libraryName) throws AlgebricksException {
         List<DatasourceAdapter> adapters = getAllAdapters(txnId);
         for (DatasourceAdapter adapter : adapters) {
             if (libraryName.equals(adapter.getLibraryName())
@@ -1429,15 +1443,15 @@
         }
     }
 
-    private void confirmDatatypeIsUnused(TxnId txnId, DataverseName dataverseName, String datatypeName)
+    private void confirmDatatypeIsUnused(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException {
-        confirmDatatypeIsUnusedByDatatypes(txnId, dataverseName, datatypeName);
-        confirmDatatypeIsUnusedByDatasets(txnId, dataverseName, datatypeName);
-        confirmDatatypeIsUnusedByFunctions(txnId, dataverseName, datatypeName);
+        confirmDatatypeIsUnusedByDatatypes(txnId, database, dataverseName, datatypeName);
+        confirmDatatypeIsUnusedByDatasets(txnId, database, dataverseName, datatypeName);
+        confirmDatatypeIsUnusedByFunctions(txnId, database, dataverseName, datatypeName);
     }
 
-    private void confirmDatatypeIsUnusedByDatasets(TxnId txnId, DataverseName dataverseName, String datatypeName)
-            throws AlgebricksException {
+    private void confirmDatatypeIsUnusedByDatasets(TxnId txnId, String database, DataverseName dataverseName,
+            String datatypeName) throws AlgebricksException {
         // If any dataset uses this type, throw an error
         List<Dataset> datasets = getAllDatasets(txnId);
         for (Dataset dataset : datasets) {
@@ -1454,15 +1468,16 @@
 
         // additionally, if a view uses this type, throw an error
         // Note: for future use. currently views don't have any type dependencies
-        confirmObjectIsUnusedByViewsImpl(datasets, null, DependencyKind.TYPE, dataverseName, datatypeName, null);
+        confirmObjectIsUnusedByViewsImpl(datasets, null, DependencyKind.TYPE, database, dataverseName, datatypeName,
+                null);
     }
 
-    private void confirmDatatypeIsUnusedByDatatypes(TxnId txnId, DataverseName dataverseName, String datatypeName)
-            throws AlgebricksException {
+    private void confirmDatatypeIsUnusedByDatatypes(TxnId txnId, String database, DataverseName dataverseName,
+            String datatypeName) throws AlgebricksException {
         // If any datatype uses this type, throw an error
         // TODO: Currently this loads all types into memory. This will need to be fixed
         // for large numbers of types
-        Datatype dataTypeToBeDropped = getDatatype(txnId, dataverseName, datatypeName);
+        Datatype dataTypeToBeDropped = getDatatype(txnId, database, dataverseName, datatypeName);
         assert dataTypeToBeDropped != null;
         IAType typeToBeDropped = dataTypeToBeDropped.getDatatype();
         List<Datatype> datatypes = getAllDatatypes(txnId);
@@ -1482,14 +1497,16 @@
         }
     }
 
-    private void confirmDatatypeIsUnusedByFunctions(TxnId txnId, DataverseName dataverseName, String dataTypeName)
-            throws AlgebricksException {
-        confirmObjectIsUnusedByFunctions(txnId, "datatype", DependencyKind.TYPE, dataverseName, dataTypeName, null);
+    private void confirmDatatypeIsUnusedByFunctions(TxnId txnId, String database, DataverseName dataverseName,
+            String dataTypeName) throws AlgebricksException {
+        confirmObjectIsUnusedByFunctions(txnId, "datatype", DependencyKind.TYPE, database, dataverseName, dataTypeName,
+                null);
     }
 
-    private void confirmFullTextFilterCanBeDeleted(TxnId txnId, DataverseName dataverseName, String fullTextFilterName)
-            throws AlgebricksException {
-        List<FullTextConfigMetadataEntity> configMetadataEntities = getDataverseFullTextConfigs(txnId, dataverseName);
+    private void confirmFullTextFilterCanBeDeleted(TxnId txnId, String database, DataverseName dataverseName,
+            String fullTextFilterName) throws AlgebricksException {
+        List<FullTextConfigMetadataEntity> configMetadataEntities =
+                getDataverseFullTextConfigs(txnId, database, dataverseName);
         for (FullTextConfigMetadataEntity configMetadataEntity : configMetadataEntities) {
             FullTextConfigDescriptor config = configMetadataEntity.getFullTextConfig();
             for (String filterName : config.getFilterNames()) {
@@ -1503,10 +1520,10 @@
         }
     }
 
-    private List<String> getNestedComplexDatatypeNamesForThisDatatype(TxnId txnId, DataverseName dataverseName,
-            String datatypeName) throws AlgebricksException {
+    private List<String> getNestedComplexDatatypeNamesForThisDatatype(TxnId txnId, String database,
+            DataverseName dataverseName, String datatypeName) throws AlgebricksException {
         // Return all field types that aren't builtin types
-        Datatype parentType = getDatatype(txnId, dataverseName, datatypeName);
+        Datatype parentType = getDatatype(txnId, database, dataverseName, datatypeName);
 
         List<IAType> subTypes = null;
         if (parentType.getDatatype().getTypeTag() == ATypeTag.OBJECT) {
@@ -1547,10 +1564,10 @@
     }
 
     @Override
-    public Index getIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
-            throws AlgebricksException {
+    public Index getIndex(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
+            String indexName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datasetName, indexName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datasetName, indexName);
             IndexTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getIndexTupleTranslator(txnId, this, false);
             IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1566,10 +1583,10 @@
     }
 
     @Override
-    public List<Index> getDatasetIndexes(TxnId txnId, DataverseName dataverseName, String datasetName)
+    public List<Index> getDatasetIndexes(TxnId txnId, String database, DataverseName dataverseName, String datasetName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datasetName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datasetName);
             IndexTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getIndexTupleTranslator(txnId, this, false);
             IValueExtractor<Index> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1582,10 +1599,10 @@
     }
 
     @Override
-    public Datatype getDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+    public Datatype getDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, datatypeName);
+            ITupleReference searchKey = createTuple(database, dataverseName, datatypeName);
             DatatypeTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getDataTypeTupleTranslator(txnId, this, false);
             IValueExtractor<Datatype> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -1619,14 +1636,15 @@
 
     @Override
     public Function getFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException {
-        List<Function> functions = getFunctionsImpl(txnId, createTuple(functionSignature.getDataverseName(),
+        List<Function> functions = getFunctionsImpl(txnId, createTuple(null, functionSignature.getDataverseName(),
                 functionSignature.getName(), Integer.toString(functionSignature.getArity())));
         return functions.isEmpty() ? null : functions.get(0);
     }
 
     @Override
-    public List<Function> getDataverseFunctions(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
-        return getFunctionsImpl(txnId, createTuple(dataverseName));
+    public List<Function> getDataverseFunctions(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
+        return getFunctionsImpl(txnId, createTuple(database, dataverseName));
     }
 
     private List<Function> getFunctionsImpl(TxnId txnId, ITupleReference searchKey) throws AlgebricksException {
@@ -1654,8 +1672,8 @@
         }
         try {
             // Delete entry from the 'function' dataset.
-            ITupleReference searchKey = createTuple(functionSignature.getDataverseName(), functionSignature.getName(),
-                    Integer.toString(functionSignature.getArity()));
+            ITupleReference searchKey = createTuple(null, functionSignature.getDataverseName(),
+                    functionSignature.getName(), Integer.toString(functionSignature.getArity()));
             // Searches the index for the tuple to be deleted. Acquires an S lock on the 'function' dataset.
             ITupleReference functionTuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getFunctionEntity().getIndex(), searchKey);
@@ -1868,21 +1886,52 @@
     }
 
     public static ITupleReference createTuple(DataverseName dataverseName, String... rest) {
+        //TODO(DB): remove this method after 'database' is enabled and all callers pass it explicitly
         return createTuple(dataverseName.getCanonicalForm(), rest);
     }
 
+    private static ITupleReference createTuple(String databaseName, DataverseName dataverseName, String... rest) {
+        //TODO(DB): pass mdIndexesProvider and use it instead of checking for null
+        if (databaseName == null) {
+            return createTuple(dataverseName.getCanonicalForm(), rest);
+        } else {
+            return createDatabaseTuple(databaseName, dataverseName, rest);
+        }
+    }
+
+    private static ITupleReference createDatabaseTuple(String databaseName, DataverseName dataverseName,
+            String... rest) {
+        try {
+            ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(2 + rest.length);
+            ISerializerDeserializer<AString> stringSerde =
+                    SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+            AMutableString aString = new AMutableString(databaseName);
+            tupleBuilder.addField(stringSerde, aString);
+            aString.setValue(dataverseName.getCanonicalForm());
+            tupleBuilder.addField(stringSerde, aString);
+            for (String s : rest) {
+                aString.setValue(s);
+                tupleBuilder.addField(stringSerde, aString);
+            }
+            ArrayTupleReference tuple = new ArrayTupleReference();
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+            return tuple;
+        } catch (HyracksDataException e) {
+            // This should never happen
+            throw new IllegalStateException("Failed to create search tuple", e);
+        }
+    }
+
     public static ITupleReference createTuple(String first, String... rest) {
         try {
             ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1 + rest.length);
             ISerializerDeserializer<AString> stringSerde =
                     SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
             AMutableString aString = new AMutableString(first);
-            stringSerde.serialize(aString, tupleBuilder.getDataOutput());
-            tupleBuilder.addFieldEndOffset();
+            tupleBuilder.addField(stringSerde, aString);
             for (String s : rest) {
                 aString.setValue(s);
-                stringSerde.serialize(aString, tupleBuilder.getDataOutput());
-                tupleBuilder.addFieldEndOffset();
+                tupleBuilder.addField(stringSerde, aString);
             }
             ArrayTupleReference tuple = new ArrayTupleReference();
             tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
@@ -1919,10 +1968,11 @@
     }
 
     @Override
-    public void dropAdapter(TxnId txnId, DataverseName dataverseName, String adapterName) throws AlgebricksException {
+    public void dropAdapter(TxnId txnId, String database, DataverseName dataverseName, String adapterName)
+            throws AlgebricksException {
         try {
             // Delete entry from the 'Adapter' dataset.
-            ITupleReference searchKey = createTuple(dataverseName, adapterName);
+            ITupleReference searchKey = createTuple(database, dataverseName, adapterName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'Adapter' dataset.
             ITupleReference datasetTuple =
@@ -1939,10 +1989,10 @@
     }
 
     @Override
-    public DatasourceAdapter getAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
+    public DatasourceAdapter getAdapter(TxnId txnId, String database, DataverseName dataverseName, String adapterName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, adapterName);
+            ITupleReference searchKey = createTuple(database, dataverseName, adapterName);
             DatasourceAdapterTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getAdapterTupleTranslator(false);
             List<DatasourceAdapter> results = new ArrayList<>();
@@ -1978,10 +2028,10 @@
     }
 
     @Override
-    public CompactionPolicy getCompactionPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
-            throws AlgebricksException {
+    public CompactionPolicy getCompactionPolicy(TxnId txnId, String database, DataverseName dataverseName,
+            String policyName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, policyName);
+            ITupleReference searchKey = createTuple(database, dataverseName, policyName);
             CompactionPolicyTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getCompactionPolicyTupleTranslator(false);
             List<CompactionPolicy> results = new ArrayList<>();
@@ -1998,10 +2048,10 @@
     }
 
     @Override
-    public List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, DataverseName dataverseName)
+    public List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             DatasourceAdapterTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getAdapterTupleTranslator(false);
             IValueExtractor<DatasourceAdapter> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2032,18 +2082,19 @@
     }
 
     @Override
-    public void dropLibrary(TxnId txnId, DataverseName dataverseName, String libraryName) throws AlgebricksException {
-        dropLibrary(txnId, dataverseName, libraryName, false);
+    public void dropLibrary(TxnId txnId, String database, DataverseName dataverseName, String libraryName)
+            throws AlgebricksException {
+        dropLibrary(txnId, database, dataverseName, libraryName, false);
     }
 
-    private void dropLibrary(TxnId txnId, DataverseName dataverseName, String libraryName, boolean force)
-            throws AlgebricksException {
+    private void dropLibrary(TxnId txnId, String database, DataverseName dataverseName, String libraryName,
+            boolean force) throws AlgebricksException {
         if (!force) {
-            confirmLibraryCanBeDeleted(txnId, dataverseName, libraryName);
+            confirmLibraryCanBeDeleted(txnId, database, dataverseName, libraryName);
         }
         try {
             // Delete entry from the 'Library' dataset.
-            ITupleReference searchKey = createTuple(dataverseName, libraryName);
+            ITupleReference searchKey = createTuple(database, dataverseName, libraryName);
             // Searches the index for the tuple to be deleted. Acquires an S lock on the 'Library' dataset.
             ITupleReference datasetTuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getLibraryEntity().getIndex(), searchKey);
@@ -2059,9 +2110,10 @@
     }
 
     @Override
-    public Library getLibrary(TxnId txnId, DataverseName dataverseName, String libraryName) throws AlgebricksException {
+    public Library getLibrary(TxnId txnId, String database, DataverseName dataverseName, String libraryName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, libraryName);
+            ITupleReference searchKey = createTuple(database, dataverseName, libraryName);
             LibraryTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getLibraryTupleTranslator(false);
             List<Library> results = new ArrayList<>();
             IValueExtractor<Library> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2098,10 +2150,10 @@
     }
 
     @Override
-    public FeedPolicyEntity getFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
+    public FeedPolicyEntity getFeedPolicy(TxnId txnId, String database, DataverseName dataverseName, String policyName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, policyName);
+            ITupleReference searchKey = createTuple(database, dataverseName, policyName);
             FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
             List<FeedPolicyEntity> results = new ArrayList<>();
             IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2133,10 +2185,10 @@
     }
 
     @Override
-    public List<FeedConnection> getFeedConnections(TxnId txnId, DataverseName dataverseName, String feedName)
-            throws AlgebricksException {
+    public List<FeedConnection> getFeedConnections(TxnId txnId, String database, DataverseName dataverseName,
+            String feedName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, feedName);
+            ITupleReference searchKey = createTuple(database, dataverseName, feedName);
             FeedConnectionTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getFeedConnectionTupleTranslator(false);
             List<FeedConnection> results = new ArrayList<>();
@@ -2150,10 +2202,10 @@
     }
 
     @Override
-    public FeedConnection getFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName,
+    public FeedConnection getFeedConnection(TxnId txnId, String database, DataverseName dataverseName, String feedName,
             String datasetName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
+            ITupleReference searchKey = createTuple(database, dataverseName, feedName, datasetName);
             FeedConnectionTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getFeedConnectionTupleTranslator(false);
             List<FeedConnection> results = new ArrayList<>();
@@ -2170,10 +2222,10 @@
     }
 
     @Override
-    public void dropFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
-            throws AlgebricksException {
+    public void dropFeedConnection(TxnId txnId, String database, DataverseName dataverseName, String feedName,
+            String datasetName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, feedName, datasetName);
+            ITupleReference searchKey = createTuple(database, dataverseName, feedName, datasetName);
             ITupleReference tuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getFeedConnectionEntity().getIndex(), searchKey);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getFeedConnectionEntity().getIndex(), tuple);
@@ -2205,9 +2257,10 @@
     }
 
     @Override
-    public Feed getFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException {
+    public Feed getFeed(TxnId txnId, String database, DataverseName dataverseName, String feedName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, feedName);
+            ITupleReference searchKey = createTuple(database, dataverseName, feedName);
             FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
             List<Feed> results = new ArrayList<>();
             IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2222,9 +2275,9 @@
     }
 
     @Override
-    public List<Feed> getFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public List<Feed> getFeeds(TxnId txnId, String database, DataverseName dataverseName) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             FeedTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedTupleTranslator(false);
             List<Feed> results = new ArrayList<>();
             IValueExtractor<Feed> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2236,9 +2289,10 @@
     }
 
     @Override
-    public void dropFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException {
+    public void dropFeed(TxnId txnId, String database, DataverseName dataverseName, String feedName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, feedName);
+            ITupleReference searchKey = createTuple(database, dataverseName, feedName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'nodegroup' dataset.
             ITupleReference tuple = getTupleToBeDeleted(txnId, mdIndexesProvider.getFeedEntity().getIndex(), searchKey);
@@ -2253,9 +2307,10 @@
     }
 
     @Override
-    public void dropFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName) throws AlgebricksException {
+    public void dropFeedPolicy(TxnId txnId, String database, DataverseName dataverseName, String policyName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, policyName);
+            ITupleReference searchKey = createTuple(database, dataverseName, policyName);
             ITupleReference tuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getFeedPolicyEntity().getIndex(), searchKey);
             deleteTupleFromIndex(txnId, mdIndexesProvider.getFeedPolicyEntity().getIndex(), tuple);
@@ -2270,10 +2325,10 @@
     }
 
     @Override
-    public List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, DataverseName dataverseName)
+    public List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             FeedPolicyTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getFeedPolicyTupleTranslator(false);
             IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<FeedPolicyEntity> results = new ArrayList<>();
@@ -2305,7 +2360,8 @@
     @Override
     public List<ExternalFile> getExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
+            ITupleReference searchKey =
+                    createTuple(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName());
             ExternalFileTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getExternalFileTupleTranslator(false);
             IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2319,11 +2375,11 @@
     }
 
     @Override
-    public void dropExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, int fileNumber)
-            throws AlgebricksException {
+    public void dropExternalFile(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
+            int fileNumber) throws AlgebricksException {
         try {
             // Delete entry from the 'ExternalFile' dataset.
-            ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
+            ITupleReference searchKey = createExternalFileSearchTuple(database, dataverseName, datasetName, fileNumber);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'ExternalFile' dataset.
             ITupleReference datasetTuple =
@@ -2344,15 +2400,15 @@
         List<ExternalFile> files = getExternalFiles(txnId, dataset);
         // loop through files and delete them
         for (int i = 0; i < files.size(); i++) {
-            dropExternalFile(txnId, files.get(i).getDataverseName(), files.get(i).getDatasetName(),
-                    files.get(i).getFileNumber());
+            dropExternalFile(txnId, files.get(i).getDatabaseName(), files.get(i).getDataverseName(),
+                    files.get(i).getDatasetName(), files.get(i).getFileNumber());
         }
     }
 
     // This method is used to create a search tuple for external data file since the
     // search tuple has an int value
-    public ITupleReference createExternalFileSearchTuple(DataverseName dataverseName, String datasetName,
-            int fileNumber) throws HyracksDataException {
+    private ITupleReference createExternalFileSearchTuple(String database, DataverseName dataverseName,
+            String datasetName, int fileNumber) throws HyracksDataException {
         ISerializerDeserializer<AString> stringSerde =
                 SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
         ISerializerDeserializer<AInt32> intSerde =
@@ -2361,6 +2417,12 @@
         AMutableString aString = new AMutableString("");
         ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(3);
 
+        // database field
+        if (mdIndexesProvider.isUsingDatabase()) {
+            aString.setValue(database);
+            stringSerde.serialize(aString, tupleBuilder.getDataOutput());
+            tupleBuilder.addFieldEndOffset();
+        }
         // dataverse field
         aString.setValue(dataverseName.getCanonicalForm());
         stringSerde.serialize(aString, tupleBuilder.getDataOutput());
@@ -2381,10 +2443,10 @@
     }
 
     @Override
-    public ExternalFile getExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName,
+    public ExternalFile getExternalFile(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
             Integer fileNumber) throws AlgebricksException {
         try {
-            ITupleReference searchKey = createExternalFileSearchTuple(dataverseName, datasetName, fileNumber);
+            ITupleReference searchKey = createExternalFileSearchTuple(database, dataverseName, datasetName, fileNumber);
             ExternalFileTupleTranslator tupleReaderWriter =
                     tupleTranslatorProvider.getExternalFileTupleTranslator(false);
             IValueExtractor<ExternalFile> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2418,19 +2480,20 @@
     }
 
     @Override
-    public void dropSynonym(TxnId txnId, DataverseName dataverseName, String synonymName) throws AlgebricksException {
-        dropSynonym(txnId, dataverseName, synonymName, false);
+    public void dropSynonym(TxnId txnId, String database, DataverseName dataverseName, String synonymName)
+            throws AlgebricksException {
+        dropSynonym(txnId, database, dataverseName, synonymName, false);
     }
 
-    private void dropSynonym(TxnId txnId, DataverseName dataverseName, String synonymName, boolean force)
-            throws AlgebricksException {
+    private void dropSynonym(TxnId txnId, String database, DataverseName dataverseName, String synonymName,
+            boolean force) throws AlgebricksException {
         if (!force) {
-            confirmSynonymCanBeDeleted(txnId, dataverseName, synonymName);
+            confirmSynonymCanBeDeleted(txnId, database, dataverseName, synonymName);
         }
 
         try {
             // Delete entry from the 'Synonym' dataset.
-            ITupleReference searchKey = createTuple(dataverseName, synonymName);
+            ITupleReference searchKey = createTuple(database, dataverseName, synonymName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'Synonym' dataset.
             ITupleReference synonymTuple =
@@ -2446,26 +2509,29 @@
         }
     }
 
-    private void confirmSynonymCanBeDeleted(TxnId txnId, DataverseName dataverseName, String synonymName)
-            throws AlgebricksException {
-        confirmSynonymIsUnusedByFunctions(txnId, dataverseName, synonymName);
-        confirmSynonymIsUnusedByViews(txnId, dataverseName, synonymName);
+    private void confirmSynonymCanBeDeleted(TxnId txnId, String database, DataverseName dataverseName,
+            String synonymName) throws AlgebricksException {
+        confirmSynonymIsUnusedByFunctions(txnId, database, dataverseName, synonymName);
+        confirmSynonymIsUnusedByViews(txnId, database, dataverseName, synonymName);
     }
 
-    private void confirmSynonymIsUnusedByFunctions(TxnId txnId, DataverseName dataverseName, String synonymName)
-            throws AlgebricksException {
-        confirmObjectIsUnusedByFunctions(txnId, "synonym", DependencyKind.SYNONYM, dataverseName, synonymName, null);
+    private void confirmSynonymIsUnusedByFunctions(TxnId txnId, String database, DataverseName dataverseName,
+            String synonymName) throws AlgebricksException {
+        confirmObjectIsUnusedByFunctions(txnId, "synonym", DependencyKind.SYNONYM, database, dataverseName, synonymName,
+                null);
     }
 
-    private void confirmSynonymIsUnusedByViews(TxnId txnId, DataverseName dataverseName, String synonymName)
-            throws AlgebricksException {
-        confirmObjectIsUnusedByViews(txnId, "synonym", DependencyKind.SYNONYM, dataverseName, synonymName, null);
+    private void confirmSynonymIsUnusedByViews(TxnId txnId, String database, DataverseName dataverseName,
+            String synonymName) throws AlgebricksException {
+        confirmObjectIsUnusedByViews(txnId, "synonym", DependencyKind.SYNONYM, database, dataverseName, synonymName,
+                null);
     }
 
     @Override
-    public Synonym getSynonym(TxnId txnId, DataverseName dataverseName, String synonymName) throws AlgebricksException {
+    public Synonym getSynonym(TxnId txnId, String database, DataverseName dataverseName, String synonymName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName, synonymName);
+            ITupleReference searchKey = createTuple(database, dataverseName, synonymName);
             SynonymTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getSynonymTupleTranslator(false);
             List<Synonym> results = new ArrayList<>();
             IValueExtractor<Synonym> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
@@ -2480,9 +2546,10 @@
     }
 
     @Override
-    public List<Synonym> getDataverseSynonyms(TxnId txnId, DataverseName dataverseName) throws AlgebricksException {
+    public List<Synonym> getDataverseSynonyms(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException {
         try {
-            ITupleReference searchKey = createTuple(dataverseName);
+            ITupleReference searchKey = createTuple(database, dataverseName);
             SynonymTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getSynonymTupleTranslator(false);
             IValueExtractor<Synonym> valueExtractor = new MetadataEntityValueExtractor<>(tupleReaderWriter);
             List<Synonym> results = new ArrayList<>();
@@ -2498,7 +2565,8 @@
         try {
             // This method will delete previous entry of the dataset and insert the new one
             // Delete entry from the 'datasets' dataset.
-            ITupleReference searchKey = createTuple(dataset.getDataverseName(), dataset.getDatasetName());
+            ITupleReference searchKey =
+                    createTuple(dataset.getDatabaseName(), dataset.getDataverseName(), dataset.getDatasetName());
             // Searches the index for the tuple to be deleted. Acquires an S lock on the 'dataset' dataset.
             ITupleReference datasetTuple =
                     getTupleToBeDeleted(txnId, mdIndexesProvider.getDatasetEntity().getIndex(), searchKey);
@@ -2519,7 +2587,7 @@
 
     @Override
     public void updateLibrary(TxnId txnId, Library library) throws AlgebricksException {
-        dropLibrary(txnId, library.getDataverseName(), library.getName(), true);
+        dropLibrary(txnId, library.getDatabaseName(), library.getDataverseName(), library.getName(), true);
         addLibrary(txnId, library);
     }
 
@@ -2531,7 +2599,7 @@
 
     @Override
     public void updateDatatype(TxnId txnId, Datatype datatype) throws AlgebricksException {
-        dropDatatype(txnId, datatype.getDataverseName(), datatype.getDatatypeName(), true);
+        dropDatatype(txnId, datatype.getDatabaseName(), datatype.getDataverseName(), datatype.getDatatypeName(), true);
         addDatatype(txnId, datatype);
     }
 
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
index 561a4fa..60f457f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
@@ -60,21 +60,21 @@
     /**
      * Begins a local transaction against the metadata.
      *
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void beginTransaction(TxnId txnId) throws RemoteException;
 
     /**
      * Commits a local transaction against the metadata.
      *
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void commitTransaction(TxnId txnId) throws RemoteException;
 
     /**
      * Aborts a local transaction against the metadata.
      *
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void abortTransaction(TxnId txnId) throws RemoteException;
 
@@ -88,7 +88,7 @@
      *            Dataverse instance to be inserted.
      * @throws AlgebricksException
      *             For example, if the dataverse already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException, RemoteException;
 
@@ -101,7 +101,7 @@
      * @return A list of dataverse instances.
      * @throws AlgebricksException
      *             For example, if the dataverse does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     List<Dataverse> getDataverses(TxnId txnId) throws AlgebricksException, RemoteException;
 
@@ -116,9 +116,10 @@
      * @return A dataverse instance.
      * @throws AlgebricksException
      *             For example, if the dataverse does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Dataverse getDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
+    Dataverse getDataverse(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException, RemoteException;
 
     /**
      * Retrieves all datasets belonging to the given dataverse, acquiring local
@@ -132,7 +133,7 @@
      * @throws AlgebricksException
      *             For example, if the dataverse does not exist. RemoteException
      */
-    List<Dataset> getDataverseDatasets(TxnId txnId, DataverseName dataverseName)
+    List<Dataset> getDataverseDatasets(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -147,7 +148,8 @@
      * @throws AlgebricksException
      *             For example, if the dataverse does not exist.
      */
-    void dropDataverse(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
+    void dropDataverse(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException, RemoteException;
 
     /**
      * Returns {@code true} if given dataverse is not empty
@@ -157,7 +159,8 @@
      * @param dataverseName
      *            Name of the dataverse
      */
-    boolean isDataverseNotEmpty(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
+    boolean isDataverseNotEmpty(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException, RemoteException;
 
     /**
      * Inserts a new dataset into the metadata, acquiring local locks on behalf of
@@ -169,7 +172,7 @@
      *            Dataset instance to be inserted.
      * @throws AlgebricksException
      *             For example, if the dataset already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addDataset(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException;
 
@@ -186,9 +189,9 @@
      * @return A dataset instance.
      * @throws AlgebricksException
      *             For example, if the dataset does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Dataset getDataset(TxnId txnId, DataverseName dataverseName, String datasetName)
+    Dataset getDataset(TxnId txnId, String database, DataverseName dataverseName, String datasetName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -204,9 +207,9 @@
      * @return A list of Index instances.
      * @throws AlgebricksException
      *             For example, if the dataset and/or dataverse does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<Index> getDatasetIndexes(TxnId txnId, DataverseName dataverseName, String datasetName)
+    List<Index> getDatasetIndexes(TxnId txnId, String database, DataverseName dataverseName, String datasetName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -223,9 +226,9 @@
      *            If true, forces drop the dataset. Setting it to true could make the metadata inconsistent.
      * @throws AlgebricksException
      *             For example, if the dataset and/or dataverse does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropDataset(TxnId txnId, DataverseName dataverseName, String datasetName, boolean force)
+    void dropDataset(TxnId txnId, String database, DataverseName dataverseName, String datasetName, boolean force)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -239,7 +242,7 @@
      *            Index instance to be inserted.
      * @throws AlgebricksException
      *             For example, if the index already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addIndex(TxnId txnId, Index index) throws AlgebricksException, RemoteException;
 
@@ -253,13 +256,14 @@
      *            Name of the dataverse holding the given dataset.
      * @param datasetName
      *            Name of the dataset holding the index.
-     * @indexName Name of the index to retrieve.
+     * @param indexName
+     *            Name of the index to retrieve.
      * @return An Index instance.
      * @throws AlgebricksException
      *             For example, if the index does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Index getIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
+    Index getIndex(TxnId txnId, String database, DataverseName dataverseName, String datasetName, String indexName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -272,12 +276,13 @@
      *            Name of the dataverse holding the given dataset.
      * @param datasetName
      *            Name of the dataset holding the index.
-     * @indexName Name of the index to retrieve.
+     * @param indexName
+     *            Name of the index to retrieve.
      * @throws AlgebricksException
      *             For example, if the index does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropIndex(TxnId txnId, DataverseName dataverseName, String datasetName, String indexName)
+    void dropIndex(TxnId txnId, String database, DataverseName dataverseName, String datasetName, String indexName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -290,7 +295,7 @@
      *            Datatype instance to be inserted.
      * @throws AlgebricksException
      *             For example, if the datatype already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addDatatype(TxnId txnId, Datatype datatype) throws AlgebricksException, RemoteException;
 
@@ -307,9 +312,9 @@
      * @return A datatype instance.
      * @throws AlgebricksException
      *             For example, if the datatype does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Datatype getDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+    Datatype getDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -325,9 +330,9 @@
      * @throws AlgebricksException
      *             For example, if there are still datasets using the type to be
      *             deleted.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropDatatype(TxnId txnId, DataverseName dataverseName, String datatypeName)
+    void dropDatatype(TxnId txnId, String database, DataverseName dataverseName, String datatypeName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -341,7 +346,7 @@
      * @param modificationOp
      * @throws AlgebricksException
      *             For example, if the node group already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void modifyNodeGroup(TxnId txnId, NodeGroup nodeGroup,
             AbstractIndexModificationOperationCallback.Operation modificationOp)
@@ -357,7 +362,7 @@
      *            Name of node group to be retrieved.
      * @throws AlgebricksException
      *             For example, if the node group does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     NodeGroup getNodeGroup(TxnId txnId, String nodeGroupName) throws AlgebricksException, RemoteException;
 
@@ -376,7 +381,7 @@
      * @throws AlgebricksException
      *             For example, there are still datasets partitioned on the node
      *             group to be deleted.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     boolean dropNodegroup(TxnId txnId, String nodeGroupName, boolean failSilently)
             throws AlgebricksException, RemoteException;
@@ -391,7 +396,7 @@
      *            Node instance to be inserted.
      * @throws AlgebricksException
      *             For example, if the node already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addNode(TxnId txnId, Node node) throws AlgebricksException, RemoteException;
 
@@ -402,7 +407,7 @@
      *            An instance of functionSignature representing the function
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     Function getFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException, RemoteException;
 
@@ -417,9 +422,9 @@
      * @return A list of function instances.
      * @throws AlgebricksException
      *             For example, if the dataverse does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<Function> getDataverseFunctions(TxnId txnId, DataverseName dataverseName)
+    List<Function> getDataverseFunctions(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -433,7 +438,7 @@
      * @throws AlgebricksException
      *             For example, there are still datasets partitioned on the node
      *             group to be deleted.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void dropFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException, RemoteException;
 
@@ -445,7 +450,7 @@
      * @throws AlgebricksException
      *             for example, if the function already exists or refers to an
      *             unknown function
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addFunction(TxnId txnId, Function function) throws AlgebricksException, RemoteException;
 
@@ -470,8 +475,8 @@
      * @throws AlgebricksException
      *              For example, if the filter doesn't exist
      */
-    FullTextFilterMetadataEntity getFullTextFilter(TxnId txnId, DataverseName dataverseName, String filterName)
-            throws RemoteException, AlgebricksException;
+    FullTextFilterMetadataEntity getFullTextFilter(TxnId txnId, String database, DataverseName dataverseName,
+            String filterName) throws RemoteException, AlgebricksException;
 
     /**
      * @param txnId
@@ -483,7 +488,7 @@
      * @throws AlgebricksException
      *              For example, if ifExists is set to false and the filter doesn't exist
      */
-    void dropFullTextFilter(TxnId txnId, DataverseName dataverseName, String filterName)
+    void dropFullTextFilter(TxnId txnId, String database, DataverseName dataverseName, String filterName)
             throws RemoteException, AlgebricksException;
 
     /**
@@ -507,8 +512,8 @@
      * @throws AlgebricksException
      *              For example, if the full-text config doesn't exist
      */
-    FullTextConfigMetadataEntity getFullTextConfig(TxnId txnId, DataverseName dataverseName, String configName)
-            throws AlgebricksException, RemoteException;
+    FullTextConfigMetadataEntity getFullTextConfig(TxnId txnId, String database, DataverseName dataverseName,
+            String configName) throws AlgebricksException, RemoteException;
 
     /**
      * @param txnId
@@ -520,7 +525,7 @@
      * @throws AlgebricksException
      *              For example, if ifExists is set to false and the config doesn't exist
      */
-    void dropFullTextConfig(TxnId txnId, DataverseName dataverseName, String configName)
+    void dropFullTextConfig(TxnId txnId, String database, DataverseName dataverseName, String configName)
             throws RemoteException, AlgebricksException;
 
     /**
@@ -529,9 +534,9 @@
      * @return List<Adapter> A list containing the adapters in the specified
      *         dataverse
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, DataverseName dataverseName)
+    List<DatasourceAdapter> getDataverseAdapters(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -540,9 +545,9 @@
      * @param adapterName
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    DatasourceAdapter getAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
+    DatasourceAdapter getAdapter(TxnId txnId, String database, DataverseName dataverseName, String adapterName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -557,9 +562,9 @@
      *            Name of adapter to be deleted. AlgebricksException for example, if
      *            the adapter does not exist.
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropAdapter(TxnId txnId, DataverseName dataverseName, String adapterName)
+    void dropAdapter(TxnId txnId, String database, DataverseName dataverseName, String adapterName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -569,7 +574,7 @@
      *            Adapter to be inserted
      * @throws AlgebricksException
      *             for example, if the adapter already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addAdapter(TxnId txnId, DatasourceAdapter adapter) throws AlgebricksException, RemoteException;
 
@@ -577,7 +582,7 @@
      * @param txnId
      * @param compactionPolicy
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addCompactionPolicy(TxnId txnId, CompactionPolicy compactionPolicy)
             throws AlgebricksException, RemoteException;
@@ -588,22 +593,22 @@
      * @param policy
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    CompactionPolicy getCompactionPolicy(TxnId txnId, DataverseName dataverseName, String policy)
+    CompactionPolicy getCompactionPolicy(TxnId txnId, String database, DataverseName dataverseName, String policy)
             throws AlgebricksException, RemoteException;
 
     /**
      * @param txnId
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void initializeDatasetIdFactory(TxnId txnId) throws AlgebricksException, RemoteException;
 
     /**
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     int getMostRecentDatasetId() throws AlgebricksException, RemoteException;
 
@@ -611,7 +616,7 @@
      * @param txnId
      * @param feed
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addFeed(TxnId txnId, Feed feed) throws AlgebricksException, RemoteException;
 
@@ -621,27 +626,29 @@
      * @param feedName
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Feed getFeed(TxnId txnId, DataverseName dataverseName, String feedName) throws AlgebricksException, RemoteException;
+    Feed getFeed(TxnId txnId, String database, DataverseName dataverseName, String feedName)
+            throws AlgebricksException, RemoteException;
 
-    List<Feed> getFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
+    List<Feed> getFeeds(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException, RemoteException;
 
     /**
      * @param txnId
      * @param dataverseName
      * @param feedName
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropFeed(TxnId txnId, DataverseName dataverseName, String feedName)
+    void dropFeed(TxnId txnId, String database, DataverseName dataverseName, String feedName)
             throws AlgebricksException, RemoteException;
 
     /**
      * @param txnId
      * @param feedPolicy
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addFeedPolicy(TxnId txnId, FeedPolicyEntity feedPolicy) throws AlgebricksException, RemoteException;
 
@@ -651,9 +658,9 @@
      * @param policy
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    FeedPolicyEntity getFeedPolicy(TxnId txnId, DataverseName dataverseName, String policy)
+    FeedPolicyEntity getFeedPolicy(TxnId txnId, String database, DataverseName dataverseName, String policy)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -668,9 +675,9 @@
      *            Name of library to be deleted. AlgebricksException for example, if
      *            the library does not exist.
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropLibrary(TxnId txnId, DataverseName dataverseName, String libraryName)
+    void dropLibrary(TxnId txnId, String database, DataverseName dataverseName, String libraryName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -682,7 +689,7 @@
      *            Library to be added
      * @throws AlgebricksException
      *             for example, if the library is already added.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addLibrary(TxnId txnId, Library library) throws AlgebricksException, RemoteException;
 
@@ -695,9 +702,9 @@
      *            name of the library that is to be retrieved
      * @return Library
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Library getLibrary(TxnId txnId, DataverseName dataverseName, String libraryName)
+    Library getLibrary(TxnId txnId, String database, DataverseName dataverseName, String libraryName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -709,9 +716,9 @@
      *            dataverse associated with the library that is to be retrieved.
      * @return Library
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<Library> getDataverseLibraries(TxnId txnId, DataverseName dataverseName)
+    List<Library> getDataverseLibraries(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -719,9 +726,10 @@
      * @param dataverseName
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<Feed> getDataverseFeeds(TxnId txnId, DataverseName dataverseName) throws AlgebricksException, RemoteException;
+    List<Feed> getDataverseFeeds(TxnId txnId, String database, DataverseName dataverseName)
+            throws AlgebricksException, RemoteException;
 
     /**
      * delete a give feed (ingestion) policy
@@ -730,10 +738,10 @@
      * @param dataverseName
      * @param policyName
      * @return
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      * @throws AlgebricksException
      */
-    void dropFeedPolicy(TxnId txnId, DataverseName dataverseName, String policyName)
+    void dropFeedPolicy(TxnId txnId, String database, DataverseName dataverseName, String policyName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -741,9 +749,9 @@
      * @param dataverseName
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, DataverseName dataverseName)
+    List<FeedPolicyEntity> getDataverseFeedPolicies(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -753,7 +761,7 @@
      *            An object representing the external file entity
      * @throws AlgebricksException
      *             for example, if the file already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addExternalFile(TxnId txnId, ExternalFile externalFile) throws AlgebricksException, RemoteException;
 
@@ -763,7 +771,7 @@
      * @param dataset
      *            A dataset the files belong to.
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     List<ExternalFile> getExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException;
 
@@ -781,9 +789,9 @@
      * @param fileNumber
      *            the id number for the file to be deleted
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, int fileNumber)
+    void dropExternalFile(TxnId txnId, String database, DataverseName dataverseName, String datasetName, int fileNumber)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -795,7 +803,7 @@
      * @param dataset
      *            An external dataset the files belong to.
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void dropExternalFiles(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException;
 
@@ -814,10 +822,10 @@
      * @return An ExternalFile instance.
      * @throws AlgebricksException
      *             For example, if the index does not exist.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    ExternalFile getExternalFile(TxnId txnId, DataverseName dataverseName, String datasetName, Integer fileNumber)
-            throws AlgebricksException, RemoteException;
+    ExternalFile getExternalFile(TxnId txnId, String database, DataverseName dataverseName, String datasetName,
+            Integer fileNumber) throws AlgebricksException, RemoteException;
 
     /**
      * Adds a synonym, acquiring local locks on behalf of the given transaction id.
@@ -828,7 +836,7 @@
      *            Synonym to be added
      * @throws AlgebricksException
      *             for example, if the synonym is already added.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void addSynonym(TxnId txnId, Synonym synonym) throws AlgebricksException, RemoteException;
 
@@ -843,9 +851,9 @@
      *            Name of synonym to be deleted. AlgebricksException for example, if
      *            the synonym does not exist.
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    void dropSynonym(TxnId txnId, DataverseName dataverseName, String synonymName)
+    void dropSynonym(TxnId txnId, String database, DataverseName dataverseName, String synonymName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -857,9 +865,9 @@
      *            name of the synonym that is to be retrieved
      * @return Synonym
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    Synonym getSynonym(TxnId txnId, DataverseName dataverseName, String synonymName)
+    Synonym getSynonym(TxnId txnId, String database, DataverseName dataverseName, String synonymName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -871,9 +879,9 @@
      *            dataverse associated with synonyms that are to be retrieved.
      * @return list of synonyms
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
-    List<Synonym> getDataverseSynonyms(TxnId txnId, DataverseName dataverseName)
+    List<Synonym> getDataverseSynonyms(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException, RemoteException;
 
     /**
@@ -886,7 +894,7 @@
      *            updated Dataset instance.
      * @throws AlgebricksException
      *             For example, if the dataset already exists.
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     void updateDataset(TxnId txnId, Dataset dataset) throws AlgebricksException, RemoteException;
 
@@ -929,7 +937,7 @@
      * @param txnId
      * @param entity
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     <T extends IExtensionMetadataEntity> void addEntity(TxnId txnId, T entity)
             throws AlgebricksException, RemoteException;
@@ -940,7 +948,7 @@
      * @param txnId
      * @param entity
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     <T extends IExtensionMetadataEntity> void upsertEntity(TxnId txnId, T entity)
             throws AlgebricksException, RemoteException;
@@ -951,7 +959,7 @@
      * @param txnId
      * @param entity
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     <T extends IExtensionMetadataEntity> void deleteEntity(TxnId txnId, T entity)
             throws AlgebricksException, RemoteException;
@@ -964,19 +972,19 @@
      * @param searchKey
      * @return
      * @throws AlgebricksException
-     * @throws RemoteException
+     * @throws RemoteException remote exception
      */
     <T extends IExtensionMetadataEntity> List<T> getEntities(TxnId txnId, IExtensionMetadataSearchKey searchKey)
             throws AlgebricksException, RemoteException;
 
     void addFeedConnection(TxnId txnId, FeedConnection feedConnection) throws AlgebricksException, RemoteException;
 
-    FeedConnection getFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
-            throws AlgebricksException, RemoteException;
+    FeedConnection getFeedConnection(TxnId txnId, String database, DataverseName dataverseName, String feedName,
+            String datasetName) throws AlgebricksException, RemoteException;
 
-    void dropFeedConnection(TxnId txnId, DataverseName dataverseName, String feedName, String datasetName)
-            throws AlgebricksException, RemoteException;
+    void dropFeedConnection(TxnId txnId, String database, DataverseName dataverseName, String feedName,
+            String datasetName) throws AlgebricksException, RemoteException;
 
-    List<FeedConnection> getFeedConnections(TxnId txnId, DataverseName dataverseName, String feedName)
+    List<FeedConnection> getFeedConnections(TxnId txnId, String database, DataverseName dataverseName, String feedName)
             throws AlgebricksException, RemoteException;
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexesProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexesProvider.java
index 63b6abf..f7bf610 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexesProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexesProvider.java
@@ -108,4 +108,8 @@
                 getExternalFileEntity().getIndex(), getFeedConnectionEntity().getIndex(),
                 getFullTextConfigEntity().getIndex(), getFullTextFilterEntity().getIndex() };
     }
+
+    public boolean isUsingDatabase() {
+        return false;
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
index 3e637df..d3d1787 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
@@ -78,11 +78,11 @@
         return cache.dropDatatype(this);
     }
 
-    public static IAType getTypeFromTypeName(MetadataNode metadataNode, TxnId txnId, DataverseName dataverseName,
-            String typeName) throws AlgebricksException {
+    public static IAType getTypeFromTypeName(MetadataNode metadataNode, TxnId txnId, String database,
+            DataverseName dataverseName, String typeName) throws AlgebricksException {
         IAType type = BuiltinTypeMap.getBuiltinType(typeName);
         if (type == null) {
-            Datatype dt = metadataNode.getDatatype(txnId, dataverseName, typeName);
+            Datatype dt = metadataNode.getDatatype(txnId, database, dataverseName, typeName);
             if (dt != null) {
                 type = dt.getDatatype();
             }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
index 1ad331a..e0a0db0 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatatypeTupleTranslator.java
@@ -59,7 +59,7 @@
     @Override
     protected Datatype createMetadataEntityFromARecord(ARecord datatypeRecord) throws AlgebricksException {
         int databaseNameIndex = datatypeEntity.databaseNameIndex();
-        String databaseName;
+        String databaseName = null;
         if (databaseNameIndex >= 0) {
             databaseName = ((AString) datatypeRecord.getValueByPos(databaseNameIndex)).getStringValue();
         }
@@ -113,8 +113,8 @@
                             isMissable = isNullable;
                         }
 
-                        IAType fieldType =
-                                Datatype.getTypeFromTypeName(metadataNode, txnId, dataverseName, fieldTypeName);
+                        IAType fieldType = Datatype.getTypeFromTypeName(metadataNode, txnId, databaseName,
+                                dataverseName, fieldTypeName);
                         fieldTypes[fieldId] = TypeUtil.createQuantifiedType(fieldType, isNullable, isMissable);
                         fieldId++;
                     }
@@ -125,17 +125,19 @@
                     String unorderedlistTypeName = ((AString) derivedTypeRecord
                             .getValueByPos(MetadataRecordTypes.DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX))
                                     .getStringValue();
-                    return new Datatype(dataverseName, datatypeName, new AUnorderedListType(
-                            Datatype.getTypeFromTypeName(metadataNode, txnId, dataverseName, unorderedlistTypeName),
-                            datatypeName), isAnonymous);
+                    return new Datatype(dataverseName, datatypeName,
+                            new AUnorderedListType(Datatype.getTypeFromTypeName(metadataNode, txnId, databaseName,
+                                    dataverseName, unorderedlistTypeName), datatypeName),
+                            isAnonymous);
                 }
                 case ORDEREDLIST: {
                     String orderedlistTypeName = ((AString) derivedTypeRecord
                             .getValueByPos(MetadataRecordTypes.DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX))
                                     .getStringValue();
-                    return new Datatype(dataverseName, datatypeName, new AOrderedListType(
-                            Datatype.getTypeFromTypeName(metadataNode, txnId, dataverseName, orderedlistTypeName),
-                            datatypeName), isAnonymous);
+                    return new Datatype(
+                            dataverseName, datatypeName, new AOrderedListType(Datatype.getTypeFromTypeName(metadataNode,
+                                    txnId, databaseName, dataverseName, orderedlistTypeName), datatypeName),
+                            isAnonymous);
                 }
                 default:
                     throw new UnsupportedOperationException("Unsupported derived type: " + tag);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
index 8373574..a178b1a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslator.java
@@ -148,7 +148,7 @@
     @Override
     protected Index createMetadataEntityFromARecord(ARecord indexRecord) throws AlgebricksException {
         int databaseNameIndex = indexEntity.databaseNameIndex();
-        String databaseName;
+        String databaseName = null;
         if (databaseNameIndex >= 0) {
             databaseName = ((AString) indexRecord.getValueByPos(databaseNameIndex)).getStringValue();
         }
@@ -294,7 +294,8 @@
                 case STRING:
                     // This is a simple element, place in a single-element list.
                     String typeName = ((AString) fieldTypeItem).getStringValue();
-                    IAType fieldType = Datatype.getTypeFromTypeName(metadataNode, txnId, dataverseName, typeName);
+                    IAType fieldType =
+                            Datatype.getTypeFromTypeName(metadataNode, txnId, databaseName, dataverseName, typeName);
                     searchKeyType.add(Collections.singletonList(fieldType));
                     break;
                 case ARRAY:
@@ -304,7 +305,8 @@
                     IACursor fieldTypeListCursor = fieldTypeList.getCursor();
                     while (fieldTypeListCursor.next()) {
                         typeName = ((AString) fieldTypeListCursor.get()).getStringValue();
-                        fieldTypes.add(Datatype.getTypeFromTypeName(metadataNode, txnId, dataverseName, typeName));
+                        fieldTypes.add(Datatype.getTypeFromTypeName(metadataNode, txnId, databaseName, dataverseName,
+                                typeName));
                     }
                     searchKeyType.add(fieldTypes);
                     break;
@@ -316,16 +318,17 @@
         if (searchKeyType.isEmpty()) {
             // if index key type information is not persisted, then we extract type information
             // from the record metadata
-            Dataset dataset = metadataNode.getDataset(txnId, dataverseName, datasetName);
+            Dataset dataset = metadataNode.getDataset(txnId, databaseName, dataverseName, datasetName);
             String datatypeName = dataset.getItemTypeName();
+            //TODO(DB): get 'database' of item type and meta type
             DataverseName datatypeDataverseName = dataset.getItemTypeDataverseName();
-            ARecordType recordDt =
-                    (ARecordType) metadataNode.getDatatype(txnId, datatypeDataverseName, datatypeName).getDatatype();
+            ARecordType recordDt = (ARecordType) metadataNode
+                    .getDatatype(txnId, null, datatypeDataverseName, datatypeName).getDatatype();
             String metatypeName = dataset.getMetaItemTypeName();
             DataverseName metatypeDataverseName = dataset.getMetaItemTypeDataverseName();
             ARecordType metaDt = null;
             if (metatypeName != null && metatypeDataverseName != null) {
-                metaDt = (ARecordType) metadataNode.getDatatype(txnId, metatypeDataverseName, metatypeName)
+                metaDt = (ARecordType) metadataNode.getDatatype(txnId, null, metatypeDataverseName, metatypeName)
                         .getDatatype();
             }
             recordDt = (ARecordType) MetadataManagerUtil.findTypeForDatasetWithoutType(recordDt, metaDt, dataset);
diff --git a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
index 07a7e33..9ab3e66 100644
--- a/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
+++ b/asterixdb/asterix-metadata/src/test/java/org/apache/asterix/metadata/entitytupletranslators/IndexTupleTranslatorTest.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.metadata.entitytupletranslators;
 
+import static org.mockito.ArgumentMatchers.isNull;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
@@ -79,10 +80,12 @@
                     Collections.singletonList(BuiltinType.AINT64), false, false, false, 0, OptionalBoolean.of(false));
 
             MetadataNode mockMetadataNode = mock(MetadataNode.class);
-            when(mockMetadataNode.getDatatype(any(), any(DataverseName.class), anyString())).thenReturn(new Datatype(
-                    dvTest, "d1",
-                    new ARecordType("", new String[] { "row_id" }, new IAType[] { BuiltinType.AINT64 }, true), true));
-            when(mockMetadataNode.getDataset(any(), any(DataverseName.class), anyString())).thenReturn(dataset);
+            when(mockMetadataNode.getDatatype(any(), isNull(), any(DataverseName.class), anyString()))
+                    .thenReturn(new Datatype(dvTest, "d1",
+                            new ARecordType("", new String[] { "row_id" }, new IAType[] { BuiltinType.AINT64 }, true),
+                            true));
+            when(mockMetadataNode.getDataset(any(), isNull(), any(DataverseName.class), anyString()))
+                    .thenReturn(dataset);
 
             IndexTupleTranslator idxTranslator =
                     new IndexTupleTranslator(null, mockMetadataNode, true, IndexEntity.of(false));