[ASTERIXDB-3259][MTD] Initial implementation of CREATE/DROP DATABASE

- user model changes: no
- storage format changes: no
- interface changes: no

Details:
Add the initial implementation of the CREATE DATABASE and DROP DATABASE
statements:
- validate the statements in AbstractLangTranslator (the System and Default
  databases cannot be created or dropped)
- handle CREATE_DATABASE and DATABASE_DROP in QueryTranslator
- implement dropDatabase() in MetadataNode
- move DEFAULT_DATAVERSE_NAME from MetadataBuiltinEntities to MetadataConstants
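
Example usage (illustrative only; the exact SQL++ syntax follows the new
CreateDatabaseStatement/DatabaseDropStatement grammar and is assumed here to
mirror the existing dataverse DDL; 'mydb' is a placeholder name):

  CREATE DATABASE mydb IF NOT EXISTS; -- no-op if mydb already exists
  DROP DATABASE mydb IF EXISTS;       -- no-op if mydb does not exist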

Change-Id: I015bf9f18f2213e1d586c5733887a44719b7cbc8
Reviewed-on: https://asterix-gerrit.ics.uci.edu/c/asterixdb/+/17822
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Ali Alsuliman <ali.al.solaiman@gmail.com>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index a54f24d..64b0a71 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -44,12 +44,14 @@
 import org.apache.asterix.lang.common.statement.AnalyzeDropStatement;
 import org.apache.asterix.lang.common.statement.AnalyzeStatement;
 import org.apache.asterix.lang.common.statement.CreateAdapterStatement;
+import org.apache.asterix.lang.common.statement.CreateDatabaseStatement;
 import org.apache.asterix.lang.common.statement.CreateDataverseStatement;
 import org.apache.asterix.lang.common.statement.CreateFeedStatement;
 import org.apache.asterix.lang.common.statement.CreateFunctionStatement;
 import org.apache.asterix.lang.common.statement.CreateLibraryStatement;
 import org.apache.asterix.lang.common.statement.CreateSynonymStatement;
 import org.apache.asterix.lang.common.statement.CreateViewStatement;
+import org.apache.asterix.lang.common.statement.DatabaseDropStatement;
 import org.apache.asterix.lang.common.statement.DatasetDecl;
 import org.apache.asterix.lang.common.statement.DataverseDropStatement;
 import org.apache.asterix.lang.common.statement.DeleteStatement;
@@ -192,11 +194,33 @@
                 }
                 break;
 
+            case CREATE_DATABASE: {
+                CreateDatabaseStatement dbCreateStmt = (CreateDatabaseStatement) stmt;
+                String dbName = dbCreateStmt.getDatabaseName().getValue();
+                invalidOperation = isSystemDatabase(dbName) || isDefaultDatabase(dbName);
+                if (invalidOperation) {
+                    message = String.format("Cannot create database: %s", dbName);
+                }
+                break;
+            }
+
+            case DATABASE_DROP: {
+                DatabaseDropStatement dbDropStmt = (DatabaseDropStatement) stmt;
+                String dbName = dbDropStmt.getDatabaseName().getValue();
+                invalidOperation = isSystemDatabase(dbName) || isDefaultDatabase(dbName);
+                if (invalidOperation) {
+                    message = String.format("Cannot drop database: %s", dbName);
+                }
+                break;
+            }
+
             case CREATE_DATAVERSE:
+                //TODO(DB): check that it is not the System database for all cases
                 CreateDataverseStatement dvCreateStmt = (CreateDataverseStatement) stmt;
                 dataverseName = dvCreateStmt.getDataverseName();
                 invalidOperation = FunctionConstants.ASTERIX_DV.equals(dataverseName)
-                        || FunctionConstants.ALGEBRICKS_DV.equals(dataverseName);
+                        || FunctionConstants.ALGEBRICKS_DV.equals(dataverseName) || isMetadataDataverse(dataverseName)
+                        || isDefaultDataverse(dataverseName);
                 if (invalidOperation) {
                     message = String.format(BAD_DATAVERSE_DDL_MESSAGE, "create", dataverseName);
                 }
@@ -205,7 +229,7 @@
             case DATAVERSE_DROP:
                 DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
                 dataverseName = dvDropStmt.getDataverseName();
-                invalidOperation = isMetadataDataverse(dataverseName);
+                invalidOperation = isMetadataDataverse(dataverseName) || isDefaultDataverse(dataverseName);
                 if (invalidOperation) {
                     message = String.format(BAD_DATAVERSE_DDL_MESSAGE, "drop", dataverseName);
                 }
@@ -402,7 +426,19 @@
         }
     }
 
+    protected static boolean isSystemDatabase(String databaseName) {
+        return MetadataConstants.SYSTEM_DATABASE.equals(databaseName);
+    }
+
+    protected static boolean isDefaultDatabase(String databaseName) {
+        return MetadataConstants.DEFAULT_DATABASE.equals(databaseName);
+    }
+
     protected static boolean isMetadataDataverse(DataverseName dataverseName) {
         return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverseName);
     }
+
+    protected static boolean isDefaultDataverse(DataverseName dataverseName) {
+        return MetadataConstants.DEFAULT_DATAVERSE_NAME.equals(dataverseName);
+    }
 }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 062be19..7ca021a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -119,6 +119,7 @@
 import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
 import org.apache.asterix.lang.common.statement.CopyStatement;
 import org.apache.asterix.lang.common.statement.CreateAdapterStatement;
+import org.apache.asterix.lang.common.statement.CreateDatabaseStatement;
 import org.apache.asterix.lang.common.statement.CreateDataverseStatement;
 import org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement;
 import org.apache.asterix.lang.common.statement.CreateFeedStatement;
@@ -129,6 +130,7 @@
 import org.apache.asterix.lang.common.statement.CreateLibraryStatement;
 import org.apache.asterix.lang.common.statement.CreateSynonymStatement;
 import org.apache.asterix.lang.common.statement.CreateViewStatement;
+import org.apache.asterix.lang.common.statement.DatabaseDropStatement;
 import org.apache.asterix.lang.common.statement.DatasetDecl;
 import org.apache.asterix.lang.common.statement.DataverseDecl;
 import org.apache.asterix.lang.common.statement.DataverseDropStatement;
@@ -173,6 +175,7 @@
 import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.CompactionPolicy;
+import org.apache.asterix.metadata.entities.Database;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
@@ -367,6 +370,9 @@
                     case DATAVERSE_DECL:
                         activeDataverse = handleUseDataverseStatement(metadataProvider, stmt);
                         break;
+                    case CREATE_DATABASE:
+                        handleCreateDatabaseStatement(metadataProvider, stmt, requestParameters);
+                        break;
                     case CREATE_DATAVERSE:
                         handleCreateDataverseStatement(metadataProvider, stmt, requestParameters);
                         break;
@@ -388,6 +394,9 @@
                     case NODEGROUP_DECL:
                         handleCreateNodeGroupStatement(metadataProvider, stmt);
                         break;
+                    case DATABASE_DROP:
+                        handleDatabaseDropStatement(metadataProvider, stmt, hcc, requestParameters);
+                        break;
                     case DATAVERSE_DROP:
                         handleDataverseDropStatement(metadataProvider, stmt, hcc, requestParameters);
                         break;
@@ -619,6 +628,50 @@
         }
     }
 
+    protected void handleCreateDatabaseStatement(MetadataProvider metadataProvider, Statement stmt,
+            IRequestParameters requestParameters) throws Exception {
+        CreateDatabaseStatement stmtCreateDatabase = (CreateDatabaseStatement) stmt;
+        String database = stmtCreateDatabase.getDatabaseName().getValue();
+        //TODO(DB): validate names
+        if (isCompileOnly()) {
+            return;
+        }
+        lockUtil.createDatabaseBegin(lockManager, metadataProvider.getLocks(), database);
+        try {
+            doCreateDatabaseStatement(metadataProvider, stmtCreateDatabase, requestParameters);
+        } finally {
+            metadataProvider.getLocks().unlock();
+        }
+    }
+
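+    // returns true if the database was created; returns false if it already exists and IF NOT EXISTS was specified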
+    protected boolean doCreateDatabaseStatement(MetadataProvider metadataProvider,
+            CreateDatabaseStatement stmtCreateDatabase, IRequestParameters requestParameters) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        try {
+            String databaseName = stmtCreateDatabase.getDatabaseName().getValue();
+            Database database =
+                    MetadataManager.INSTANCE.getDatabase(metadataProvider.getMetadataTxnContext(), databaseName);
+            if (database != null) {
+                if (stmtCreateDatabase.ifNotExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return false;
+                } else {
+                    //TODO(DB): change to a database-specific error code
+                    throw new CompilationException(ErrorCode.DATAVERSE_EXISTS, stmtCreateDatabase.getSourceLocation(),
+                            databaseName);
+                }
+            }
+            MetadataManager.INSTANCE.addDatabase(metadataProvider.getMetadataTxnContext(),
+                    new Database(databaseName, false, MetadataUtil.PENDING_NO_OP));
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            return true;
+        } catch (Exception e) {
+            abort(e, e, mdTxnCtx);
+            throw e;
+        }
+    }
+
     protected void handleCreateDataverseStatement(MetadataProvider metadataProvider, Statement stmt,
             IRequestParameters requestParameters) throws Exception {
         CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
@@ -1838,6 +1891,137 @@
         return typeMap.get(typeSignature);
     }
 
+    protected void handleDatabaseDropStatement(MetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        DatabaseDropStatement stmtDropDatabase = (DatabaseDropStatement) stmt;
+        SourceLocation sourceLoc = stmtDropDatabase.getSourceLocation();
+        String databaseName = stmtDropDatabase.getDatabaseName().getValue();
+        //TODO(DB): validate names
+
+        if (isSystemDatabase(databaseName) || isDefaultDatabase(databaseName)) {
+            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+                    databaseName + " database can't be dropped");
+        }
+        if (isCompileOnly()) {
+            return;
+        }
+        lockUtil.dropDatabaseBegin(lockManager, metadataProvider.getLocks(), databaseName);
+        try {
+            doDropDatabase(stmtDropDatabase, metadataProvider, hcc, requestParameters);
+        } finally {
+            metadataProvider.getLocks().unlock();
+        }
+    }
+
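+    // returns true if the database was dropped; returns false if it does not exist and IF EXISTS was specified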
+    protected boolean doDropDatabase(DatabaseDropStatement stmtDropDatabase, MetadataProvider metadataProvider,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        SourceLocation sourceLoc = stmtDropDatabase.getSourceLocation();
+        String databaseName = stmtDropDatabase.getDatabaseName().getValue();
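+        // track drop progress so the catch block knows whether a pending-drop record needs to be cleaned up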
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        List<FeedEventsListener> feedsToStop = new ArrayList<>();
+        List<JobSpecification> jobsToExecute = new ArrayList<>();
+        try {
+            Database database = MetadataManager.INSTANCE.getDatabase(mdTxnCtx, databaseName);
+            if (database == null) {
+                if (stmtDropDatabase.ifExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return false;
+                } else {
+                    throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, databaseName);
+                }
+            }
+
+            validateDatabaseStateBeforeDrop(metadataProvider, database, sourceLoc);
+
+            // #. prepare jobs which will drop corresponding feed storage
+            //TODO(DB):
+
+            // #. prepare jobs which will drop corresponding datasets with indexes
+            //TODO(DB):
+
+            // #. prepare jobs which will drop corresponding libraries
+            //TODO(DB):
+
+            // #. prepare jobs which will drop the database
+            //TODO(DB):
+
+            // #. mark PendingDropOp on the database record by
+            // first, deleting the database record from the 'Database' collection
+            // second, inserting the database record with the PendingDropOp value into the 'Database' collection
+            // Note: the delete operation fails if the database cannot be deleted due to metadata dependencies
+            MetadataManager.INSTANCE.dropDatabase(mdTxnCtx, databaseName);
+            MetadataManager.INSTANCE.addDatabase(mdTxnCtx,
+                    new Database(databaseName, database.isSystemDatabase(), MetadataUtil.PENDING_DROP_OP));
+
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            bActiveTxn = false;
+            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+            for (FeedEventsListener feedListener : feedsToStop) {
+                if (feedListener.getState() != ActivityState.STOPPED) {
+                    feedListener.stop(metadataProvider);
+                }
+                feedListener.unregister();
+            }
+
+            for (JobSpecification jobSpec : jobsToExecute) {
+                runJob(hcc, jobSpec);
+            }
+
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            bActiveTxn = true;
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+            // #. finally, delete the database
+            MetadataManager.INSTANCE.dropDatabase(mdTxnCtx, databaseName);
+
+            // drop all node groups that are no longer needed
+            //TODO(DB):
+
+            //TODO(DB): switch active database to the DEFAULT if the dropped database is the currently active one
+
+            //TODO(DB): validateDatabaseDatasetsStateAfterDrop
+
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            return true;
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //TODO(DB): switch active database to the DEFAULT if the dropped database is the currently active one
+
+                // #. execute compensation operations
+                // remove all the artifacts in the NCs
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec);
+                    }
+                } catch (Exception e2) {
+                    // do not throw the exception since the metadata still needs to be compensated
+                    e.addSuppressed(e2);
+                }
+
+                // remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                try {
+                    MetadataManager.INSTANCE.dropDatabase(mdTxnCtx, databaseName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in an inconsistent state: pending database("
+                            + databaseName + ") couldn't be removed from the metadata", e);
+                }
+            }
+            throw e;
+        }
+    }
+
     protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
         DataverseDropStatement stmtDropDataverse = (DataverseDropStatement) stmt;
@@ -1845,7 +2029,7 @@
         DataverseName dataverseName = stmtDropDataverse.getDataverseName();
         String database = MetadataUtil.resolveDatabase(null, dataverseName);
         metadataProvider.validateDataverseName(dataverseName, sourceLoc);
-        if (dataverseName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)
+        if (dataverseName.equals(MetadataConstants.DEFAULT_DATAVERSE_NAME)
                 || dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
             throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                     dataverseName + " " + dataverse() + " can't be dropped");
@@ -2022,6 +2206,11 @@
         return MetadataManager.INSTANCE.isDataverseNotEmpty(mdTxnCtx, database, dataverseName);
     }
 
+    protected void validateDatabaseStateBeforeDrop(MetadataProvider metadataProvider, Database database,
+            SourceLocation sourceLoc) throws AlgebricksException {
+        // may be overridden by product extensions for additional checks before dropping the database
+    }
+
     protected void validateDataverseStateBeforeDrop(MetadataProvider metadataProvider, Dataverse dataverse,
             SourceLocation sourceLoc) throws AlgebricksException {
         // may be overridden by product extensions for additional checks before dropping the dataverse
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
index 62d061e..bbdbe82 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
@@ -35,7 +35,6 @@
 import org.apache.asterix.common.utils.Servlets;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
@@ -229,7 +228,7 @@
         Dataset dataset;
         try {
             dataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName);
         } finally {
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             metadataProvider.getLocks().unlock();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
index baecf79..9cf4ec6 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveEventsListenerTest.java
@@ -43,11 +43,11 @@
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.common.metadata.IMetadataLockUtil;
+import org.apache.asterix.common.metadata.MetadataConstants;
 import org.apache.asterix.common.metadata.MetadataUtil;
 import org.apache.asterix.external.feed.watch.WaitForStateSubscriber;
 import org.apache.asterix.file.StorageComponentProvider;
 import org.apache.asterix.metadata.api.ICCExtensionManager;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Feed;
@@ -83,7 +83,7 @@
     static TestUserActor[] users;
     static String[] nodes = { "node1", "node2" };
     static ActiveNotificationHandler handler;
-    static DataverseName dataverseName = MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME;
+    static DataverseName dataverseName = MetadataConstants.DEFAULT_DATAVERSE_NAME;
     static String database = MetadataUtil.databaseFor(dataverseName);
     static String entityName = "entityName";
     static EntityId entityId = new EntityId(Feed.EXTENSION_NAME, database, dataverseName, entityName);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index ac4c761..2860813 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -109,7 +109,6 @@
 import org.apache.asterix.common.metadata.MetadataConstants;
 import org.apache.asterix.common.utils.Servlets;
 import org.apache.asterix.lang.sqlpp.util.SqlppStatementUtil;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.runtime.evaluators.common.NumberUtils;
 import org.apache.asterix.test.server.ITestServer;
 import org.apache.asterix.test.server.TestServerProvider;
@@ -2615,7 +2614,7 @@
             if (json != null) {
                 DataverseName dvName = DataverseName.createFromCanonicalForm(json.get("DataverseName").asText());
                 if (!dvName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
-                        && !dvName.equals(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME)) {
+                        && !dvName.equals(MetadataConstants.DEFAULT_DATAVERSE_NAME)) {
                     outDataverses.add(dvName);
                 }
             }
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
index 63b20f0..c8cccfa 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
@@ -28,7 +28,6 @@
 import org.apache.asterix.common.metadata.MetadataConstants;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Index;
@@ -80,7 +79,7 @@
             // create the dataset
             TestDataUtil.createDataset(datasetName, fields, PKFieldName);
             final Dataset dataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName);
             Assert.assertNotNull(dataset);
 
             Index index;
@@ -89,7 +88,7 @@
             // create a secondary primary index
             TestDataUtil.createPrimaryIndex(datasetName, primaryIndexName);
             index = metadataProvider.getIndex(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, primaryIndexName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName, primaryIndexName);
             Assert.assertNotNull(index);
             jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null);
             jobSpecification.getOperatorMap().values().forEach(iOperatorDescriptor -> {
@@ -99,7 +98,7 @@
             // create a normal BTree index
             TestDataUtil.createSecondaryBTreeIndex(datasetName, secondaryIndexName, SKFieldName);
             index = metadataProvider.getIndex(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, secondaryIndexName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName, secondaryIndexName);
             Assert.assertNotNull(index);
             jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null);
             final long numOfSortOperators = jobSpecification.getOperatorMap().values().stream()
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
index d5e85ee..0900f03 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
@@ -40,7 +40,6 @@
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.api.IMetadataIndex;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.metadata.bootstrap.NodeGroupEntity;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
@@ -122,7 +121,7 @@
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         try {
             sourceDataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } finally {
             metadataProvider.getLocks().unlock();
@@ -173,7 +172,7 @@
         Dataset sourceDataset;
         try {
             sourceDataset = metadataProvider.findDataset(MetadataConstants.DEFAULT_DATABASE,
-                    MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
+                    MetadataConstants.DEFAULT_DATAVERSE_NAME, datasetName);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } finally {
             metadataProvider.getLocks().unlock();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
index fd40436..6492eb2 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
@@ -41,6 +41,7 @@
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.common.metadata.DataverseName;
+import org.apache.asterix.common.metadata.MetadataConstants;
 import org.apache.asterix.lang.common.base.IParser;
 import org.apache.asterix.lang.common.base.IParserFactory;
 import org.apache.asterix.lang.common.base.IQueryRewriter;
@@ -56,7 +57,6 @@
 import org.apache.asterix.lang.sqlpp.rewrites.SqlppRewriterFactory;
 import org.apache.asterix.lang.sqlpp.util.SqlppAstPrintUtil;
 import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
-import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Dataverse;
@@ -271,7 +271,7 @@
                 return dv.getDataverseName();
             }
         }
-        return MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME;
+        return MetadataConstants.DEFAULT_DATAVERSE_NAME;
     }
 
     // Rewrite queries.
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
index ce35c78..3ac7d9d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/MetadataConstants.java
@@ -39,6 +39,10 @@
 
     // Name of the dataverse the metadata lives in.
     public static final DataverseName METADATA_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Metadata");
+
+    // Name of the pre-defined default dataverse
+    public static final DataverseName DEFAULT_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Default");
+
     // Name of the node group where metadata is stored on.
     public static final String METADATA_NODEGROUP_NAME = "MetadataGroup";
 
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 9d32c61..37053bf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -369,11 +369,6 @@
     }
 
     @Override
-    public void dropDatabase(TxnId txnId, String databaseName) throws AlgebricksException, RemoteException {
-        //TODO(DB): implement
-    }
-
-    @Override
     public void addDataverse(TxnId txnId, Dataverse dataverse) throws AlgebricksException {
         try {
             DataverseTupleTranslator tupleReaderWriter = tupleTranslatorProvider.getDataverseTupleTranslator(true);
@@ -702,6 +697,27 @@
     }
 
     @Override
+    public void dropDatabase(TxnId txnId, String databaseName) throws AlgebricksException, RemoteException {
+        try {
+            //TODO(DB): delete from other metadata collections
+
+            // delete the database entry from the 'Database' collection
+            // as a side effect, acquires an S lock on the 'Database' collection on behalf of txnId
+            ITupleReference searchKey = createTuple(databaseName);
+            ITupleReference tuple =
+                    getTupleToBeDeleted(txnId, mdIndexesProvider.getDatabaseEntity().getIndex(), searchKey);
+            deleteTupleFromIndex(txnId, mdIndexesProvider.getDatabaseEntity().getIndex(), tuple);
+        } catch (HyracksDataException e) {
+            if (e.matches(ErrorCode.UPDATE_OR_DELETE_NON_EXISTENT_KEY)) {
+                throw new AsterixException(org.apache.asterix.common.exceptions.ErrorCode.UNKNOWN_DATAVERSE, e,
+                        databaseName);
+            } else {
+                throw new AlgebricksException(e);
+            }
+        }
+    }
+
+    @Override
     public void dropDataverse(TxnId txnId, String database, DataverseName dataverseName) throws AlgebricksException {
         try {
             confirmDataverseCanBeDeleted(txnId, database, dataverseName);
@@ -1216,12 +1232,10 @@
 
     private void confirmDataverseCanBeDeleted(TxnId txnId, String database, DataverseName dataverseName)
             throws AlgebricksException {
-        // If a dataset from a DIFFERENT dataverse
-        // uses a type from this dataverse
-        // throw an error
+        // If a dataset from a DIFFERENT dataverse uses a type from this dataverse, throw an error
         List<Dataset> datasets = getAllDatasets(txnId);
         for (Dataset dataset : datasets) {
-            if (dataset.getDataverseName().equals(dataverseName)) {
+            if (dataset.getDataverseName().equals(dataverseName) && dataset.getDatabaseName().equals(database)) {
                 continue;
             }
             if (dataset.getItemTypeDataverseName().equals(dataverseName)) {
@@ -1256,8 +1270,7 @@
             }
         }
 
-        // If a function from a DIFFERENT dataverse
-        // uses datasets, functions, datatypes, or synonyms from this dataverse
+        // If a function from a DIFFERENT dataverse uses datasets, functions, datatypes, or synonyms from this dataverse
         // throw an error
         List<Function> functions = getAllFunctions(txnId);
         for (Function function : functions) {
@@ -1279,8 +1292,7 @@
             }
         }
 
-        // If a feed connection from a DIFFERENT dataverse applies
-        // a function from this dataverse then throw an error
+        // If a feed connection from a DIFFERENT dataverse applies a function from this dataverse then throw an error
         List<FeedConnection> feedConnections = getAllFeedConnections(txnId);
         for (FeedConnection feedConnection : feedConnections) {
             if (dataverseName.equals(feedConnection.getDataverseName())) {
@@ -1314,14 +1326,14 @@
     }
 
     private void confirmFunctionIsUnusedByViews(TxnId txnId, FunctionSignature signature) throws AlgebricksException {
-        String functionDatabase = MetadataUtil.resolveDatabase(null, signature.getDataverseName());
+        String functionDatabase = signature.getDatabaseName();
         confirmObjectIsUnusedByViews(txnId, "function", DependencyKind.FUNCTION, functionDatabase,
                 signature.getDataverseName(), signature.getName(), Integer.toString(signature.getArity()));
     }
 
     private void confirmFunctionIsUnusedByFunctions(TxnId txnId, FunctionSignature signature)
             throws AlgebricksException {
-        String functionDatabase = MetadataUtil.resolveDatabase(null, signature.getDataverseName());
+        String functionDatabase = signature.getDatabaseName();
         confirmObjectIsUnusedByFunctions(txnId, "function", DependencyKind.FUNCTION, functionDatabase,
                 signature.getDataverseName(), signature.getName(), Integer.toString(signature.getArity()));
     }
@@ -1680,7 +1692,7 @@
 
     @Override
     public Function getFunction(TxnId txnId, FunctionSignature functionSignature) throws AlgebricksException {
-        String functionDatabase = MetadataUtil.resolveDatabase(null, functionSignature.getDataverseName());
+        String functionDatabase = functionSignature.getDatabaseName();
         List<Function> functions =
                 getFunctionsImpl(txnId, createTuple(functionDatabase, functionSignature.getDataverseName(),
                         functionSignature.getName(), Integer.toString(functionSignature.getArity())));
@@ -1718,7 +1730,7 @@
         }
         try {
             // Delete entry from the 'function' dataset.
-            String functionDatabase = MetadataUtil.resolveDatabase(null, functionSignature.getDataverseName());
+            String functionDatabase = functionSignature.getDatabaseName();
             ITupleReference searchKey = createTuple(functionDatabase, functionSignature.getDataverseName(),
                     functionSignature.getName(), Integer.toString(functionSignature.getArity()));
             // Searches the index for the tuple to be deleted. Acquires an S lock on the 'function' dataset.
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
index 3dbb8aa..8f79f0c 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBuiltinEntities.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.metadata.bootstrap;
 
-import org.apache.asterix.common.metadata.DataverseName;
 import org.apache.asterix.common.metadata.MetadataConstants;
 import org.apache.asterix.common.metadata.MetadataUtil;
 import org.apache.asterix.metadata.entities.Database;
@@ -32,6 +31,7 @@
     //--------------------------------------- Databases ----------------------------------------//
     public static final Database SYSTEM_DATABASE =
             new Database(MetadataConstants.SYSTEM_DATABASE, true, MetadataUtil.PENDING_NO_OP);
+
     public static final Database DEFAULT_DATABASE =
             new Database(MetadataConstants.DEFAULT_DATABASE, false, MetadataUtil.PENDING_NO_OP);
 
@@ -39,9 +39,10 @@
     public static final Dataverse METADATA_DATAVERSE =
             new Dataverse(MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME,
                     NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, MetadataUtil.PENDING_NO_OP);
-    public static final DataverseName DEFAULT_DATAVERSE_NAME = DataverseName.createBuiltinDataverseName("Default");
+
     public static final Dataverse DEFAULT_DATAVERSE = new Dataverse(MetadataConstants.DEFAULT_DATABASE,
-            DEFAULT_DATAVERSE_NAME, NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+            MetadataConstants.DEFAULT_DATAVERSE_NAME, NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
+
     //--------------------------------------- Datatypes -----------------------------------------//
     public static final Datatype ANY_OBJECT_DATATYPE =
             new Datatype(MetadataConstants.SYSTEM_DATABASE, MetadataConstants.METADATA_DATAVERSE_NAME,