merge asterix_stabilization r896:960

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_stabilization_printerfix@961 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
index 5961c32..035f1df 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
@@ -263,7 +263,7 @@
         // The translator will compile metadata internally. Run this compilation
         // under the same transaction id as the "outer" compilation.
         AqlPlusExpressionToPlanTranslator translator = new AqlPlusExpressionToPlanTranslator(
-                metadataProvider.getTxnId(), metadataProvider, counter, null, null);
+                metadataProvider.getJobTxnId(), metadataProvider, counter, null, null);
 
         LogicalOperatorDeepCopyVisitor deepCopyVisitor = new LogicalOperatorDeepCopyVisitor(counter);
 
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
index 3215707..6651ea3 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -220,7 +220,7 @@
             return new ConstantExpression(optFuncExpr.getConstantVal(0));
         } else {
             // We are optimizing a join query. Determine which variable feeds the secondary index. 
-            if (optFuncExpr.getOperatorSubTree(0) == probeSubTree) {
+            if (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree) {
                 return new VariableReferenceExpression(optFuncExpr.getLogicalVar(0));
             } else {
                 return new VariableReferenceExpression(optFuncExpr.getLogicalVar(1));
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
index 970cd89..1379bf4 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -201,7 +201,14 @@
             IOptimizableFuncExpr optFuncExpr = matchedFuncExprs.get(exprIndex);
             int keyPos = indexOf(optFuncExpr.getFieldName(0), chosenIndex.getKeyFieldNames());
             if (keyPos < 0) {
-                throw new InternalError();
+                if (optFuncExpr.getNumLogicalVars() > 1) {
+                    // If we are optimizing a join, the matching field may be the second field name.
+                    keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
+                }
+            }
+            if (keyPos < 0) {
+                throw new AlgebricksException(
+                        "Could not match optimizable function expression to any index field name.");
             }
             ILogicalExpression searchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr, indexSubTree,
                     probeSubTree);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index 5032a48..95ec76c 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -261,6 +261,7 @@
 
         OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
         builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
+
         ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
         if (pc.isOptimize()) {
             compiler.optimize();
@@ -297,13 +298,6 @@
             }
         }
 
-        if (!pc.isGenerateJobSpec()) {
-            // Job spec not requested. Consider transaction against metadata
-            // committed.
-            MetadataManager.INSTANCE.commitTransaction(queryMetadataProvider.getMetadataTxnContext());
-            return null;
-        }
-
         AlgebricksPartitionConstraint clusterLocs = queryMetadataProvider.getClusterLocations();
         builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
         builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
@@ -320,7 +314,7 @@
 
         JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.INSTANCE);
         // set the job event listener
-        spec.setJobletEventListenerFactory(new JobEventListenerFactory(queryMetadataProvider.getTxnId(),
+        spec.setJobletEventListenerFactory(new JobEventListenerFactory(queryMetadataProvider.getJobTxnId(),
                 isWriteTransaction));
         if (pc.isPrintJob()) {
             switch (pdf) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 541edd0..3c43736 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -17,12 +17,12 @@
 
     public static final String NC1_ID = "nc1";
     public static final String NC2_ID = "nc2";
+    public static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
 
     public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
 
     public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
 
-
     private static ClusterControllerService cc;
     private static NodeControllerService nc1;
     private static NodeControllerService nc2;
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index be6305b..08e1c5f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -27,7 +27,6 @@
 import edu.uci.ics.asterix.api.common.APIFramework;
 import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
 import edu.uci.ics.asterix.api.common.Job;
-import edu.uci.ics.asterix.api.common.Job.SubmissionMode;
 import edu.uci.ics.asterix.api.common.SessionConfig;
 import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
@@ -83,6 +82,7 @@
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.types.TypeSignature;
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionIDFactory;
 import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
 import edu.uci.ics.asterix.translator.CompiledStatements.CompiledBeginFeedStatement;
 import edu.uci.ics.asterix.translator.CompiledStatements.CompiledControlFeedStatement;
@@ -134,12 +134,12 @@
         return functionDecls;
     }
 
-    public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc) throws AlgebricksException,
-            RemoteException, ACIDException, AsterixException {
+    public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc) throws Exception {
         List<QueryResult> executionResult = new ArrayList<QueryResult>();
         FileSplit outputFile = null;
         IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
         Map<String, String> config = new HashMap<String, String>();
+        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
 
         for (Statement stmt : aqlStatements) {
             validateOperation(activeDefaultDataverse, stmt);
@@ -148,143 +148,159 @@
             metadataProvider.setWriterFactory(writerFactory);
             metadataProvider.setOutputFile(outputFile);
             metadataProvider.setConfig(config);
+            jobsToExecute.clear();
             try {
                 switch (stmt.getKind()) {
                     case SET: {
-                        SetStatement ss = (SetStatement) stmt;
-                        String pname = ss.getPropName();
-                        String pvalue = ss.getPropValue();
-                        config.put(pname, pvalue);
+                        handleSetStatement(metadataProvider, stmt, config, jobsToExecute);
                         break;
                     }
                     case DATAVERSE_DECL: {
-                        activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
+                        activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
                     case CREATE_DATAVERSE: {
-                        handleCreateDataverseStatement(metadataProvider, stmt);
+                        handleCreateDataverseStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
                     case DATASET_DECL: {
-                        handleCreateDatasetStatement(metadataProvider, stmt, hcc);
+                        handleCreateDatasetStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case CREATE_INDEX: {
-                        handleCreateIndexStatement(metadataProvider, stmt, hcc);
+                        handleCreateIndexStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case TYPE_DECL: {
-                        handleCreateTypeStatement(metadataProvider, stmt);
+                        handleCreateTypeStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
                     case NODEGROUP_DECL: {
-                        handleCreateNodeGroupStatement(metadataProvider, stmt);
+                        handleCreateNodeGroupStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
                     case DATAVERSE_DROP: {
-                        handleDataverseDropStatement(metadataProvider, stmt, hcc);
+                        handleDataverseDropStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case DATASET_DROP: {
-                        handleDatasetDropStatement(metadataProvider, stmt, hcc);
+                        handleDatasetDropStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case INDEX_DROP: {
-                        handleIndexDropStatement(metadataProvider, stmt, hcc);
+                        handleIndexDropStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case TYPE_DROP: {
-                        handleTypeDropStatement(metadataProvider, stmt);
+                        handleTypeDropStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
                     case NODEGROUP_DROP: {
-                        handleNodegroupDropStatement(metadataProvider, stmt);
+                        handleNodegroupDropStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
 
                     case CREATE_FUNCTION: {
-                        handleCreateFunctionStatement(metadataProvider, stmt);
+                        handleCreateFunctionStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
 
                     case FUNCTION_DROP: {
-                        handleFunctionDropStatement(metadataProvider, stmt);
+                        handleFunctionDropStatement(metadataProvider, stmt, jobsToExecute);
                         break;
                     }
 
                     case LOAD_FROM_FILE: {
-                        handleLoadFromFileStatement(metadataProvider, stmt, hcc);
+                        handleLoadFromFileStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case WRITE_FROM_QUERY_RESULT: {
-                        handleWriteFromQueryResultStatement(metadataProvider, stmt, hcc);
+                        handleWriteFromQueryResultStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case INSERT: {
-                        handleInsertStatement(metadataProvider, stmt, hcc);
+                        handleInsertStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
                     case DELETE: {
-                        handleDeleteStatement(metadataProvider, stmt, hcc);
+                        handleDeleteStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
 
                     case BEGIN_FEED: {
-                        handleBeginFeedStatement(metadataProvider, stmt, hcc);
+                        handleBeginFeedStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
 
                     case CONTROL_FEED: {
-                        handleControlFeedStatement(metadataProvider, stmt, hcc);
+                        handleControlFeedStatement(metadataProvider, stmt, hcc, jobsToExecute);
                         break;
                     }
 
                     case QUERY: {
-                        executionResult.add(handleQuery(metadataProvider, (Query) stmt, hcc));
-                        metadataProvider.setWriteTransaction(false);
+                        executionResult.add(handleQuery(metadataProvider, (Query) stmt, hcc, jobsToExecute));
                         break;
                     }
 
                     case WRITE: {
-                        WriteStatement ws = (WriteStatement) stmt;
-                        File f = new File(ws.getFileName());
-                        outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
-                        if (ws.getWriterClassName() != null) {
-                            try {
-                                writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
-                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                            } catch (Exception e) {
-                                throw new AsterixException(e);
-                            }
+                        Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt,
+                                jobsToExecute);
+                        if (result.first != null) {
+                            writerFactory = result.first;
                         }
+                        outputFile = result.second;
                         break;
                     }
 
                 }
-
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             } catch (Exception e) {
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                 throw new AlgebricksException(e);
             }
+            // The following jobs run under a separate transaction, which is committed/aborted by the JobEventListener.
+            for (JobSpecification jobspec : jobsToExecute) {
+                runJob(hcc, jobspec);
+            }
         }
         return executionResult;
     }
 
-    private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, RemoteException, ACIDException {
+    private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config,
+            List<JobSpecification> jobsToExecute) throws RemoteException, ACIDException {
+        SetStatement ss = (SetStatement) stmt;
+        String pname = ss.getPropName();
+        String pvalue = ss.getPropValue();
+        config.put(pname, pvalue);
+    }
+
+    private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws InstantiationException, IllegalAccessException,
+            ClassNotFoundException {
+        WriteStatement ws = (WriteStatement) stmt;
+        File f = new File(ws.getFileName());
+        FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
+        IAWriterFactory writerFactory = null;
+        if (ws.getWriterClassName() != null) {
+            writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
+        }
+        return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
+    }
+
+    private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws MetadataException, RemoteException, ACIDException {
         DataverseDecl dvd = (DataverseDecl) stmt;
         String dvName = dvd.getDataverseName().getValue();
         Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
         if (dv == null) {
-            throw new MetadataException(" Unknown dataverse " + dvName);
+            throw new MetadataException("Unknown dataverse " + dvName);
         }
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
         return dv;
-
     }
 
-    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws MetadataException, AlgebricksException, RemoteException,
+            ACIDException {
         CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
         String dvName = stmtCreateDataverse.getDataverseName().getValue();
         Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
@@ -293,11 +309,10 @@
         }
         MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
                 stmtCreateDataverse.getFormat()));
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
     }
 
     private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws AsterixException, Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws AsterixException, Exception {
         DatasetDecl dd = (DatasetDecl) stmt;
         String dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
                 : activeDefaultDataverse != null ? activeDefaultDataverse.getDataverseName() : null;
@@ -363,14 +378,12 @@
         if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
             Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
                     dataverseName);
-            runCreateDatasetJob(hcc, dataverse, datasetName, metadataProvider);
+            runJob(hcc, DatasetOperations.createDatasetJobSpec(dataverse, datasetName, metadataProvider));
         }
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
-
     }
 
     private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
         String dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
                 : activeDefaultDataverse.getDataverseName() : stmtCreateIndex.getDataverseName().getValue();
@@ -405,11 +418,11 @@
                     .buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
             runJob(hcc, loadIndexJobSpec);
         }
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
     }
 
-    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, RemoteException, ACIDException, MetadataException {
+    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws AlgebricksException, RemoteException, ACIDException,
+            MetadataException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         TypeDecl stmtCreateType = (TypeDecl) stmt;
         String dataverseName = stmtCreateType.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -437,11 +450,10 @@
                 MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
             }
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
     private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
         String dvName = stmtDelete.getDataverseName().getValue();
@@ -473,11 +485,10 @@
                 activeDefaultDataverse = null;
             }
         }
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
     }
 
     private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         DropStatement stmtDelete = (DropStatement) stmt;
         String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -503,11 +514,10 @@
             }
             compileDatasetDropStatement(hcc, dataverseName, datasetName, metadataProvider);
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
     private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
         String datasetName = stmtIndexDrop.getDatasetName().getValue();
@@ -532,11 +542,11 @@
             throw new AlgebricksException(datasetName
                     + " is an external dataset. Indexes are not maintained for external datasets.");
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
-    private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+    private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws AlgebricksException, MetadataException, RemoteException,
+            ACIDException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         TypeDropStatement stmtTypeDrop = (TypeDropStatement) stmt;
         String dataverseName = stmtTypeDrop.getDataverseName() == null ? (activeDefaultDataverse == null ? null
@@ -552,12 +562,11 @@
         } else {
             MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-
     }
 
-    private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws MetadataException, AlgebricksException, RemoteException,
+            ACIDException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
         String nodegroupName = stmtDelete.getNodeGroupName().getValue();
@@ -568,11 +577,11 @@
         } else {
             MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName);
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
-    private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+    private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws AlgebricksException, MetadataException, RemoteException,
+            ACIDException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
         String dataverse = cfs.getSignature().getNamespace() == null ? activeDefaultDataverse == null ? null
@@ -588,11 +597,11 @@
                 .getArity(), cfs.getParamList(), Function.RETURNTYPE_VOID, cfs.getFunctionBody(),
                 Function.LANGUAGE_AQL, FunctionKind.SCALAR.toString());
         MetadataManager.INSTANCE.addFunction(mdTxnCtx, function);
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
-    private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, RemoteException, ACIDException, AlgebricksException {
+    private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws MetadataException, RemoteException, ACIDException,
+            AlgebricksException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         FunctionDropStatement stmtDropFunction = (FunctionDropStatement) stmt;
         FunctionSignature signature = stmtDropFunction.getFunctionSignature();
@@ -603,12 +612,10 @@
         } else {
             MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
     }
 
     private void handleLoadFromFileStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        List<Job> jobs = new ArrayList<Job>();
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         LoadFromFileStatement loadStmt = (LoadFromFileStatement) stmt;
         String dataverseName = loadStmt.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -618,7 +625,7 @@
 
         IDataFormat format = getDataFormat(metadataProvider.getMetadataTxnContext(), dataverseName);
         Job job = DatasetOperations.createLoadDatasetJobSpec(metadataProvider, cls, format);
-        jobs.add(job);
+        jobsToExecute.add(job.getJobSpec());
         // Also load the dataset's secondary indexes.
         List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, loadStmt
                 .getDatasetName().getValue());
@@ -629,17 +636,13 @@
             // Create CompiledCreateIndexStatement from metadata entity 'index'.
             CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                     index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
-            JobSpecification jobSpec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
-            jobs.add(new Job(jobSpec));
-        }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        for (Job j : jobs) {
-            runJob(hcc, j.getJobSpec());
+            jobsToExecute.add(IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider));
         }
     }
 
     private void handleWriteFromQueryResultStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
+        metadataProvider.setWriteTransaction(true);
         WriteFromQueryResultStatement st1 = (WriteFromQueryResultStatement) stmt;
         String dataverseName = st1.getDataverseName() == null ? activeDefaultDataverse == null ? null
                 : activeDefaultDataverse.getDataverseName() : st1.getDataverseName().getValue();
@@ -647,11 +650,11 @@
                 .getDatasetName().getValue(), st1.getQuery(), st1.getVarCounter());
 
         Pair<JobSpecification, FileSplit> compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
-        runJob(hcc, compiled.first);
+        jobsToExecute.add(compiled.first);
     }
 
     private void handleInsertStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         metadataProvider.setWriteTransaction(true);
         InsertStatement stmtInsert = (InsertStatement) stmt;
         String dataverseName = stmtInsert.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -659,11 +662,11 @@
         CompiledInsertStatement clfrqs = new CompiledInsertStatement(dataverseName, stmtInsert.getDatasetName()
                 .getValue(), stmtInsert.getQuery(), stmtInsert.getVarCounter());
         Pair<JobSpecification, FileSplit> compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
-        runJob(hcc, compiled.first);
+        jobsToExecute.add(compiled.first);
     }
 
     private void handleDeleteStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         metadataProvider.setWriteTransaction(true);
         DeleteStatement stmtDelete = (DeleteStatement) stmt;
         String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -672,39 +675,35 @@
                 stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getDieClause(),
                 stmtDelete.getVarCounter(), metadataProvider);
         Pair<JobSpecification, FileSplit> compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
-        runJob(hcc, compiled.first);
+        jobsToExecute.add(compiled.first);
     }
 
     private Pair<JobSpecification, FileSplit> rewriteCompileQuery(AqlMetadataProvider metadataProvider, Query query,
             ICompiledDmlStatement stmt) throws AsterixException, RemoteException, AlgebricksException, JSONException,
             ACIDException {
+
+        // Query Rewriting (happens under the same ongoing metadata transaction)
         Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query,
                 sessionConfig, out, pdf);
-        MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
-        Pair<JobSpecification, FileSplit> compiled = compileQuery(sessionConfig, reWrittenQuery.first,
-                metadataProvider, reWrittenQuery.second, stmt);
+
+        // Query Compilation (happens under the same ongoing metadata
+        // transaction)
+        sessionConfig.setGenerateJobSpec(true);
+        if (metadataProvider.isWriteTransaction()) {
+            metadataProvider.setJobTxnId(TransactionIDFactory.generateTransactionId());
+        }
+        JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query,
+                reWrittenQuery.second, stmt == null ? null : stmt.getDatasetName(), sessionConfig, out, pdf, stmt);
+        sessionConfig.setGenerateJobSpec(false);
+
+        Pair<JobSpecification, FileSplit> compiled = new Pair<JobSpecification, FileSplit>(spec,
+                metadataProvider.getOutputFile());
         return compiled;
 
     }
 
-    private Pair<JobSpecification, FileSplit> compileQuery(SessionConfig sessionConfig, Query query,
-            AqlMetadataProvider metadataProvider, int varCounter, ICompiledDmlStatement statement)
-            throws RemoteException, AsterixException, AlgebricksException, JSONException, ACIDException {
-        sessionConfig.setGenerateJobSpec(true);
-        MetadataTransactionContext mdTxnCtxQuery = MetadataManager.INSTANCE.beginTransaction();
-        AqlMetadataProvider metadataProviderInsert = new AqlMetadataProvider(mdTxnCtxQuery, activeDefaultDataverse);
-        metadataProviderInsert.setWriterFactory(metadataProvider.getWriterFactory());
-        metadataProviderInsert.setOutputFile(metadataProvider.getOutputFile());
-        metadataProviderInsert.setConfig(metadataProvider.getConfig());
-        JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider, query, varCounter,
-                statement == null ? null : statement.getDatasetName(), sessionConfig, out, pdf, statement);
-        sessionConfig.setGenerateJobSpec(false);
-        return new Pair<JobSpecification, FileSplit>(spec, metadataProvider.getOutputFile());
-    }
-
     private void handleBeginFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         BeginFeedStatement bfs = (BeginFeedStatement) stmt;
         String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
                 : activeDefaultDataverse.getDataverseName() : bfs.getDatasetName().getValue();
@@ -713,49 +712,36 @@
                 bfs.getDatasetName().getValue(), bfs.getQuery(), bfs.getVarCounter());
 
         Dataset dataset;
-        dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, bfs.getDatasetName().getValue());
+        dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
+                .getDatasetName().getValue());
         IDatasetDetails datasetDetails = dataset.getDatasetDetails();
         if (datasetDetails.getDatasetType() != DatasetType.FEED) {
             throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue() + " is not a feed dataset");
         }
-        bfs.initialize(mdTxnCtx, dataset);
+        bfs.initialize(metadataProvider.getMetadataTxnContext(), dataset);
         cbfs.setQuery(bfs.getQuery());
-
-        Pair<Query, Integer> reWrittenQuery = APIFramework.reWriteQuery(declaredFunctions, metadataProvider,
-                bfs.getQuery(), sessionConfig, out, pdf);
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-
-        Pair<JobSpecification, FileSplit> compiled = compileQuery(sessionConfig, reWrittenQuery.first,
-                metadataProvider, reWrittenQuery.second, cbfs);
-        runJob(hcc, compiled.first);
+        Pair<JobSpecification, FileSplit> compiled = rewriteCompileQuery(metadataProvider, bfs.getQuery(), cbfs);
+        jobsToExecute.add(compiled.first);
     }
 
     private void handleControlFeedStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
+            IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         ControlFeedStatement cfs = (ControlFeedStatement) stmt;
         String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
                 : activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
         CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(), dataverseName,
                 cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
-        Job job = new Job(FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider), SubmissionMode.ASYNCHRONOUS);
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        runJob(hcc, job.getJobSpec());
+        jobsToExecute.add(FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider));
     }
 
-    private QueryResult handleQuery(AqlMetadataProvider metadataProvider, Query query, IHyracksClientConnection hcc)
-            throws Exception {
+    private QueryResult handleQuery(AqlMetadataProvider metadataProvider, Query query, IHyracksClientConnection hcc,
+            List<JobSpecification> jobsToExecute) throws Exception {
         Pair<JobSpecification, FileSplit> compiled = rewriteCompileQuery(metadataProvider, query, null);
-        runJob(hcc, compiled.first);
         GlobalConfig.ASTERIX_LOGGER.info(compiled.first.toJSON().toString(1));
+        jobsToExecute.add(compiled.first);
         return new QueryResult(query, compiled.second.getLocalFile().getFile().getAbsolutePath());
     }
 
-    private void runCreateDatasetJob(IHyracksClientConnection hcc, Dataverse dataverse, String datasetName,
-            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException, Exception {
-        runJob(hcc, DatasetOperations.createDatasetJobSpec(dataverse, datasetName, metadataProvider));
-    }
-
     private void runCreateIndexJob(IHyracksClientConnection hcc, CreateIndexStatement stmtCreateIndex,
             AqlMetadataProvider metadataProvider) throws Exception {
         // TODO: Eventually CreateIndexStatement and
@@ -772,11 +758,12 @@
             throw new AsterixException("Failed to create job spec for creating index '"
                     + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
         }
-        runJob(hcc, new Job(spec));
+        runJob(hcc, spec);
     }
 
-    private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+    private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            List<JobSpecification> jobsToExecute) throws MetadataException, AlgebricksException, RemoteException,
+            ACIDException {
         MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
         NodegroupDecl stmtCreateNodegroup = (NodegroupDecl) stmt;
         String ngName = stmtCreateNodegroup.getNodegroupName().getValue();
@@ -792,11 +779,6 @@
             }
             MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(ngName, ncNames));
         }
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-    }
-
-    private void runJob(IHyracksClientConnection hcc, Job job) throws Exception {
-        executeJobArray(hcc, new Job[] { job }, out, pdf);
     }
 
     private void runJob(IHyracksClientConnection hcc, JobSpecification spec) throws Exception {
@@ -806,7 +788,7 @@
     private void compileIndexDropStatement(IHyracksClientConnection hcc, String dataverseName, String datasetName,
             String indexName, AqlMetadataProvider metadataProvider) throws Exception {
         CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-        runJob(hcc, new Job(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider)));
+        runJob(hcc, IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
         MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
                 indexName);
     }
@@ -819,7 +801,7 @@
         if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
             JobSpecification[] jobSpecs = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
             for (JobSpecification spec : jobSpecs)
-                runJob(hcc, new Job(spec));
+                runJob(hcc, spec);
         }
         MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
     }
@@ -846,20 +828,4 @@
         return format;
     }
 
-    public List<Statement> getAqlStatements() {
-        return aqlStatements;
-    }
-
-    public PrintWriter getOut() {
-        return out;
-    }
-
-    public SessionConfig getPc() {
-        return sessionConfig;
-    }
-
-    public DisplayFormat getPdf() {
-        return pdf;
-    }
-
 }
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index ec82988..9b7c356 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -1,16 +1,13 @@
 package edu.uci.ics.asterix.test.metadata;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
 import java.io.PrintWriter;
-import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.logging.Logger;
 
+import org.apache.commons.io.FileUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -19,155 +16,97 @@
 import org.junit.runners.Parameterized.Parameters;
 
 import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
 @RunWith(Parameterized.class)
 public class MetadataTest {
 
+    private TestCaseContext tcCtx;
+
     private static final Logger LOGGER = Logger.getLogger(MetadataTest.class.getName());
-
-    private static final PrintWriter ERR = new PrintWriter(System.err);
-    private static final String EXTENSION_QUERY = "aql";
-    private static final String EXTENSION_RESULT = "adm";
-    private static final String PATH_ACTUAL = "rttest/";
+    private static final String PATH_ACTUAL = "mdtest/";
     private static final String PATH_BASE = "src/test/resources/metadata/";
-    private static final String PATH_EXPECTED = PATH_BASE + "results/";
-    private static final String PATH_QUERIES = PATH_BASE + "queries/";
-    private static final String QUERIES_FILE = PATH_BASE + "queries.txt";
-    private static final String SEPARATOR = System.getProperty("file.separator");
-
-    private static String _oldConfigFileName;
-    //private static final String TEST_CONFIG_FILE_NAME = "asterix-metadata.properties";
     private static final String TEST_CONFIG_FILE_NAME = "test.properties";
+    private static final String WEB_SERVER_PORT="19002";
     private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
 
-    private static String aqlExtToResExt(String fname) {
-        int dot = fname.lastIndexOf('.');
-        return fname.substring(0, dot + 1) + EXTENSION_RESULT;
+    public MetadataTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @Test
+    public void test() throws Exception {
+        List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
+        for (CompilationUnit cUnit : cUnits) {
+            File testFile = tcCtx.getTestFile(cUnit);
+            File expectedResultFile = tcCtx.getExpectedResultFile(cUnit);
+            File actualFile = new File(PATH_ACTUAL + File.separator
+                    + tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_" + cUnit.getName() + ".adm");
+
+            File actualResultFile = tcCtx.getActualResultFile(cUnit, new File(PATH_ACTUAL));
+            actualResultFile.getParentFile().mkdirs();
+            try {
+                TestsUtils.runScriptAndCompareWithResult(AsterixHyracksIntegrationUtil.getHyracksClientConnection(),
+                        testFile, new PrintWriter(System.err), expectedResultFile, actualFile);
+            } catch (Exception e) {
+                LOGGER.severe("Test \"" + testFile + "\" FAILED!");
+                e.printStackTrace();
+                if (cUnit.getExpectedError().isEmpty()) {
+                    throw new Exception("Test \"" + testFile + "\" FAILED!", e);
+                }
+            }
+        }
     }
 
     @BeforeClass
     public static void setUp() throws Exception {
-        _oldConfigFileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
         System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
+        System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, WEB_SERVER_PORT);
         File outdir = new File(PATH_ACTUAL);
         outdir.mkdirs();
+
+        File log = new File("asterix_logs");
+        if (log.exists())
+            FileUtils.deleteDirectory(log);
+        File lsn = new File("last_checkpoint_lsn");
+        lsn.deleteOnExit();
+
         AsterixHyracksIntegrationUtil.init();
+
     }
 
     @AfterClass
     public static void tearDown() throws Exception {
-        // _bootstrap.stop();
         AsterixHyracksIntegrationUtil.deinit();
         File outdir = new File(PATH_ACTUAL);
         File[] files = outdir.listFiles();
         if (files == null || files.length == 0) {
             outdir.delete();
         }
-        if (_oldConfigFileName != null) {
-            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, _oldConfigFileName);
-        }
+
         // clean up the files written by the ASTERIX storage manager
-        for (String d : ASTERIX_DATA_DIRS) {
+        for (String d : AsterixHyracksIntegrationUtil.ASTERIX_DATA_DIRS) {
             TestsUtils.deleteRec(new File(d));
         }
-    }
 
-    private static void suiteBuild(File f, Collection<Object[]> testArgs, String path) throws IOException {
-        BufferedReader br = null;
-        try {
-            br = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
-            String strLine;
-            File file;
-            while ((strLine = br.readLine()) != null) {
-                // Ignore commented out lines.
-                if (strLine.startsWith("//")) {
-                    continue;
-                }
-                file = new File(PATH_QUERIES + SEPARATOR + strLine);
-                if (file.getName().endsWith(EXTENSION_QUERY)) {
-                    String resultFileName = aqlExtToResExt(file.getName());
-                    File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
-                    File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
-                    testArgs.add(new Object[] { file, expectedFile, actualFile });
-                }
-            }
-
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            if (br != null) {
-                br.close();
-            }
-        }
+        File log = new File("asterix_logs");
+        if (log.exists())
+            FileUtils.deleteDirectory(log);
+        File lsn = new File("last_checkpoint_lsn");
+        lsn.deleteOnExit();
     }
 
     @Parameters
-    public static Collection<Object[]> tests() throws IOException {
+    public static Collection<Object[]> tests() throws Exception {
         Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        suiteBuild(new File(QUERIES_FILE), testArgs, "");
+        TestCaseContext.Builder b = new TestCaseContext.Builder();
+        for (TestCaseContext ctx : b.build(new File(PATH_BASE))) {
+            testArgs.add(new Object[] { ctx });
+        }
         return testArgs;
     }
 
-    private File actualFile;
-    private File expectedFile;
-    private File queryFile;
-
-    public MetadataTest(File queryFile, File expectedFile, File actualFile) {
-        this.queryFile = queryFile;
-        this.expectedFile = expectedFile;
-        this.actualFile = actualFile;
-    }
-
-    @Test
-    public void test() throws Exception {
-        Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
-        AsterixJavaClient asterix = new AsterixJavaClient(AsterixHyracksIntegrationUtil.getHyracksClientConnection(),
-                query, ERR);
-        try {
-            LOGGER.info("Query is: " + queryFile);
-            asterix.compile(true, false, false, false, false, true, false);
-        } catch (AsterixException e) {
-            throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
-        } finally {
-            query.close();
-        }
-        asterix.execute();
-        query.close();
-
-        if (actualFile.exists()) {
-            BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
-                    "UTF-8"));
-            BufferedReader readerActual = new BufferedReader(new InputStreamReader(new FileInputStream(actualFile),
-                    "UTF-8"));
-            String lineExpected, lineActual;
-            int num = 1;
-            try {
-                while ((lineExpected = readerExpected.readLine()) != null) {
-                    lineActual = readerActual.readLine();
-                    if (lineActual == null) {
-                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
-                                + lineExpected + "\n> ");
-                    }
-                    if (!lineExpected.split("Timestamp")[0].equals(lineActual.split("Timestamp")[0])) {
-                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
-                                + lineExpected + "\n> " + lineActual);
-                    }
-                    ++num;
-                }
-                lineActual = readerActual.readLine();
-                if (lineActual != null) {
-                    throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< \n> "
-                            + lineActual);
-                }
-                actualFile.delete();
-            } finally {
-                readerExpected.close();
-                readerActual.close();
-            }
-        }
-    }
 }
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTransactionsTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTransactionsTest.java
deleted file mode 100644
index f2df365..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTransactionsTest.java
+++ /dev/null
@@ -1,253 +0,0 @@
-package edu.uci.ics.asterix.test.metadata;
-
-import static org.junit.Assert.fail;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-
-@RunWith(Parameterized.class)
-public class MetadataTransactionsTest {
-
-    private static final Logger LOGGER = Logger.getLogger(MetadataTransactionsTest.class.getName());
-
-    private static final PrintWriter ERR = new PrintWriter(System.err);
-    private static final String EXTENSION_QUERY = "aql";
-    private static final String EXTENSION_RESULT = "adm";
-    private static final String PATH_ACTUAL = "rttest/";
-    private static final String PATH_BASE = "src/test/resources/metadata-transactions/";
-    private static final String CHECK_STATE_QUERIES_PATH = PATH_BASE + "check-state-queries/";
-    private static final String CHECK_STATE_RESULTS_PATH = PATH_BASE + "check-state-results/";
-    private static final String CHECK_STATE_FILE = PATH_BASE + "check-state-queries.txt";
-    private static final String INIT_STATE_QUERIES_PATH = PATH_BASE + "init-state-queries/";
-    private static final String INIT_STATE_FILE = PATH_BASE + "init-state-queries.txt";
-    private static final String TEST_QUERIES_PATH = PATH_BASE + "queries/";
-    private static final String QUERIES_FILE = PATH_BASE + "queries.txt";
-
-    private static String _oldConfigFileName;
-    private static final String TEST_CONFIG_FILE_NAME = "asterix-metadata.properties";
-    private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
-
-    private static String aqlExtToResExt(String fname) {
-        int dot = fname.lastIndexOf('.');
-        return fname.substring(0, dot + 1) + EXTENSION_RESULT;
-    }
-
-    private static void executeQueryTuple(Object[] queryTuple, boolean expectFailure, boolean executeQuery) {
-        String queryFileName = (String) queryTuple[0];
-        String expectedFileName = (String) queryTuple[1];
-        String actualFileName = (String) queryTuple[2];
-        try {
-            Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFileName), "UTF-8"));
-            AsterixJavaClient asterix = new AsterixJavaClient(
-                    AsterixHyracksIntegrationUtil.getHyracksClientConnection(), query, ERR);
-            LOGGER.info("Query is: " + queryFileName);
-            try {
-                asterix.compile(true, false, false, false, false, executeQuery, false);
-            } finally {
-                query.close();
-            }
-            // We don't want to execute a query if we expect only DDL
-            // modifications.
-            if (executeQuery) {
-                asterix.execute();
-            }
-            query.close();
-        } catch (Exception e) {
-            if (!expectFailure) {
-                fail("Unexpected failure of AQL query in file: " + queryFileName + "\n" + e.getMessage());
-            }
-            return;
-        }
-        // Do we expect failure?
-        if (expectFailure) {
-            fail("Unexpected success of AQL query in file: " + queryFileName);
-        }
-        // If no expected or actual file names were given, then we don't want to
-        // compare them.
-        if (expectedFileName == null || actualFileName == null) {
-            return;
-        }
-        // Compare actual and expected results.
-        try {
-            File actualFile = new File(actualFileName);
-            File expectedFile = new File(expectedFileName);
-            if (actualFile.exists() && false) {
-                BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(
-                        expectedFile), "UTF-8"));
-                BufferedReader readerActual = new BufferedReader(new InputStreamReader(new FileInputStream(actualFile),
-                        "UTF-8"));
-                String lineExpected, lineActual;
-                int num = 1;
-                try {
-                    while ((lineExpected = readerExpected.readLine()) != null) {
-                        lineActual = readerActual.readLine();
-                        if (lineActual == null) {
-                            fail("Result for " + queryFileName + " changed at line " + num + ":\n< " + lineExpected
-                                    + "\n> ");
-                        }
-                        if (!lineExpected.split("Timestamp")[0].equals(lineActual.split("Timestamp")[0])) {
-                            fail("Result for " + queryFileName + " changed at line " + num + ":\n< " + lineExpected
-                                    + "\n> " + lineActual);
-                        }
-                        ++num;
-                    }
-                    lineActual = readerActual.readLine();
-                    if (lineActual != null) {
-                        fail("Result for " + queryFileName + " changed at line " + num + ":\n< \n> " + lineActual);
-                    }
-                    // actualFile.delete();
-                } finally {
-                    readerExpected.close();
-                    readerActual.close();
-                }
-            }
-        } catch (Exception e) {
-            fail("Execption occurred while comparing expected and actual results: " + e.getMessage());
-        }
-    }
-
-    // Collection of object arrays. Each object array contains exactly 3 string
-    // elements:
-    // 1. String QueryFile
-    // 2. String expectedFile
-    // 3. String actualFile
-    private static Collection<Object[]> checkQuerySuite = new ArrayList<Object[]>();
-
-    private static void checkMetadataState() {
-        for (Object[] checkTuple : checkQuerySuite) {
-            executeQueryTuple(checkTuple, false, true);
-        }
-    }
-
-    @BeforeClass
-    public static void setUp() throws Exception {
-        _oldConfigFileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        File outdir = new File(PATH_ACTUAL);
-        outdir.mkdirs();
-        AsterixHyracksIntegrationUtil.init();
-
-        // Create initial metadata state by adding the customers and orders
-        // metadata.
-        Collection<Object[]> initQuerySuite = new ArrayList<Object[]>();
-        prepareQuerySuite(INIT_STATE_FILE, INIT_STATE_QUERIES_PATH, null, null, initQuerySuite);
-        for (Object[] queryTuple : initQuerySuite) {
-            executeQueryTuple(queryTuple, false, false);
-        }
-
-        // Prepare the query suite for checking the metadata state is still
-        // correct.
-        prepareQuerySuite(CHECK_STATE_FILE, CHECK_STATE_QUERIES_PATH, CHECK_STATE_RESULTS_PATH, PATH_ACTUAL,
-                checkQuerySuite);
-
-        // Make sure the initial metadata state is set up correctly.
-        checkMetadataState();
-    }
-
-    @AfterClass
-    public static void tearDown() throws Exception {
-        // _bootstrap.stop();
-        AsterixHyracksIntegrationUtil.deinit();
-        File outdir = new File(PATH_ACTUAL);
-        File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
-            // outdir.delete();
-        }
-        if (_oldConfigFileName != null) {
-            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, _oldConfigFileName);
-        }
-        // clean up the files written by the ASTERIX storage manager
-        for (String d : ASTERIX_DATA_DIRS) {
-            TestsUtils.deleteRec(new File(d));
-        }
-    }
-
-    private static void prepareQuerySuite(String queryListPath, String queryPath, String expectedPath,
-            String actualPath, Collection<Object[]> output) throws IOException {
-        BufferedReader br = null;
-        try {
-            File queryListFile = new File(queryListPath);
-            br = new BufferedReader(new InputStreamReader(new FileInputStream(queryListFile), "UTF-8"));
-            String strLine;
-            String queryFileName;
-            File queryFile;
-            while ((strLine = br.readLine()) != null) {
-                // Ignore commented test files.
-                if (strLine.startsWith("//")) {
-                    continue;
-                }
-                queryFileName = queryPath + strLine;
-                queryFile = new File(queryPath + strLine);
-                // If no expected or actual path was given, just add the
-                // queryFile.
-                if (expectedPath == null || actualPath == null) {
-                    output.add(new Object[] { queryFileName, null, null });
-                    continue;
-                }
-                // We want to compare expected and actual results. Construct the
-                // expected and actual files.
-                if (queryFile.getName().endsWith(EXTENSION_QUERY)) {
-                    String resultFileName = aqlExtToResExt(queryFile.getName());
-                    String expectedFileName = expectedPath + resultFileName;
-                    String actualFileName = actualPath + resultFileName;
-                    output.add(new Object[] { queryFileName, expectedFileName, actualFileName });
-                }
-            }
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            if (br != null) {
-                br.close();
-            }
-        }
-    }
-
-    @Parameters
-    public static Collection<Object[]> tests() throws IOException {
-        Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        prepareQuerySuite(QUERIES_FILE, TEST_QUERIES_PATH, null, null, testArgs);
-        return testArgs;
-    }
-
-    private String actualFileName;
-    private String expectedFileName;
-    private String queryFileName;
-
-    public MetadataTransactionsTest(String queryFileName, String expectedFileName, String actualFileName) {
-        this.queryFileName = queryFileName;
-        this.expectedFileName = expectedFileName;
-        this.actualFileName = actualFileName;
-    }
-
-    @Test
-    public void test() throws Exception {
-        // Re-create query tuple.
-        Object[] queryTuple = new Object[] { queryFileName, expectedFileName, actualFileName };
-
-        // Execute query tuple, expecting failure.
-        executeQueryTuple(queryTuple, true, false);
-
-        // Validate metadata state after failed query above.
-        checkMetadataState();
-    }
-}
diff --git a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_drop_dataset.aql b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_drop_dataset.aql
index 860a714..8dfe072 100644
--- a/asterix-app/src/test/resources/metadata-transactions/queries/rollback_drop_dataset.aql
+++ b/asterix-app/src/test/resources/metadata-transactions/queries/rollback_drop_dataset.aql
@@ -1,3 +1,5 @@
+drop dataverse custord if exists;
+create dataverse custord;
 use dataverse custord;
 
 drop dataset Customers;
diff --git a/asterix-app/src/test/resources/metadata/queries.txt b/asterix-app/src/test/resources/metadata/queries.txt
deleted file mode 100644
index 747dce3..0000000
--- a/asterix-app/src/test/resources/metadata/queries.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-metadata_dataverse.aql
-metadata_dataset.aql
-metadata_index.aql
-metadata_datatype.aql
-metadata_node.aql
-metadata_nodegroup.aql
-custord_q1.aql
-custord_q2.aql
-custord_q3.aql
-custord_q4.aql
-custord_dataverse.aql
-custord_dataset.aql
-custord_index.aql
-custord_datatype.aql
-custord_nodegroup.aql
-custord_q5.aql
-exceptions.aql
-custord_q7.aql
-custord_q8.aql
-metadata_dataverse.aql
-metadata_dataset.aql
-metadata_index.aql
-metadata_datatype.aql
-metadata_node.aql
-metadata_nodegroup.aql
diff --git a/asterix-app/src/test/resources/metadata/queries/meta01.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta01.aql
similarity index 78%
rename from asterix-app/src/test/resources/metadata/queries/meta01.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta01.aql
index 15b7de8..5ce47ad 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta01.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta01.aql
@@ -7,7 +7,9 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta01.adm";
+write output to nc1:"mdtest/basic_meta01.adm";
 
 for $l in dataset('Metadata.Dataverse')
 return $l
+
+drop dataverse testdv;
diff --git a/asterix-app/src/test/resources/metadata/queries/meta02.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
similarity index 77%
rename from asterix-app/src/test/resources/metadata/queries/meta02.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
index f6686b1..8bfc240 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta02.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta02.aql
@@ -7,9 +7,9 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta02.adm";
+write output to nc1:"mdtest/basic_meta02.adm";
 
-create type test.testtype as open {
+create type testdv.testtype as open {
 id : int32
 }
 
@@ -18,3 +18,5 @@
 for $l in dataset('Metadata.Dataset')
 where $l.DataverseName = 'testdv' and $l.DatasetName = 'dst01'
 return $l
+
+drop dataverse testdv; 
diff --git a/asterix-app/src/test/resources/metadata/queries/meta03.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta03.aql
similarity index 83%
rename from asterix-app/src/test/resources/metadata/queries/meta03.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta03.aql
index 88c658c..0b876f2 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta03.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta03.aql
@@ -7,7 +7,7 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta03.adm";
+write output to nc1:"mdtest/basic_meta03.adm";
 
 create type testdv.testtype as closed {
 id : int32
@@ -16,3 +16,6 @@
 for $l in dataset('Metadata.Datatype')
 where $l.DataverseName='testdv' and $l.DatatypeName='testtype'
 return $l
+
+drop dataverse testdv;
+
diff --git a/asterix-app/src/test/resources/metadata/queries/meta04.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta04.aql
similarity index 83%
rename from asterix-app/src/test/resources/metadata/queries/meta04.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta04.aql
index fac73da..b2e7304 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta04.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta04.aql
@@ -7,7 +7,7 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta04.adm";
+write output to nc1:"mdtest/basic_meta04.adm";
 
 create type testdv.testtype as open {
 id : int32
@@ -16,3 +16,5 @@
 for $l in dataset('Metadata.Datatype')
 where $l.DataverseName='testdv' and $l.DatatypeName='testtype'
 return $l
+
+drop dataverse testdv;
diff --git a/asterix-app/src/test/resources/metadata/queries/meta05.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
similarity index 86%
rename from asterix-app/src/test/resources/metadata/queries/meta05.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
index 6a99187..9eb129d 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta05.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta05.aql
@@ -7,7 +7,7 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta05.adm";
+write output to nc1:"mdtest/basic_meta05.adm";
 
 create type testdv.testtype as open {
 id : int32,
@@ -21,3 +21,5 @@
 for $l in dataset('Metadata.Index')
 where $l.DataverseName='testdv' 
 return $l
+
+drop dataverse testdv;
diff --git a/asterix-app/src/test/resources/metadata/queries/meta06.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta06.aql
similarity index 83%
rename from asterix-app/src/test/resources/metadata/queries/meta06.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta06.aql
index c0b9a9b..6297561 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta06.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta06.aql
@@ -7,7 +7,7 @@
 drop dataverse testdv if exists;
 create dataverse testdv;
 
-write output to nc1:"rttest/meta06.adm";
+write output to nc1:"mdtest/basic_meta06.adm";
 
 create function testdv.fun01(){
 "This is an AQL Bodied UDF"
@@ -16,3 +16,4 @@
 for $l in dataset('Metadata.Function')
 return $l
 
+drop dataverse testdv;
diff --git a/asterix-app/src/test/resources/metadata/queries/meta07.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta07.aql
similarity index 84%
rename from asterix-app/src/test/resources/metadata/queries/meta07.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta07.aql
index 2455eeb..6544bca 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta07.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta07.aql
@@ -6,7 +6,7 @@
 
 // Please note this query was run on two nodes, i.e; two NCs
 
-write output to nc1:"rttest/meta07.adm";
+write output to nc1:"mdtest/basic_meta07.adm";
 
 for $l in dataset('Metadata.Node')
 return $l
diff --git a/asterix-app/src/test/resources/metadata/queries/meta08.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta08.aql
similarity index 84%
rename from asterix-app/src/test/resources/metadata/queries/meta08.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta08.aql
index e382c47..a7d536f 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta08.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta08.aql
@@ -6,7 +6,7 @@
 
 // Please note this query was run on two nodes, i.e; two NCs
 
-write output to nc1:"rttest/meta08.adm";
+write output to nc1:"mdtest/basic_meta08.adm";
 
 for $l in dataset('Metadata.Nodegroup')
 return $l
diff --git a/asterix-app/src/test/resources/metadata/queries/meta09.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
similarity index 92%
rename from asterix-app/src/test/resources/metadata/queries/meta09.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
index 4404707..28a3794 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta09.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta09.aql
@@ -7,7 +7,7 @@
 drop dataverse test if exists;
 create dataverse test;
 
-write output to nc1:"rttest/meta09.adm";
+write output to nc1:"mdtest/basic_meta09.adm";
 
 create type test.testtype as open {
 id:int32
@@ -29,3 +29,5 @@
 for $l in dataset('Metadata.Dataset')
 where $l.DataverseName='test' and $l.DatasetName='t1'
 return $l
+
+drop dataverse test; 
diff --git a/asterix-app/src/test/resources/metadata/queries/meta10.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta10.aql
similarity index 77%
rename from asterix-app/src/test/resources/metadata/queries/meta10.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta10.aql
index 6aae87b..452dcc3 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta10.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta10.aql
@@ -8,7 +8,10 @@
 create dataverse test;
 drop dataverse test if exists;
 
-write output to nc1:"rttest/meta10.adm";
+write output to nc1:"mdtest/basic_meta10.adm";
 
+count(
 for $l in dataset('Metadata.Dataverse')
+where $l.DataverseName='test'
 return $l
+)
diff --git a/asterix-app/src/test/resources/metadata/queries/meta11.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
similarity index 74%
rename from asterix-app/src/test/resources/metadata/queries/meta11.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
index a5ae81e..d781114 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta11.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta11.aql
@@ -7,7 +7,7 @@
 drop dataverse test if exists;
 create dataverse test;
 
-write output to nc1:"rttest/meta11.adm";
+write output to nc1:"mdtest/basic_meta11.adm";
 
 create type test.testtype as open {
 id : int32
@@ -17,5 +17,11 @@
 
 drop dataset test.dst01;
 
+count(
 for $l in dataset('Metadata.Dataset')
+where $l.DataverseName='test' and $l.DatasetName='dst01'
 return $l
+)
+
+drop dataverse test;
+
diff --git a/asterix-app/src/test/resources/metadata/queries/meta12.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
similarity index 85%
rename from asterix-app/src/test/resources/metadata/queries/meta12.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
index 481cc3a..1de7ac5 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta12.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta12.aql
@@ -4,12 +4,10 @@
  * Date         : Sep 17 2012
  */
 
-// This drop index test is broken for now.
-
 drop dataverse test if exists;
 create dataverse test;
 
-write output to nc1:"rttest/meta12.adm";
+write output to nc1:"mdtest/basic_meta12.adm";
 
 create type test.testtype as open {
 id : int32,
@@ -25,3 +23,5 @@
 for $l in dataset('Metadata.Index')
 where $l.DatasetName = 'dst01'
 return $l
+
+drop dataverse test;
diff --git a/asterix-app/src/test/resources/metadata/queries/meta13.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta13.aql
similarity index 72%
rename from asterix-app/src/test/resources/metadata/queries/meta13.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta13.aql
index 62d0964..0c0f627 100644
--- a/asterix-app/src/test/resources/metadata/queries/meta13.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta13.aql
@@ -5,12 +5,10 @@
  * Date         : Sep 17 2012
  */
 
-// this test returns NPE today
-
 drop dataverse test if exists;
 create dataverse test;
 
-write output to nc1:"rttest/meta13.adm";
+write output to nc1:"mdtest/basic_meta13.adm";
 
 create function test.foo(){
 "drop this function"
@@ -18,6 +16,9 @@
 
 drop function test.foo@0;
 
+count(
 for $l in dataset('Metadata.Function')
-return $l;
+where $l.DataverseName='test' and $l.Name='foo' and $l.Arity=0
+return $l);
 
+drop dataverse test;
diff --git a/asterix-app/src/test/resources/metadata/queries/regress_04_drop_nested_type.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta14.aql
similarity index 68%
rename from asterix-app/src/test/resources/metadata/queries/regress_04_drop_nested_type.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta14.aql
index f8c8626..9103614 100644
--- a/asterix-app/src/test/resources/metadata/queries/regress_04_drop_nested_type.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta14.aql
@@ -4,6 +4,7 @@
  * Expected Result  :  Success
  */
 
+
 drop dataverse test if exists;
 create dataverse test;
 use dataverse test;
@@ -12,10 +13,15 @@
    bar: int32?
 };
 
+
 drop type FooType;
 
-create type FooType as closed {
-   bar: int32?
-};
+write output to nc1:"mdtest/basic_meta14.adm";
 
-drop type FooType;
\ No newline at end of file
+count(
+for $x in dataset('Metadata.Datatype')
+where $x.DataverseName='test'
+return $x
+)
+
+drop dataverse test;
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta15.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta15.aql
new file mode 100644
index 0000000..91827ef
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta15.aql
@@ -0,0 +1,11 @@
+/*
+ * Description  : Query Metadata dataset Adapter to verify to contents.
+ * Expected Res : Success
+ * Date         : 25 Nov 2012
+ */
+
+write output to nc1:"mdtest/basic_meta15.adm";
+
+for $l in dataset('Metadata.DatasourceAdapter')
+return $l
+
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta16.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta16.aql
new file mode 100644
index 0000000..8b69b4f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta16.aql
@@ -0,0 +1,9 @@
+use dataverse Metadata;
+
+write output to nc1:"mdtest/basic_meta16.adm";
+
+for $c in dataset('Dataset')
+return $c
+
+
+
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta17.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta17.aql
new file mode 100644
index 0000000..2cd25ee
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta17.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"mdtest/basic_meta17.adm";
+     
+for $c in dataset('Datatype')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta18.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta18.aql
new file mode 100644
index 0000000..45f065a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta18.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"mdtest/basic_meta18.adm";
+
+for $c in dataset('Dataverse')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_index.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta19.aql
similarity index 66%
rename from asterix-app/src/test/resources/metadata/queries/metadata_index.aql
rename to asterix-app/src/test/resources/metadata/queries/basic/meta19.aql
index cbe0109..21de582 100644
--- a/asterix-app/src/test/resources/metadata/queries/metadata_index.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta19.aql
@@ -1,6 +1,6 @@
 use dataverse Metadata;
 
-write output to nc1:"rttest/metadata_index.adm";
+write output to nc1:"mdtest/basic_meta19.adm";
 
 for $c in dataset('Index')
 where $c.DataverseName='Metadata'
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta20.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta20.aql
new file mode 100644
index 0000000..259f23e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta20.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;    
+
+write output to nc1:"mdtest/basic_meta20.adm";
+
+for $c in dataset('Node')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta21.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta21.aql
new file mode 100644
index 0000000..b5f8110
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta21.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"mdtest/basic_meta21.adm";
+     
+for $c in dataset('Nodegroup')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset.aql
new file mode 100644
index 0000000..94f7a58
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataset.aql
@@ -0,0 +1,9 @@
+use dataverse Metadata;
+
+write output to nc1:"rttest/basic_metadata_dataset.adm";
+
+for $c in dataset('Dataset')
+return $c
+
+
+
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype.aql
new file mode 100644
index 0000000..4cc94de
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_datatype.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"rttest/basic_metadata_datatype.adm";
+     
+for $c in dataset('Datatype')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataverse.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataverse.aql
new file mode 100644
index 0000000..d3edfed
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_dataverse.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"rttest/basic_metadata_dataverse.adm";
+
+for $c in dataset('Dataverse')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_index.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_index.aql
similarity index 63%
copy from asterix-app/src/test/resources/metadata/queries/metadata_index.aql
copy to asterix-app/src/test/resources/metadata/queries/basic/metadata_index.aql
index cbe0109..cb78758 100644
--- a/asterix-app/src/test/resources/metadata/queries/metadata_index.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_index.aql
@@ -1,6 +1,6 @@
 use dataverse Metadata;
 
-write output to nc1:"rttest/metadata_index.adm";
+write output to nc1:"rttest/basic_metadata_index.adm";
 
 for $c in dataset('Index')
 where $c.DataverseName='Metadata'
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_node.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_node.aql
new file mode 100644
index 0000000..2d28ed6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_node.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;    
+
+write output to nc1:"rttest/basic_metadata_node.adm";
+
+for $c in dataset('Node')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/metadata_nodegroup.aql b/asterix-app/src/test/resources/metadata/queries/basic/metadata_nodegroup.aql
new file mode 100644
index 0000000..86615e6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/basic/metadata_nodegroup.aql
@@ -0,0 +1,6 @@
+use dataverse Metadata;
+
+write output to nc1:"rttest/basic_metadata_nodegroup.adm";
+     
+for $c in dataset('Nodegroup')
+return $c
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_dataset.aql b/asterix-app/src/test/resources/metadata/queries/custord_dataset.aql
deleted file mode 100644
index 530c6c9..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_dataset.aql
+++ /dev/null
@@ -1,10 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/custord_dataset.adm";
-
-for $c in dataset('Dataset')
-where $c.DataverseName = "custord"
-return $c
-
-
-
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_datatype.aql b/asterix-app/src/test/resources/metadata/queries/custord_datatype.aql
deleted file mode 100644
index 1525b93..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_datatype.aql
+++ /dev/null
@@ -1,7 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/custord_datatype.adm";
-     
-for $c in dataset('Datatype')
-where $c.DataverseName = "custord"
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_dataverse.aql b/asterix-app/src/test/resources/metadata/queries/custord_dataverse.aql
deleted file mode 100644
index 277bf62..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_dataverse.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/custord_dataverse.adm";
-
-for $c in dataset('Dataverse')
-where $c.DataverseName = "custord"
-return $c
-
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_index.aql b/asterix-app/src/test/resources/metadata/queries/custord_index.aql
deleted file mode 100644
index 95450da..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_index.aql
+++ /dev/null
@@ -1,7 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/custord_index.adm";
-
-for $c in dataset('Index')
-where $c.DataverseName = "custord"
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_nodegroup.aql b/asterix-app/src/test/resources/metadata/queries/custord_nodegroup.aql
deleted file mode 100644
index 090c739..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_nodegroup.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/metadata_nodegroup.adm";
-     
-for $c in dataset('Nodegroup')
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q1.aql b/asterix-app/src/test/resources/metadata/queries/custord_q1.aql
deleted file mode 100644
index 4ba02b9..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q1.aql
+++ /dev/null
@@ -1,65 +0,0 @@
-drop dataverse custord if exists;
-
-create dataverse custord;
-
-use dataverse custord;
-
-create type StreetType as closed {
-  number: int32?,
-  name: string
-}
-
-create type AddressType as open {
-  street: StreetType,
-  city: string,
-  state: string,
-  zip: int16
-}
-
-create type CustomerType as closed {
-  cid: int32, 
-  name: string,
-  age: int32?,
-  address: AddressType?,
-  interests: {{string}},
-  children: [ {
-             name : string,
-             dob : string
-             } ]
-}
-
-create type OrderType as open {
-  oid: int32,
-  cid: int32,
-  orderstatus: string,
-  orderpriority: string,
-  clerk: string,
-  total: float,
-  items: [ {
-       number: int64,
-       storeIds: {{int8}} 
-       } ]
-}
-
-create nodegroup group1  if not exists on nc1, nc2;
-
-create dataset Customers(CustomerType)
-  partitioned by key cid, name on group1;
-
-load dataset Customers 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/metadata/customerData.json"),("format"="adm"));
-        
-create dataset Orders(OrderType)
-  partitioned by key oid on group1;
-
-load dataset Orders 
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/metadata/orderData.json"),("format"="adm"));
-
-create index ordCustId if not exists on Orders(cid);
-
-create index custName if not exists on Customers(name, cid);
-
-create index ordClerkTotal if not exists on Orders(clerk, total);
-  
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q10.aql b/asterix-app/src/test/resources/metadata/queries/custord_q10.aql
deleted file mode 100644
index abe6041..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q10.aql
+++ /dev/null
@@ -1,7 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/custord_q10.adm";
-
-for $c in dataset('Dataset')
-return $c
-
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q2.aql b/asterix-app/src/test/resources/metadata/queries/custord_q2.aql
deleted file mode 100644
index 291ca64..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q2.aql
+++ /dev/null
@@ -1,7 +0,0 @@
-use dataverse custord;
-     
-write output to nc1:"rttest/custord_q2.adm";
-      
-for $c in dataset('Customers')
-order by $c.cid
-return $c.address
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q3.aql b/asterix-app/src/test/resources/metadata/queries/custord_q3.aql
deleted file mode 100644
index c093baa..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q3.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse custord;
-
-write output to nc1:"rttest/custord_q3.adm";
-      
-for $o in dataset('Orders')
-return {"id" : $o.oid, "total": $o.total} 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q4.aql b/asterix-app/src/test/resources/metadata/queries/custord_q4.aql
deleted file mode 100644
index 9bdf494..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q4.aql
+++ /dev/null
@@ -1,9 +0,0 @@
-use dataverse custord;
-
-write output to nc1:"rttest/custord_q4.adm";
-
-for $c in dataset('Customers')
-for $o in dataset('Orders')
-where $c.cid = $o.cid 
-order by $c.cid
-return {"cust_name":$c.name, "cust_age": $c.age, "order_total":$o.total} 
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q5.aql b/asterix-app/src/test/resources/metadata/queries/custord_q5.aql
deleted file mode 100644
index 716cce7..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q5.aql
+++ /dev/null
@@ -1,21 +0,0 @@
-drop nodegroup fuzzynodegroup if exists;
-
-drop dataverse fuzzyjoin if exists;
-
-use dataverse custord;
-
-create dataset Customers if not exists (CustomerType)
-  partitioned by key cid, name on group1;
-  
-drop dataset employees if exists;
-
-create index custName if not exists on Customers(name, cid);
-
-drop index Customers.custAddress if exists;
-
-create type StreetType if not exists as closed {
-  number: int32?,
-  name: string
-}
-
-drop type DBLPType if exists;
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q6.aql b/asterix-app/src/test/resources/metadata/queries/custord_q6.aql
deleted file mode 100644
index 231c410..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q6.aql
+++ /dev/null
@@ -1,7 +0,0 @@
-use dataverse custord;
-
-drop index Orders.ordClerkTotal;
-
-drop dataset Orders;
-
-drop type OrderType;
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q7.aql b/asterix-app/src/test/resources/metadata/queries/custord_q7.aql
deleted file mode 100644
index 247ed78..0000000
--- a/asterix-app/src/test/resources/metadata/queries/custord_q7.aql
+++ /dev/null
@@ -1,2 +0,0 @@
-drop dataverse custord;
- 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataset.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataset.aql
new file mode 100644
index 0000000..4cce09a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataset.aql
@@ -0,0 +1 @@
+drop dataset DBLP;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataverse.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataverse.aql
new file mode 100644
index 0000000..b92618e
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_dataverse.aql
@@ -0,0 +1 @@
+drop dataverse fuzzyjoin;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_index.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_index.aql
new file mode 100644
index 0000000..386a71d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_index.aql
@@ -0,0 +1 @@
+drop index Cust.ord;
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q8.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_nodegroup.aql
similarity index 95%
rename from asterix-app/src/test/resources/metadata/queries/custord_q8.aql
rename to asterix-app/src/test/resources/metadata/queries/exception/exception_drop_nodegroup.aql
index bb4225d..664f52f 100644
--- a/asterix-app/src/test/resources/metadata/queries/custord_q8.aql
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_nodegroup.aql
@@ -1,2 +1 @@
 drop nodegroup group1;
- 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type1.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type1.aql
new file mode 100644
index 0000000..4c6c7ae
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type1.aql
@@ -0,0 +1 @@
+drop type AddressType;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type2.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type2.aql
new file mode 100644
index 0000000..b97b812
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type2.aql
@@ -0,0 +1 @@
+drop type CustomerType;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type3.aql b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type3.aql
new file mode 100644
index 0000000..3812745
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/exception_drop_type3.aql
@@ -0,0 +1 @@
+drop type StreetType;
diff --git a/asterix-app/src/test/resources/metadata/queries/exceptions.aql b/asterix-app/src/test/resources/metadata/queries/exceptions.aql
deleted file mode 100644
index b6ec188..0000000
--- a/asterix-app/src/test/resources/metadata/queries/exceptions.aql
+++ /dev/null
@@ -1,21 +0,0 @@
-// Each statement (except for the second "use" statement) should throw an exception.
-
-//drop nodegroup group1;
-
-//drop dataverse fuzzyjoin;
-
-//use dataverse fuzzy;
-
-//use dataverse custord;
-
-//drop index Cust.ord;
-
-//drop index Customers.ord;
-
-//drop type AddressType;
-
-//drop type CustomerType;
-
-//drop type StreetType;
-
-//drop dataset DBLP;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_dataset.aql b/asterix-app/src/test/resources/metadata/queries/metadata_dataset.aql
deleted file mode 100644
index 723e65c..0000000
--- a/asterix-app/src/test/resources/metadata/queries/metadata_dataset.aql
+++ /dev/null
@@ -1,9 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/metadata_dataset.adm";
-
-for $c in dataset('Dataset')
-return $c
-
-
-
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_datatype.aql b/asterix-app/src/test/resources/metadata/queries/metadata_datatype.aql
deleted file mode 100644
index a144f4f..0000000
--- a/asterix-app/src/test/resources/metadata/queries/metadata_datatype.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/metadata_datatype.adm";
-     
-for $c in dataset('Datatype')
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_dataverse.aql b/asterix-app/src/test/resources/metadata/queries/metadata_dataverse.aql
deleted file mode 100644
index e7e1249..0000000
--- a/asterix-app/src/test/resources/metadata/queries/metadata_dataverse.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/metadata_dataverse.adm";
-
-for $c in dataset('Dataverse')
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_node.aql b/asterix-app/src/test/resources/metadata/queries/metadata_node.aql
deleted file mode 100644
index ce28ac8..0000000
--- a/asterix-app/src/test/resources/metadata/queries/metadata_node.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse Metadata;    
-
-write output to nc1:"rttest/metadata_node.adm";
-
-for $c in dataset('Node')
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/metadata_nodegroup.aql b/asterix-app/src/test/resources/metadata/queries/metadata_nodegroup.aql
deleted file mode 100644
index 090c739..0000000
--- a/asterix-app/src/test/resources/metadata/queries/metadata_nodegroup.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse Metadata;
-
-write output to nc1:"rttest/metadata_nodegroup.adm";
-     
-for $c in dataset('Nodegroup')
-return $c
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/regress_01_create_type.aql b/asterix-app/src/test/resources/metadata/queries/regress_01_create_type.aql
deleted file mode 100644
index e978572..0000000
--- a/asterix-app/src/test/resources/metadata/queries/regress_01_create_type.aql
+++ /dev/null
@@ -1,12 +0,0 @@
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
-
-create nodegroup group1 if not exists on nc1, nc2;
-
-// Create a type in test dataverse.
-// This type will be referred to in a subsequent script.
-create type TestType as closed {
-  id: int32,
-  name: string
-}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/regress_02_refer_existing_type.aql b/asterix-app/src/test/resources/metadata/queries/regress_02_refer_existing_type.aql
deleted file mode 100644
index aeed866..0000000
--- a/asterix-app/src/test/resources/metadata/queries/regress_02_refer_existing_type.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-// Refer to existing type in test dataverse.
-create type UseTestType as closed {
-  test: TestType
-}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/regress_03_repeated_create_drop.aql b/asterix-app/src/test/resources/metadata/queries/regress_03_repeated_create_drop.aql
deleted file mode 100644
index 02009da..0000000
--- a/asterix-app/src/test/resources/metadata/queries/regress_03_repeated_create_drop.aql
+++ /dev/null
@@ -1,34 +0,0 @@
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
-
-create type TypeOne as open {
-  f: int32
-}
-create nodegroup group1 if not exists on nc1, nc2;
-create dataset typeonedataset(TypeOne)
-partitioned by key f on group1;
-
-
-drop dataverse test if exists;  
-create dataverse test;
-use dataverse test;
-
-create type TypeTwo as open {
-  f: int32
-}
-create nodegroup group1 if not exists on nc1, nc2;
-create dataset typetwodataset(TypeTwo)
-partitioned by key f on group1;
-
-
-drop dataverse test if exists;  
-create dataverse test;
-use dataverse test;
-
-create type TypeThree as open {
-  f: int32
-}
-create nodegroup group1 if not exists on nc1, nc2;
-create dataset typethreedataset(TypeThree)
-partitioned by key f on group1;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/custord_q9.aql b/asterix-app/src/test/resources/metadata/queries/transaction/failure_previous_success.aql
similarity index 65%
rename from asterix-app/src/test/resources/metadata/queries/custord_q9.aql
rename to asterix-app/src/test/resources/metadata/queries/transaction/failure_previous_success.aql
index 2d3f960..5250eef 100644
--- a/asterix-app/src/test/resources/metadata/queries/custord_q9.aql
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/failure_previous_success.aql
@@ -1,3 +1,9 @@
+/*
+ * Description  : Cause a failure by creating an existing type. Verify that rollback does not affect the pre-existing types.
+ *                Verification is done in a separate session (compilation unit).
+ * Expected Res : Success
+ * Date         : 24 Nov 2012
+ */
 drop dataverse custord if exists;
 
 create dataverse custord;
@@ -41,12 +47,8 @@
        } ]
 }
 
-create external dataset Customers(CustomerType)
-using "edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter"
-      (("hdfs"="hdfs://temp1/data1"),("n1"="v1"),("n2"="v2"), ("n3"="v3"));
-  
-create external dataset Orders(OrderType)
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1:///tmp1/data1,nc2:///tmp2/data2"));
 
-
+create type StreetType as closed {
+  number: int32?,
+  name: string
+}
diff --git a/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
new file mode 100644
index 0000000..d4678f2
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/failure_subsequent_no_execution.aql
@@ -0,0 +1,47 @@
+/*
+ * Description  : Create dataverse, types, nodegroup and a dataset. Cause exception by re-creating nodegroup. 
+ *                Subsequent statement(s) should not be executed. This is verified in a separate session (compilation unit).
+ * Expected Res : Exception
+ * Date         : 24 Nov 2012
+ */
+drop dataverse custord if exists;
+
+create dataverse custord;
+
+use dataverse custord;
+
+create type StreetType as closed {
+  number: int32?,
+  name: string
+}
+
+create type AddressType as open {
+  street: StreetType,
+  city: string,
+  state: string,
+  zip: int16
+}
+
+create type CustomerType as closed {
+  cid: int32, 
+  name: string,
+  age: int32?,
+  address: AddressType?,
+  interests: {{string}},
+  children: [ {
+             name : string,
+             dob : string
+             } ]
+}
+
+create nodegroup group1 if not exists on nc1, nc2;
+
+create dataset Customers(CustomerType)
+  partitioned by key cid, name on group1;
+
+create nodegroup group1 on nc1, nc2;
+
+// the following statement should not get executed 
+// as the above statement causes an exception
+create index custName on Customers(name, cid);
+  
diff --git a/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_previous_success.aql b/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_previous_success.aql
new file mode 100644
index 0000000..351a9e4
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_previous_success.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : Verify the state of the Metadata after the failure caused in failure_previous_success.aql 
+ * Expected Res : Success
+ * Date         : 24 Nov 2012
+ */
+use dataverse custord;
+
+write output to nc1:"mdtest/transaction_verify_failure_previous_success.adm";
+
+for $x in dataset('Metadata.Datatype')
+where $x.DataverseName='custord'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_subsequent_no_execution.aql b/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_subsequent_no_execution.aql
new file mode 100644
index 0000000..6505903
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/transaction/verify_failure_subsequent_no_execution.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : Verify the state of the metadata after the failure caused by failure_subsequent_no_execution.aql
+ * Expected Res : Success
+ * Date         : 24 Nov 2012
+ */
+
+use dataverse custord;
+
+write output to nc1:"mdtest/transaction_verify_failure_subsequent_no_execution.adm";
+
+for $x in dataset('Metadata.Index')
+where $x.DataverseName='custord'
+return $x
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta01.adm b/asterix-app/src/test/resources/metadata/results/basic/meta01.adm
new file mode 100644
index 0000000..e878a54
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta01.adm
@@ -0,0 +1,2 @@
+{ "DataverseName": "Metadata", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Sat Nov 24 14:44:45 PST 2012" }
+{ "DataverseName": "testdv", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Sat Nov 24 14:45:14 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta02.adm b/asterix-app/src/test/resources/metadata/results/basic/meta02.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta02.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta02.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta03.adm b/asterix-app/src/test/resources/metadata/results/basic/meta03.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta03.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta03.adm
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta04.adm b/asterix-app/src/test/resources/metadata/results/basic/meta04.adm
new file mode 100644
index 0000000..8f892be
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta04.adm
@@ -0,0 +1 @@
+{ "DataverseName": "testdv", "DatatypeName": "testtype", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "id", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 14:27:13 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta05.adm b/asterix-app/src/test/resources/metadata/results/basic/meta05.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta05.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta05.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta06.adm b/asterix-app/src/test/resources/metadata/results/basic/meta06.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta06.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta06.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta07.adm b/asterix-app/src/test/resources/metadata/results/basic/meta07.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta07.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta07.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta08.adm b/asterix-app/src/test/resources/metadata/results/basic/meta08.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta08.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta08.adm
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta09.adm b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
new file mode 100644
index 0000000..b85737d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta09.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Sat Nov 24 14:28:44 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta10.adm b/asterix-app/src/test/resources/metadata/results/basic/meta10.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta10.adm
@@ -0,0 +1 @@
+0
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta11.adm b/asterix-app/src/test/resources/metadata/results/basic/meta11.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta11.adm
@@ -0,0 +1 @@
+0
diff --git a/asterix-app/src/test/resources/metadata/results/meta12.adm b/asterix-app/src/test/resources/metadata/results/basic/meta12.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/meta12.adm
rename to asterix-app/src/test/resources/metadata/results/basic/meta12.adm
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta13.adm b/asterix-app/src/test/resources/metadata/results/basic/meta13.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta13.adm
@@ -0,0 +1 @@
+0
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta14.adm b/asterix-app/src/test/resources/metadata/results/basic/meta14.adm
new file mode 100644
index 0000000..573541a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta14.adm
@@ -0,0 +1 @@
+0
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
new file mode 100644
index 0000000..4414ed0
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
@@ -0,0 +1,6 @@
+{ "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
+{ "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Nov 25 20:55:22 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_dataset.adm b/asterix-app/src/test/resources/metadata/results/basic/meta16.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/metadata_dataset.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta16.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_datatype.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/metadata_datatype.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta17.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_dataverse.adm b/asterix-app/src/test/resources/metadata/results/basic/meta18.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/metadata_dataverse.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta18.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_index.adm b/asterix-app/src/test/resources/metadata/results/basic/meta19.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/metadata_index.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta19.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta07.adm b/asterix-app/src/test/resources/metadata/results/basic/meta20.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/meta07.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta20.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_nodegroup.adm b/asterix-app/src/test/resources/metadata/results/basic/meta21.adm
similarity index 100%
copy from asterix-app/src/test/resources/metadata/results/metadata_nodegroup.adm
copy to asterix-app/src/test/resources/metadata/results/basic/meta21.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_dataset.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_dataset.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_dataset.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_datatype.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_datatype.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_dataverse.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_dataverse.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_dataverse.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_dataverse.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_index.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_index.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_index.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_node.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_node.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_node.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_node.adm
diff --git a/asterix-app/src/test/resources/metadata/results/metadata_nodegroup.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_nodegroup.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/metadata_nodegroup.adm
rename to asterix-app/src/test/resources/metadata/results/basic/metadata_nodegroup.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_dataset.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_dataset.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_dataset.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_dataset.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_datatype.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_datatype.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_datatype.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_datatype.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_dataverse.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_dataverse.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_dataverse.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_dataverse.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_index.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_index.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_index.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_index.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_nodegroup.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_nodegroup.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_nodegroup.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_nodegroup.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_q10.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_q10.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_q10.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_q10.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_q2.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_q2.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_q2.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_q2.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_q3.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_q3.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_q3.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_q3.adm
diff --git a/asterix-app/src/test/resources/metadata/results/custord_q4.adm b/asterix-app/src/test/resources/metadata/results/custord/custord_q4.adm
similarity index 100%
rename from asterix-app/src/test/resources/metadata/results/custord_q4.adm
rename to asterix-app/src/test/resources/metadata/results/custord/custord_q4.adm
diff --git a/asterix-app/src/test/resources/metadata/results/meta01.adm b/asterix-app/src/test/resources/metadata/results/meta01.adm
deleted file mode 100644
index 90ef844..0000000
--- a/asterix-app/src/test/resources/metadata/results/meta01.adm
+++ /dev/null
@@ -1,2 +0,0 @@
-taverseName": "Metadata", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Sat Sep 15 14:30:48 PDT 2012" }
-{ "DataverseName": "testdv", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Sat Sep 15 14:31:34 PDT 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta04.adm b/asterix-app/src/test/resources/metadata/results/meta04.adm
deleted file mode 100644
index 7ad95e2..0000000
--- a/asterix-app/src/test/resources/metadata/results/meta04.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "DataverseName": "testdv", "DatatypeName": "testtype", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "id", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Sep 15 14:56:51 PDT 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta09.adm b/asterix-app/src/test/resources/metadata/results/meta09.adm
deleted file mode 100644
index bddffc2..0000000
--- a/asterix-app/src/test/resources/metadata/results/meta09.adm
+++ /dev/null
@@ -1 +0,0 @@
-taverseName": "test", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 12:47:50 PDT 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta10.adm b/asterix-app/src/test/resources/metadata/results/meta10.adm
deleted file mode 100644
index 7dce05f..0000000
--- a/asterix-app/src/test/resources/metadata/results/meta10.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "DataverseName": "Metadata", "DataFormat": "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Mon Sep 17 12:46:38 PDT 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/meta11.adm b/asterix-app/src/test/resources/metadata/results/meta11.adm
deleted file mode 100644
index ed8670f..0000000
--- a/asterix-app/src/test/resources/metadata/results/meta11.adm
+++ /dev/null
@@ -1,8 +0,0 @@
-{ "DataverseName": "Metadata", "DatasetName": "Adapter", "DataTypeName": "AdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name" ], "PrimaryKey": [ "DataverseName", "Name" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName" ], "PrimaryKey": [ "DataverseName", "DatasetName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatatypeName" ], "PrimaryKey": [ "DataverseName", "DatatypeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName" ], "PrimaryKey": [ "DataverseName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Function", "DataTypeName": "FunctionRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "Name", "Arity" ], "PrimaryKey": [ "DataverseName", "Name", "Arity" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Index", "DataTypeName": "IndexRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "DataverseName", "DatasetName", "IndexName" ], "PrimaryKey": [ "DataverseName", "DatasetName", "IndexName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Node", "DataTypeName": "NodeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "NodeName" ], "PrimaryKey": [ "NodeName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
-{ "DataverseName": "Metadata", "DatasetName": "Nodegroup", "DataTypeName": "NodeGroupRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "GroupName" ], "PrimaryKey": [ "GroupName" ], "GroupName": "MetadataGroup" }, "ExternalDetails": null, "FeedDetails": null, "Timestamp": "Mon Sep 17 13:09:22 PDT 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_previous_success.adm b/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_previous_success.adm
new file mode 100644
index 0000000..3d0da4d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_previous_success.adm
@@ -0,0 +1,13 @@
+{ "DataverseName": "custord", "DatatypeName": "AddressType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "street", "FieldType": "StreetType" }, { "FieldName": "city", "FieldType": "string" }, { "FieldName": "state", "FieldType": "string" }, { "FieldName": "zip", "FieldType": "int16" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "CustomerType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "cid", "FieldType": "int32" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "age", "FieldType": "Field_age_in_CustomerType" }, { "FieldName": "address", "FieldType": "Field_address_in_CustomerType" }, { "FieldName": "interests", "FieldType": "Field_interests_in_CustomerType" }, { "FieldName": "children", "FieldType": "Field_children_in_CustomerType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_address_in_CustomerType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "AddressType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_age_in_CustomerType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "int32" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_children_in_CustomerType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_children_in_CustomerType_ItemType" }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_children_in_CustomerType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "name", "FieldType": "string" }, { "FieldName": "dob", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_interests_in_CustomerType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_items_in_OrderType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_items_in_OrderType_ItemType" }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_items_in_OrderType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "number", "FieldType": "int64" }, { "FieldName": "storeIds", "FieldType": "Field_storeIds_in_Field_items_in_OrderType_ItemType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_number_in_StreetType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "int32" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "Field_storeIds_in_Field_items_in_OrderType_ItemType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "int8", "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "OrderType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "oid", "FieldType": "int32" }, { "FieldName": "cid", "FieldType": "int32" }, { "FieldName": "orderstatus", "FieldType": "string" }, { "FieldName": "orderpriority", "FieldType": "string" }, { "FieldName": "clerk", "FieldType": "string" }, { "FieldName": "total", "FieldType": "float" }, { "FieldName": "items", "FieldType": "Field_items_in_OrderType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
+{ "DataverseName": "custord", "DatatypeName": "StreetType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "number", "FieldType": "Field_number_in_StreetType" }, { "FieldName": "name", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Sat Nov 24 17:20:04 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_subsequent_no_execution.adm b/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_subsequent_no_execution.adm
new file mode 100644
index 0000000..7ba26bd
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/results/transaction/verify_failure_subsequent_no_execution.adm
@@ -0,0 +1 @@
+{ "DataverseName": "custord", "DatasetName": "Customers", "IndexName": "Customers", "IndexStructure": "BTREE", "SearchKey": [ "cid", "name" ], "IsPrimary": true, "Timestamp": "Sat Nov 24 17:23:18 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
new file mode 100644
index 0000000..78dc3b5
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -0,0 +1,173 @@
+<test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+  <test-group name="basic">
+    <test-case FilePath="basic">
+      <compilation-unit name="meta01">
+        <output-file compare="Text">meta01.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta02">
+        <output-file compare="Text">meta02.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta03">
+        <output-file compare="Text">meta03.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta04">
+        <output-file compare="Text">meta04.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta05">
+        <output-file compare="Text">meta05.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta06">
+        <output-file compare="Text">meta06.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta07">
+        <output-file compare="Text">meta07.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta08">
+        <output-file compare="Text">meta08.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta09">
+        <output-file compare="Text">meta09.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta10">
+        <output-file compare="Text">meta10.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta11">
+        <output-file compare="Text">meta11.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta12">
+        <output-file compare="Text">meta12.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta13">
+        <output-file compare="Text">meta13.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta14">
+        <output-file compare="Text">meta14.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta15">
+        <output-file compare="Text">meta15.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta16">
+        <output-file compare="Text">meta16.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta17">
+        <output-file compare="Text">meta17.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta18">
+        <output-file compare="Text">meta18.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta19">
+        <output-file compare="Text">meta19.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta20">
+        <output-file compare="Text">meta20.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
+      <compilation-unit name="meta21">
+        <output-file compare="Text">meta21.adm</output-file>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+  <test-group name="exception">
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_dataset"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_dataverse"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_index"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_nodegroup"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_type1"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_type2"> 
+        <expected-error>MetadataException</expected-error>
+        <output-file compare="Text">none.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="exception_drop_type3"> 
+        <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+  <test-group name="transaction">
+    <test-case FilePath="transaction">
+      <compilation-unit name="failure_previous_success">
+        <output-file compare="Text">failure_previous_success.adm</output-file>
+        <expected-error>MetadataException</expected-error>
+      </compilation-unit>
+      <compilation-unit name="verify_failure_previous_success">
+        <output-file compare="Text">verify_failure_previous_success.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="transaction">
+      <compilation-unit name="failure_subsequent_no_execution">
+        <output-file compare="Text">failure_subsequent_no_execution.adm</output-file>
+        <expected-error>MetadataException</expected-error>
+      </compilation-unit>
+      <compilation-unit name="verify_failure_subsequent_no_execution">
+        <output-file compare="Text">verify_failure_subsequent_no_execution.adm</output-file>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+</test-suite>
diff --git a/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
new file mode 100644
index 0000000..44cdef8
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/queries/btree-index-join/secondary-equi-join-multiindex.aql
@@ -0,0 +1,60 @@
+/*
+ * Description    : Equi joins two datasets, FacebookUsers and FacebookMessages, based on the user id.
+ *                  We first expect FacebookUsers' primary index to be used 
+ *                  to satisfy the range condition on its primary key. 
+ *                  FacebookMessages has a secondary btree index on author-id-copy, and given the 'indexnl' hint 
+ *                  we expect the join to be transformed into an indexed nested-loop join.
+ * Success        : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type EmploymentType as closed { 
+  organization-name: string,
+  start-date: date,
+  end-date: date?
+} 
+
+create type FacebookUserType as closed { 
+  id: int32,
+  id-copy: int32,
+  alias: string,
+  name: string,
+  user-since: datetime,
+  user-since-copy: datetime,
+  friend-ids: {{ int32 }},
+  employment: [EmploymentType]
+} 
+
+create type FacebookMessageType as closed { 
+  message-id: int32,
+  message-id-copy: int32,
+  author-id: int32,
+  author-id-copy: int32,
+  in-response-to: int32?,
+  sender-location: point?,
+  message: string
+} 
+
+create dataset FacebookUsers(FacebookUserType)
+partitioned by key id;
+
+create dataset FacebookMessages(FacebookMessageType)
+partitioned by key message-id;
+
+create index fbmIdxAutId if not exists on FacebookMessages(author-id-copy);
+
+write output to nc1:"rttest/btree-index-join_title-secondary-equi-join-multiindex.adm";
+
+for $user in dataset('FacebookUsers')
+for $message in dataset('FacebookMessages')
+where $user.id /*+ indexnl */ = $message.author-id-copy
+and $user.id >= 11000 and $user.id <= 12000
+return {
+  "fbu-ID": $user.id,
+  "fbm-auth-ID": $message.author-id,
+  "uname": $user.name,
+  "message": $message.message
+}
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
new file mode 100644
index 0000000..76a5aa8
--- /dev/null
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
@@ -0,0 +1,19 @@
+-- SINK_WRITE  |PARTITIONED|
+  -- RANDOM_MERGE_EXCHANGE  |PARTITIONED|
+    -- STREAM_PROJECT  |PARTITIONED|
+      -- ASSIGN  |PARTITIONED|
+        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+          -- BTREE_SEARCH  |PARTITIONED|
+            -- ONE_TO_ONE_EXCHANGE  |LOCAL|
+              -- STABLE_SORT [$$29(ASC)]  |LOCAL|
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  -- STREAM_PROJECT  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- BTREE_SEARCH  |PARTITIONED|
+                        -- BROADCAST_EXCHANGE  |PARTITIONED|
+                          -- ASSIGN  |PARTITIONED|
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  -- ASSIGN  |PARTITIONED|
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index a300f9b..fad1731 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -121,6 +121,7 @@
     private Map<String, String> config;
     private IAWriterFactory writerFactory;
     private FileSplit outputFile;
+    private long jobTxnId;
 
     private final Dataverse defaultDataverse;
 
@@ -147,6 +148,10 @@
         this.defaultDataverse = defaultDataverse;
         this.stores = AsterixProperties.INSTANCE.getStores();
     }
+    
+    public void setJobTxnId(long txnId){
+    	this.jobTxnId = txnId;
+    }
 
     public Dataverse getDefaultDataverse() {
         return defaultDataverse;
@@ -404,7 +409,7 @@
                         dataset.getDatasetName(), indexName);
                 int numSecondaryKeys = secondaryIndex.getKeyFieldNames().size();
                 numKeys += numSecondaryKeys;
-                keysStartIndex = outputRecDesc.getFieldCount() - numKeys;
+                keysStartIndex = outputVars.size() - numKeys;
             }
             IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
                     outputVars, keysStartIndex, numKeys, typeEnv, context);
@@ -654,7 +659,7 @@
             TreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                     spec, recordDesc, appContext.getStorageManagerInterface(), appContext.getIndexRegistryProvider(),
                     splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
-                    new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackProvider.INSTANCE, mdTxnCtx.getTxnId());
+                    new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                     splitsAndConstraint.second);
         } catch (MetadataException me) {
@@ -812,12 +817,12 @@
             IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
             Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
                     dataverseName, datasetName, indexName);
-            TreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+            TreeIndexInsertUpdateDeleteOperatorDescriptor btreeInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                     spec, recordDesc, appContext.getStorageManagerInterface(), appContext.getIndexRegistryProvider(),
                     splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
                     new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE,
-                    mdTxnCtx.getTxnId());
-            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
+                    jobTxnId);
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert,
                     splitsAndConstraint.second);
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
@@ -884,15 +889,15 @@
                     spec, recordDesc, appContext.getStorageManagerInterface(), appContext.getIndexRegistryProvider(),
                     splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
                     new RTreeDataflowHelperFactory(valueProviderFactories), filterFactory,
-                    NoOpOperationCallbackProvider.INSTANCE, mdTxnCtx.getTxnId());
+                    NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeUpdate, splitsAndConstraint.second);
         } catch (MetadataException me) {
             throw new AlgebricksException(me);
         }
     }
 
-    public long getTxnId() {
-        return mdTxnCtx.getTxnId();
+    public long getJobTxnId() {
+        return jobTxnId;
     }
 
     public static ITreeIndexFrameFactory createBTreeNSMInteriorFrameFactory(ITypeTraits[] typeTraits) {