Fixed issue 113: insert into an empty dataset that has secondary indexes.

Create the underlying tree-index files at DDL time:
- DdlTranslator now runs a dataset-create job after adding an INTERNAL or FEED dataset to the metadata, and an index-create job after adding a secondary index.
- DatasetOperations gains createDatasetJobSpec(), which builds a TreeIndexCreateOperatorDescriptor job for the dataset's primary B-tree.
- IndexOperations gains createBtreeIndexJobSpec() and createRtreeIndexJobSpec() for creating empty secondary indexes; the existing bulk-load variants are renamed to loadBtreeIndexJobSpec(), loadRtreeIndexJobSpec(), and loadKeywordIndexJobSpec().
- Re-enabled the insert-into-empty-dataset-with-index runtime tests (merged the two variants into one) and cleared only.txt.

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix-fix-issue-113@250 eaa15691-b419-025a-1212-ee371bd00084
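For reference, a minimal AQL sketch of the scenario this change covers, reconstructed from the runtime test sources in this patch (the dataverse, type, dataset, and index names are the ones the test uses); the insert targets a dataset whose secondary indexes are still empty:

    drop dataverse test if exists;
    create dataverse test;
    use dataverse test;

    create type LineIDType as closed {
      l_orderkey: int32,
      l_linenumber: int32,
      l_suppkey: int32
    }

    create dataset LineID(LineIDType)
      partitioned by key l_orderkey, l_linenumber;

    /* Both secondary indexes are created while LineID is still empty. */
    create index idx_LineID_partkey on LineID(l_linenumber);
    create index idx_LineID_suppkey on LineID(l_suppkey);

    /* With this change, CREATE DATASET / CREATE INDEX already built the
       B-tree files, so the insert goes against existing (empty) indexes. */
    insert into dataset LineID (
    let $x:=1
    let $y:=2
    let $z:=3
    return { "l_orderkey": $x, "l_linenumber": $y, "l_suppkey": $z }
    );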
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
index 422e732..5948445 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
@@ -215,6 +215,9 @@
                     MetadataManager.INSTANCE.addDataset(mdTxnCtx,
                             new Dataset(compiledDeclarations.getDataverseName(), datasetName, itemTypeName,
                                     datasetDetails, dsType));
+                    if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
+                        compileDatasetDeclStatement(hcc, datasetName);
+                    }
                     break;
                 }
 
@@ -240,7 +243,8 @@
                         MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(compiledDeclarations.getDataverseName(),
                                 datasetName, indexName, stmtCreateIndex.getIndexType(),
                                 stmtCreateIndex.getFieldExprs(), false));
+                        compileCreateIndexStatement(hcc, stmtCreateIndex);
                     }
                     break;
                 }
                 case TYPE_DECL: {
@@ -475,6 +479,36 @@
         }
     }
 
+    private void compileDatasetDeclStatement(IHyracksClientConnection hcc, String datasetName) throws AsterixException,
+            AlgebricksException, Exception {
+        runJob(hcc, DatasetOperations.createDatasetJobSpec(datasetName, compiledDeclarations));
+    }
+    
+    private void compileCreateIndexStatement(IHyracksClientConnection hcc, CreateIndexStatement stmtCreateIndex) throws Exception {
+        JobSpecification spec = null;
+        switch (stmtCreateIndex.getIndexType()) {
+            case BTREE: {
+                spec = IndexOperations.createBtreeIndexJobSpec(stmtCreateIndex.getDatasetName().getValue(),
+                        stmtCreateIndex.getIndexName().getValue(), stmtCreateIndex.getFieldExprs(),
+                        compiledDeclarations);
+                break;
+            }
+            case RTREE: {
+                spec = IndexOperations.createRtreeIndexJobSpec(stmtCreateIndex.getDatasetName().getValue(),
+                        stmtCreateIndex.getIndexName().getValue(), stmtCreateIndex.getFieldExprs(),
+                        compiledDeclarations);
+                break;
+            }
+            default: {
+                throw new AsterixException("Create index not implemented for index type: "
+                        + stmtCreateIndex.getIndexType());
+            }
+        }
+        if (spec != null) {
+            runJob(hcc, spec);
+        }
+    }
+
     private void compileDatasetDropStatement(IHyracksClientConnection hcc, MetadataTransactionContext mdTxnCtx,
             String datasetName) throws Exception {
         CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(datasetName);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index fcc0158..2b341db 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -69,6 +69,7 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -137,6 +138,36 @@
         return specs;
     }
 
+    // TODO: Lots of common code in this file. Refactor everything after merging in asterix-fix-issue-9.
+    public static JobSpecification createDatasetJobSpec(String datasetName,
+            AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+        AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(datasetName);
+        if (compiledDatasetDecl == null) {
+            throw new AsterixException("Could not find dataset " + datasetName);
+        }
+        JobSpecification spec = new JobSpecification();
+        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+                compiledDatasetDecl, metadata.getFormat().getBinaryComparatorFactoryProvider());
+        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(compiledDatasetDecl, metadata);
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, datasetName);
+        FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < fs.length; i++) {
+            sb.append(stringOf(fs[i]) + " ");
+        }
+        LOGGER.info("CREATING File Splits: " + sb.toString());
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = AsterixIndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
+        TreeIndexCreateOperatorDescriptor indexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories,
+                new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
+                splitsAndConstraint.second);
+        spec.addRoot(indexCreateOp);
+        return spec;
+    }
+    
     public static Job createLoadDatasetJobSpec(CompiledLoadFromFileStatement loadStmt,
             AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
         String datasetName = loadStmt.getDatasetName();
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
index 95a65bd..13a3fdf 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
@@ -56,6 +56,7 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
@@ -73,15 +74,15 @@
 
         switch (createIndexStmt.getIndexType()) {
             case BTREE: {
-                return createBtreeIndexJobSpec(createIndexStmt, datasetDecls);
+                return loadBtreeIndexJobSpec(createIndexStmt, datasetDecls);
             }
 
             case RTREE: {
-                return createRtreeIndexJobSpec(createIndexStmt, datasetDecls);
+                return loadRtreeIndexJobSpec(createIndexStmt, datasetDecls);
             }
 
             case KEYWORD: {
-                return createKeywordIndexJobSpec(createIndexStmt, datasetDecls);
+                return loadKeywordIndexJobSpec(createIndexStmt, datasetDecls);
             }
 
             case QGRAM: {
@@ -95,7 +96,7 @@
 
         }
     }
-
+    
     public static JobSpecification createSecondaryIndexDropJobSpec(CompiledIndexDropStatement deleteStmt,
             AqlCompiledMetadataDeclarations datasetDecls) throws AlgebricksException, MetadataException {
         String datasetName = deleteStmt.getDatasetName();
@@ -116,8 +117,69 @@
         return spec;
     }
 
+    // TODO: Lots of common code in this file. Refactor everything after merging in asterix-fix-issue-9.
+    public static JobSpecification createBtreeIndexJobSpec(String datasetName, String secondaryIndexName, List<String> secondaryKeyFields,
+            AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+        JobSpecification spec = new JobSpecification();
+
+        AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(datasetName);
+        if (compiledDatasetDecl == null) {
+            throw new AlgebricksException("Unknown dataset " + datasetName);
+        }
+        ARecordType itemType = (ARecordType) metadata.findType(compiledDatasetDecl.getItemTypeName());
+        if (compiledDatasetDecl.getDatasetType() == DatasetType.EXTERNAL) {
+            throw new AsterixException("Cannot index an external dataset (" + datasetName + ").");
+        }
+        AqlCompiledDatasetDecl srcCompiledDatasetDecl = compiledDatasetDecl;
+        int numPrimaryKeys = DatasetUtils.getPartitioningFunctions(compiledDatasetDecl).size();
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = AsterixIndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
+        int i = 0;
+        List<Triple<IEvaluatorFactory, ScalarFunctionCallExpression, IAType>> partitioningFunctions = DatasetUtils
+                .getPartitioningFunctions(srcCompiledDatasetDecl);
+        for (Triple<IEvaluatorFactory, ScalarFunctionCallExpression, IAType> evalFactoryAndType : partitioningFunctions) {
+            IAType keyType = evalFactoryAndType.third;
+            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    keyType, true);
+            primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+            ++i;
+        }
+        primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
+        int numSecondaryKeys = secondaryKeyFields.size();
+        IEvaluatorFactory[] evalFactories = new IEvaluatorFactory[numSecondaryKeys];
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys
+                + numPrimaryKeys];
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
+        for (i = 0; i < numSecondaryKeys; i++) {
+            evalFactories[i] = metadata.getFormat().getFieldAccessEvaluatorFactory(itemType, secondaryKeyFields.get(i),
+                    numPrimaryKeys);
+            IAType keyType = AqlCompiledIndexDecl.keyFieldType(secondaryKeyFields.get(i), itemType);
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    keyType, true);
+            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+        }
+        // fill in comparators and type traits for the primary-index key fields
+        for (i = 0; i < numPrimaryKeys; i++) {
+            secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
+            secondaryTypeTraits[numSecondaryKeys + i] = primaryTypeTraits[i];
+        }
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, secondaryIndexName);
+        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondarySplitsAndConstraint.first, secondaryTypeTraits,
+                secondaryComparatorFactories, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+                secondarySplitsAndConstraint.second);
+        spec.addRoot(secondaryIndexCreateOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+    
     @SuppressWarnings("unchecked")
-    public static JobSpecification createBtreeIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
+    public static JobSpecification loadBtreeIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
             AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
 
         JobSpecification spec = new JobSpecification();
@@ -319,8 +381,77 @@
 
     }
 
+    // TODO: Lots of common code in this file. Refactor everything after merging in asterix-fix-issue-9.
+    public static JobSpecification createRtreeIndexJobSpec(String datasetName, String secondaryIndexName, List<String> secondaryKeyFields,
+            AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
+        JobSpecification spec = new JobSpecification();
+        String primaryIndexName = datasetName;
+        AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(primaryIndexName);
+        if (compiledDatasetDecl == null) {
+            throw new AsterixException("Could not find dataset " + primaryIndexName);
+        }
+        ARecordType itemType = (ARecordType) metadata.findType(compiledDatasetDecl.getItemTypeName());
+        if (compiledDatasetDecl.getDatasetType() == DatasetType.EXTERNAL) {
+            throw new AsterixException("Cannot index an external dataset (" + primaryIndexName + ").");
+        }
+        int numPrimaryKeys = DatasetUtils.getPartitioningFunctions(compiledDatasetDecl).size();
+        IIndexRegistryProvider<IIndex> indexRegistryProvider = AsterixIndexRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
+        int i = 0;
+        for (Triple<IEvaluatorFactory, ScalarFunctionCallExpression, IAType> evalFactoryAndType : DatasetUtils
+                .getPartitioningFunctions(compiledDatasetDecl)) {
+            IAType keyType = evalFactoryAndType.third;
+            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    keyType, true);
+            primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+            ++i;
+        }
+        primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
+        int numSecondaryKeys = secondaryKeyFields.size();
+        if (numSecondaryKeys != 1) {
+            throw new AsterixException(
+                    "Cannot use "
+                            + numSecondaryKeys
+                            + " fields as a key for the R-tree index. There can be only one field as a key for the R-tree index.");
+        }
+        IAType spatialType = AqlCompiledIndexDecl.keyFieldType(secondaryKeyFields.get(0), itemType);
+        if (spatialType == null) {
+            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
+        }
+        int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        int numNestedSecondaryKeyFields = dimension * 2;
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
+        IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
+        IAType keyType = AqlCompiledIndexDecl.keyFieldType(secondaryKeyFields.get(0), itemType);
+        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
+        for (i = 0; i < numNestedSecondaryKeyFields; i++) {
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    nestedKeyType, true);
+            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
+            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
+        }
+        // fill in type traits for the primary-index key fields
+        for (i = 0; i < numPrimaryKeys; i++) {
+            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryTypeTraits[i];
+        }
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(primaryIndexName, secondaryIndexName);
+        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, indexRegistryProvider, secondarySplitsAndConstraint.first, secondaryTypeTraits,
+                secondaryComparatorFactories, new RTreeDataflowHelperFactory(valueProviderFactories),
+                NoOpOperationCallbackProvider.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+                secondarySplitsAndConstraint.second);
+        spec.addRoot(secondaryIndexCreateOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+    
     @SuppressWarnings("unchecked")
-    public static JobSpecification createRtreeIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
+    public static JobSpecification loadRtreeIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
             AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
 
         JobSpecification spec = new JobSpecification();
@@ -527,7 +658,7 @@
     }
 
     @SuppressWarnings("unchecked")
-    public static JobSpecification createKeywordIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
+    public static JobSpecification loadKeywordIndexJobSpec(CompiledCreateIndexStatement createIndexStmt,
             AqlCompiledMetadataDeclarations datasetDecls) throws AsterixException, AlgebricksException {
 
         JobSpecification spec = new JobSpecification();
diff --git a/asterix-app/src/test/resources/runtimets/ignore.txt b/asterix-app/src/test/resources/runtimets/ignore.txt
index 0964e88..2b456ca 100644
--- a/asterix-app/src/test/resources/runtimets/ignore.txt
+++ b/asterix-app/src/test/resources/runtimets/ignore.txt
@@ -15,6 +15,4 @@
 fuzzyjoin
 failure/q1_pricing_summary_report_failure.aql
 open-closed
-dml/insert-into-empty-dataset-with-index_02.aql
-dml/insert-into-empty-dataset-with-index_01.aql
 dml/load-from-hdfs.aql
diff --git a/asterix-app/src/test/resources/runtimets/only.txt b/asterix-app/src/test/resources/runtimets/only.txt
index b399dbe..e69de29 100644
--- a/asterix-app/src/test/resources/runtimets/only.txt
+++ b/asterix-app/src/test/resources/runtimets/only.txt
@@ -1 +0,0 @@
-dml/insert-into-empty-dataset-with-index_01.aql
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
similarity index 93%
rename from asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql
rename to asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
index 2be5372..00b60cb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_01.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
@@ -43,7 +43,8 @@
 }
 );
 
-write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index_01.adm";      
+write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index.adm";      
 for $c in dataset('LineID')
+where $c.l_suppkey < 100 and $c.l_linenumber<5
 order by $c.l_orderkey, $c.l_linenumber
 return $c 
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql
deleted file mode 100644
index 947b4d4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index_02.aql
+++ /dev/null
@@ -1,49 +0,0 @@
-/* 
- * Test case Name  : insert-into-empty-dataset-with-index.aql
- * Description     : Check that we can insert into an empty dataset and its empty secondary indexes 
- * Expected Result : Success
- * Date            : May 2 2012
- */
-
-drop dataverse test if exists;
-create dataverse test;
-use dataverse test;
-
-create type LineIDType as closed {
-  l_orderkey: int32, 
-  l_linenumber: int32, 
-  l_suppkey: int32
-}
-
-create dataset LineID(LineIDType)
-  partitioned by key l_orderkey, l_linenumber;
-
-create index idx_LineID_partkey on LineID(l_linenumber);
-create index idx_LineID_suppkey on LineID(l_suppkey);
-
-insert into dataset LineID (
-let $x:=1
-let $y:=2
-let $z:=3
-return {
-	"l_orderkey": $x,
-	"l_linenumber": $y,
-	"l_suppkey": $z
-}
-);
-
-insert into dataset LineID (
-let $x:=2
-let $y:=3
-let $z:=4
-return {
-	"l_orderkey": $x,
-	"l_linenumber": $y,
-	"l_suppkey": $z
-}
-);
-
-write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index_02.adm";      
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c 
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_01.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
similarity index 100%
rename from asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_01.adm
rename to asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm
deleted file mode 100644
index 1b6c344..0000000
--- a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index_02.adm
+++ /dev/null
@@ -1,2 +0,0 @@
-{ "l_orderkey": 1, "l_linenumber": 2, "l_suppkey": 3 }
-{ "l_orderkey": 2, "l_linenumber": 3, "l_suppkey": 4 }