Merge branch 'asterix_lsm_stabilization' of https://code.google.com/p/asterixdb into jarodwen/hotfix363
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 976ec7c..b9f81d6 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -82,6 +82,7 @@
 import edu.uci.ics.asterix.metadata.entities.Index;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.types.TypeSignature;
@@ -368,6 +369,7 @@
 
         String dataverseName = null;
         String datasetName = null;
+        Dataset dataset = null;
         try {
             DatasetDecl dd = (DatasetDecl) stmt;
             dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
@@ -404,6 +406,8 @@
                     }
                     List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                             .getPartitioningExprs();
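+                    // Validate the partitioning keys against the record type up front, so an
+                    // invalid key fails the DDL before the dataset reaches the metadata.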
+                    ARecordType aRecordType = (ARecordType) itemType;
+                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                     String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                     datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                             InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
@@ -423,6 +427,8 @@
                     }
                     List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
                             .getPartitioningExprs();
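+                    // Feed datasets get the same partitioning-key validation as internal ones.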
+                    ARecordType aRecordType = (ARecordType) itemType;
+                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                     String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                     String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterFactoryClassname();
                     Map<String, String> configuration = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
@@ -436,8 +442,8 @@
             }
 
             //#. add a new dataset with PendingAddOp
-            Dataset dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(),
-                    dsType, DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
+            dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
+                    DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
 
             if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
@@ -470,35 +476,37 @@
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the index in NC
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
-            try {
-                JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
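+            // Only attempt compensation if the pending Dataset record was created;
+            // an exception before that point left nothing behind to clean up.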
+            if (dataset != null) {
+                //#. execute compensation operations
+                //   remove the index in NC
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
+                try {
+                    JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
 
-                runJob(hcc, jobSpec, true);
-            } catch (Exception e3) {
-                if (bActiveTxn) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e3) {
+                    if (bActiveTxn) {
+                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    }
+                    // do not throw the exception, since the metadata still needs to be compensated.
                 }
-                //do no throw exception since still the metadata needs to be compensated. 
-            }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    throw new AlgebricksException(e2);
+                }
             }
 
             throw new AlgebricksException(e);
@@ -518,6 +526,7 @@
         String dataverseName = null;
         String datasetName = null;
         String indexName = null;
+        JobSpecification spec = null;
         try {
             CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
             dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -557,7 +566,7 @@
             //#. create the index artifact in NC.
             CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                     index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
-            JobSpecification spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
+            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
             if (spec == null) {
                 throw new AsterixException("Failed to create job spec for creating index '"
                         + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
@@ -599,37 +608,38 @@
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the index in NC
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-            try {
-                JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
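+            // Only attempt compensation if index creation progressed far enough
+            // to build the job spec.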
+            if (spec != null) {
+                //#. execute compensation operations
+                //   remove the index in NC
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                try {
+                    JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
 
-                runJob(hcc, jobSpec, true);
-            } catch (Exception e3) {
-                if (bActiveTxn) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e3) {
+                    if (bActiveTxn) {
+                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    }
+                    // do not throw the exception, since the metadata still needs to be compensated.
                 }
-                //do no throw exception since still the metadata needs to be compensated. 
-            }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName, indexName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName, indexName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    throw new AlgebricksException(e2);
+                }
             }
-
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index d4c8c34..b9b9524 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -148,17 +148,6 @@
             throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
         }
         ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
-        for (String keyField : DatasetUtils.getPartitioningKeys(dataset)) {
-            try {
-                if (!itemType.isClosedField(keyField)) {
-                    throw new AsterixException("Cannot partition dataset \"" + dataset.getDatasetName()
-                            + "\" by key \"" + keyField + "\" since it is not a valid field of \""
-                            + itemType.getTypeName() + "\"");
-                }
-            } catch (IOException e) {
-                throw new AsterixException(e);
-            }
-        }
         JobSpecification spec = new JobSpecification();
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                 itemType, format.getBinaryComparatorFactoryProvider());
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
new file mode 100644
index 0000000..32ccf6d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description  : create a dataset partitioned on a non-existent field
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 255
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+  id:int32
+}
+
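+// "name" is not a declared field of opentype, so this statement must fail.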
+create dataset testds(opentype) primary key name;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
new file mode 100644
index 0000000..add51df
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
@@ -0,0 +1,21 @@
+/*
+ * Description  : create a dataset partitioned on a record-typed (ARecord) field
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 255
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype1 as open {
+  id:int32
+}
+
+create type opentype2 as open {
+  id:int32,
+  open-type:opentype1
+}
+
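+// "open-type" is a record-typed field, which is not allowed as a partitioning key.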
+create dataset testds(opentype2) primary key open-type;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
new file mode 100644
index 0000000..647b93a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
@@ -0,0 +1,23 @@
+/*
+ * Description  : create a feed dataset partitioned on a non-existent field
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 255
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}
+
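+// TweetType has no "name" field, so this statement must fail at validation time.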
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+primary key name;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
new file mode 100644
index 0000000..6ba4d86
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : create an index on a non-existent dataset
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
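+// Dataset Foo was never created, so this statement must fail.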
+create index loc_index on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
new file mode 100644
index 0000000..a1f9771
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
@@ -0,0 +1,14 @@
+/*
+ * Description  : create an index, using "if not exists", on a non-existent dataset
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
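+// Even with "if not exists", the target dataset Foo must exist.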
+create index loc_index if not exists on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index d4fb915..fb833b7 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -212,6 +212,36 @@
         <expected-error>AsterixException</expected-error>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_272_create_index_error_1">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_272_create_index_error_2">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_dataset_error_1">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_dataset_error_2">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_feed_error">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="transaction">
     <test-case FilePath="transaction">
@@ -233,4 +263,4 @@
       </compilation-unit>
     </test-case>
   </test-group>
-</test-suite>
+</test-suite>
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index dc0afcd..788010b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.IAObject;
 import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -232,12 +233,40 @@
      *            the name of the field to check
      * @return true if fieldName is a closed field, otherwise false
      * @throws IOException
-     *             if an error occurs while serializing fieldName
      */
     public boolean isClosedField(String fieldName) throws IOException {
         return findFieldPosition(fieldName) != -1;
     }
 
+    /**
+     * Validates the partitioning expression that will be used to partition a dataset.
+     * 
+     * @param partitioningExprs
+     *            a list of partitioning expressions that will be validated
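+     *            (for example, given a record type {@code { id: int32, name: string }},
+     *            {@code ["name"]} passes, while {@code ["age"]} or any record-typed
+     *            field fails validation)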
+     * @throws AlgebricksException
+     *             if the validation fails
+     * @throws IOException
+     */
+    public void validatePartitioningExpressions(List<String> partitioningExprs) throws AlgebricksException, IOException {
+        for (String fieldName : partitioningExprs) {
+            IAType fieldType = getFieldType(fieldName);
+            if (fieldType == null) {
+                throw new AlgebricksException("A field with name \"" + fieldName + "\" could not be found.");
+            } else if (fieldType.getTypeTag() == ATypeTag.RECORD) {
+                throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be of type "
+                        + ATypeTag.RECORD + ".");
+            }
+        }
+    }
+
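+    /**
+     * @param fieldName
+     *            the field name to look for
+     * @return true if fieldName is one of this record type's declared fields
+     */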
+    public boolean doesFieldExist(String fieldName) {
+        for (String f : fieldNames) {
+            if (f.equals(fieldName)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     @Override
     public String getDisplayName() {
         return "ARecord";
@@ -274,6 +303,7 @@
         }
         return h;
     }
+
     @Override
     public JSONObject toJSON() throws JSONException {
         JSONObject type = new JSONObject();