1) Merged changes from asterix_lsm_stabilization. 2) Added properties to the Asterix configuration XML, following the additions made to the Google doc.
diff --git a/.gitignore b/.gitignore
index 3dff533..1108a44 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,6 @@
 .classpath
 .settings
 .project
+ClusterControllerService
+asterix-app/rttest
+asterix-app/mdtest
\ No newline at end of file
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 976ec7c..b9f81d6 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -82,6 +82,7 @@
 import edu.uci.ics.asterix.metadata.entities.Index;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.types.TypeSignature;
@@ -368,6 +369,7 @@
 
         String dataverseName = null;
         String datasetName = null;
+        Dataset dataset = null;
         try {
             DatasetDecl dd = (DatasetDecl) stmt;
             dataverseName = dd.getDataverse() != null ? dd.getDataverse().getValue()
@@ -404,6 +406,8 @@
                     }
                     List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
                             .getPartitioningExprs();
+                    ARecordType aRecordType = (ARecordType) itemType;
+                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                     String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                     datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
                             InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
@@ -423,6 +427,8 @@
                     }
                     List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
                             .getPartitioningExprs();
+                    ARecordType aRecordType = (ARecordType) itemType;
+                    aRecordType.validatePartitioningExpressions(partitioningExprs);
                     String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName().getValue();
                     String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterFactoryClassname();
                     Map<String, String> configuration = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
@@ -436,8 +442,8 @@
             }
 
             //#. add a new dataset with PendingAddOp
-            Dataset dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(),
-                    dsType, DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
+            dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
+                    DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
 
             if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
@@ -470,35 +476,37 @@
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the index in NC
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
-            try {
-                JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
+            if (dataset != null) {
+                //#. execute compensation operations
+                //   remove the index in NC
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
+                try {
+                    JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
 
-                runJob(hcc, jobSpec, true);
-            } catch (Exception e3) {
-                if (bActiveTxn) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e3) {
+                    if (bActiveTxn) {
+                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    }
+                    // do not throw the exception: the metadata still needs to be compensated.
                 }
-                //do no throw exception since still the metadata needs to be compensated. 
-            }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    throw new AlgebricksException(e2);
+                }
             }
 
             throw new AlgebricksException(e);
@@ -518,6 +526,7 @@
         String dataverseName = null;
         String datasetName = null;
         String indexName = null;
+        JobSpecification spec = null;
         try {
             CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
             dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -557,7 +566,7 @@
             //#. create the index artifact in NC.
             CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
                     index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
-            JobSpecification spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
+            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
             if (spec == null) {
                 throw new AsterixException("Failed to create job spec for creating index '"
                         + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
@@ -599,37 +608,38 @@
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
             }
 
-            //#. execute compensation operations
-            //   remove the index in NC
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-            try {
-                JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
+            if (spec != null) {
+                //#. execute compensation operations
+                //   remove the index in NC
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                try {
+                    JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
 
-                runJob(hcc, jobSpec, true);
-            } catch (Exception e3) {
-                if (bActiveTxn) {
-                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e3) {
+                    if (bActiveTxn) {
+                        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    }
+                    // do not throw the exception: the metadata still needs to be compensated.
                 }
-                //do no throw exception since still the metadata needs to be compensated. 
-            }
 
-            //   remove the record from the metadata.
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            try {
-                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName, indexName);
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            } catch (Exception e2) {
-                MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                throw new AlgebricksException(e2);
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName, indexName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                    throw new AlgebricksException(e2);
+                }
             }
-
             throw new AlgebricksException(e);
         } finally {
             releaseWriteLatch();
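
Both handlers above now hoist the created artifact (the Dataset, or the index-creation JobSpecification) out of the try block and run compensation only when it is non-null, so a failure that happens before anything was created no longer triggers a spurious drop. A minimal stand-alone sketch of that guarded pattern, with every name invented for illustration (none of these are AsterixDB APIs):

    final class CompensationSketch {
        static Object createArtifact() { throw new IllegalStateException("fails before creating anything"); }
        static void dropPhysicalArtifact(Object a) { /* best-effort cleanup on the NCs */ }
        static void dropMetadataRecord(Object a) { /* remove the PENDING_ADD_OP record */ }

        static void create() {
            Object artifact = null;
            try {
                artifact = createArtifact();
            } catch (RuntimeException e) {
                if (artifact != null) { // compensate only if the artifact really exists
                    try {
                        dropPhysicalArtifact(artifact);
                    } catch (RuntimeException e2) {
                        // swallowed: the metadata record must still be compensated
                    }
                    dropMetadataRecord(artifact);
                }
                throw e; // surface the original failure either way
            }
        }
    }
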
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index d4c8c34..b9b9524 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -148,17 +148,6 @@
             throw new AsterixException("Could not find dataset " + datasetName + " in datavetse " + dataverseName);
         }
         ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
-        for (String keyField : DatasetUtils.getPartitioningKeys(dataset)) {
-            try {
-                if (!itemType.isClosedField(keyField)) {
-                    throw new AsterixException("Cannot partition dataset \"" + dataset.getDatasetName()
-                            + "\" by key \"" + keyField + "\" since it is not a valid field of \""
-                            + itemType.getTypeName() + "\"");
-                }
-            } catch (IOException e) {
-                throw new AsterixException(e);
-            }
-        }
         JobSpecification spec = new JobSpecification();
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                 itemType, format.getBinaryComparatorFactoryProvider());
diff --git a/asterix-app/src/main/resources/log.properties b/asterix-app/src/main/resources/log.properties
new file mode 100644
index 0000000..ee8040a
--- /dev/null
+++ b/asterix-app/src/main/resources/log.properties
@@ -0,0 +1 @@
+group_commit_wait_period=1
\ No newline at end of file
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index 0dbf6d4..299c43c 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -231,7 +231,7 @@
 
         List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
         for (CompilationUnit cUnit : cUnits) {
-            LOGGER.info("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
+            LOGGER.severe("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
 
             testFileCtxs = tcCtx.getTestFiles(cUnit);
             expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
new file mode 100644
index 0000000..32ccf6d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description  : create a dataset partitioned on non-existent field
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+id:int32
+}
+
+create dataset testds(opentype) primary key name;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
new file mode 100644
index 0000000..add51df
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
@@ -0,0 +1,21 @@
+/*
+ * Description  : create a dataset partitioned on ARecord type
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype1 as open {
+id:int32
+}
+
+create type opentype2 as open {
+id:int32,
+open-type:opentype1
+}
+
+create dataset testds(opentype2) primary key open-type;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
new file mode 100644
index 0000000..647b93a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
@@ -0,0 +1,23 @@
+/*
+ * Description  : create a dataset partitioned on non-existent field
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+primary key name;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
new file mode 100644
index 0000000..6ba4d86
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
@@ -0,0 +1,13 @@
+/*
+ * Description  : create an index on a non-existent dataset
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+create index loc_index on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
new file mode 100644
index 0000000..a1f9771
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
@@ -0,0 +1,14 @@
+/*
+ * Description  : create an index on a non-existent dataset
+ * Expected Res : Failure
+ * Date         : 14 April 2013
+ * Issue        : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create index loc_index if not exists on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index d4fb915..fb833b7 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -212,6 +212,36 @@
         <expected-error>AsterixException</expected-error>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_272_create_index_error_1">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_272_create_index_error_2">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_dataset_error_1">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_dataset_error_2">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_255_create_feed_error">
+        <output-dir compare="Text">none</output-dir>
+        <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="transaction">
     <test-case FilePath="transaction">
@@ -233,4 +263,4 @@
       </compilation-unit>
     </test-case>
   </test-group>
-</test-suite>
+</test-suite>
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql
new file mode 100644
index 0000000..efde712
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql
@@ -0,0 +1,10 @@
+/*
+ * Description  : create a rectangle constructor
+ * Expected Res : Success
+ * Date         : 18 April 2013
+ * Issue        : 272
+ */
+ 
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql
new file mode 100644
index 0000000..f7e538e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql
@@ -0,0 +1,7 @@
+/*
+ * Description  : create a rectangle constructor
+ * Expected Res : Success
+ * Date         : 18 April 2013
+ * Issue        : 272
+ */
+ 
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql
new file mode 100644
index 0000000..696746f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description  : create a rectangle constructor
+ * Expected Res : Success
+ * Date         : 18 April 2013
+ * Issue        : 272
+ */
+ 
+use dataverse test;
+
+let $r1 := rectangle("5.1,11.8 87.6,15.6548")
+let $r2 := rectangle("0.1234,-1.00e-10 5.5487,0.48765")
+return {"rectangle1": $r1,"rectangle2": $r2}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
index d8ab247..381ad3f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
@@ -14,4 +14,6 @@
 
 create dataset MyData(MyRecord)
   primary key id;
+  
+create index rtree_index_loc on MyData(loc) type rtree;
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.1.ddl.aql
new file mode 100644
index 0000000..cb55dad
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.1.ddl.aql
@@ -0,0 +1,8 @@
+/*
+ * Description      :   Test cases for duration comparison functions
+ * Expected Result  :   Success
+ * Date             :   19 Apr, 2013
+ */
+
+drop dataverse test if exists;
+create dataverse test;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.3.query.aql
new file mode 100644
index 0000000..77fcc4f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/duration_comps/duration_comps.3.query.aql
@@ -0,0 +1,11 @@
+use dataverse test;
+
+let $dr1 := duration("-P3D")
+let $dr2 := duration("P1D")
+let $dr3 := duration("P1Y")
+let $dr4 := duration("P13M")
+let $dr5 := duration("PT24H")
+let $dr6 := duration-from-months(months-of-year-month-duration($dr3))
+let $dr7 := duration-from-ms(ms-of-day-time-duration($dr1))
+
+return { "yearMonthGreaterComp" : year-month-duration-greater-than($dr4, $dr3), "dayTimeGreaterComp" : day-time-duration-greater-than($dr2, $dr1), "yearMonthLessComp" : year-month-duration-less-than($dr4, $dr3), "dayTimeLessComp" : day-time-duration-less-than($dr2, $dr1), "equal1": duration-equal($dr2, $dr5), "equal2": duration-equal($dr1, $dr5), "equal3": duration-equal($dr6, $dr3), "equal4": duration-equal($dr7, $dr1) }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm b/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm
new file mode 100644
index 0000000..32f14b3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm
@@ -0,0 +1 @@
+{ "rectangle1": rectangle("5.1,11.8 87.6,15.6548"), "rectangle2": rectangle("0.1234,-1.0E-10 5.5487,0.48765") }
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
index e95bac0..25859de 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
@@ -1,2 +1,2 @@
-{ "cell": rectangle("33.5,-101.5 | 36.5,-98.5"), "count": 1 }
-{ "cell": rectangle("33.5,-98.5 | 36.5,-95.5"), "count": 2 }
\ No newline at end of file
+{ "cell": rectangle("33.5,-101.5 36.5,-98.5"), "count": 1 }
+{ "cell": rectangle("33.5,-98.5 36.5,-95.5"), "count": 2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
index 137ed1c..0014d49 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
@@ -1,3 +1,3 @@
-{ "cell": rectangle("5.0,5.0 | 10.0,10.0"), "count": 1 }
-{ "cell": rectangle("5.0,0.0 | 10.0,5.0"), "count": 3 }
-{ "cell": rectangle("0.0,0.0 | 5.0,5.0"), "count": 12 }
\ No newline at end of file
+{ "cell": rectangle("5.0,5.0 10.0,10.0"), "count": 1 }
+{ "cell": rectangle("5.0,0.0 10.0,5.0"), "count": 3 }
+{ "cell": rectangle("0.0,0.0 5.0,5.0"), "count": 12 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/duration_comps/duration_comps.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/duration_comps/duration_comps.1.adm
new file mode 100644
index 0000000..26b32fc
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/duration_comps/duration_comps.1.adm
@@ -0,0 +1 @@
+{ "yearMonthGreaterComp": true, "dayTimeGreaterComp": true, "yearMonthLessComp": false, "dayTimeLessComp": false, "equal1": true, "equal2": false, "equal3": true, "equal4": true }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 700f06f..dd29a7b 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -551,6 +551,11 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="constructor">
+      <compilation-unit name="rectangle_01">
+        <output-dir compare="Text">rectangle_01</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="constructor">
       <compilation-unit name="point_01">
         <output-dir compare="Text">point_01</output-dir>
       </compilation-unit>
@@ -4200,6 +4205,11 @@
         <output-dir compare="Text">interval</output-dir>
       </compilation-unit>
   	</test-case>
+    <test-case FilePath="temporal">
+      <compilation-unit name="duration_comps">
+        <output-dir compare="Text">duration_comps</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="leftouterjoin">
     <test-case FilePath="leftouterjoin">
@@ -4213,5 +4223,4 @@
       </compilation-unit>
     </test-case>
   </test-group>
-</test-suite>
-
+</test-suite>
\ No newline at end of file
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
index 65a31c0..80e522b 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
@@ -132,10 +132,6 @@
         buffer.append("configure" + ":" + " Auto-generate configuration for local psedu-distributed Asterix instance"
                 + "\n");
         buffer.append("shutdown " + ":" + " Shutdown the installer service" + "\n");
-        buffer.append("validate " + ":" + " Validates the installer/cluster configuration" + "\n");
-        buffer.append("configure" + ":" + " Auto-generate configuration for local psedu-distributed Asterix instance"
-                + "\n");
-        buffer.append("shutdown " + ":" + " Shutdown the installer service" + "\n");
         buffer.append("help     " + ":" + " Provides usage description of a command" + "\n");
 
         LOGGER.info(buffer.toString());
diff --git a/asterix-installer/src/main/resources/clusters/local/conf/asterix-conf.xml b/asterix-installer/src/main/resources/clusters/local/conf/asterix-conf.xml
index 195131a..b94e491 100644
--- a/asterix-installer/src/main/resources/clusters/local/conf/asterix-conf.xml
+++ b/asterix-installer/src/main/resources/clusters/local/conf/asterix-conf.xml
@@ -3,71 +3,138 @@
 	<property>
 		<name>nc_java_opts</name>
 		<value>-Xmx1024m</value>
+                <description>JVM parameters for each Node Controller (NC)</description>
 	</property>
 
 	<property>
 		<name>cc_java_opts</name>
 		<value>-Xmx1024m</value>
+                <description>JVM parameters for the Cluster Controller (CC)</description>
 	</property>
 
 	<property>
 		<name>size_memory_component</name>
 		<value>512m</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>total_size_memory_component</name>
 		<value>512m</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>log_buffer_num_pages</name>
 		<value>8</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>log_buffer_page_size</name>
 		<value></value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>log_buffer_page_size</name>
 		<value>128m</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>group_commit_interval</name>
 		<value>200ms</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>sort_op_memory</name>
 		<value>200m</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>join_op_memory</name>
 		<value>200m</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>web_interface_port</name>
 		<value>19001</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>nc_port</name>
 		<value>14601</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>num_pages_buffer_cache</name>
 		<value>8</value>
+                <description></description>
 	</property>
 
 	<property>
 		<name>log_level</name>
 		<value>INFO</value>
+                <description></description>
 	</property>
 
+	<property>
+		<name>lsn_threshold</name>
+		<value>64m</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>checkpointTermsInSecs</name>
+		<value>120</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>escalate_threshold_entity_to_dataset</name>
+		<value>8</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>num_pages_buffer_cache</name>
+		<value>1000</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>logPageSize</name>
+		<value>131072</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>numLogPages</name>
+		<value>131072</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>logPartitionSize</name>
+		<value>2147483648</value>
+                <description></description>
+	</property>
+
+	<property>
+		<name>shrink_timer_threshold</name>
+		<value>120000</value>
+                <description></description>
+	</property>
 </asterixConfiguration>
diff --git a/asterix-installer/src/main/resources/schema/asterix-conf.xsd b/asterix-installer/src/main/resources/schema/asterix-conf.xsd
index 3c7ae34..9ebc5ed 100644
--- a/asterix-installer/src/main/resources/schema/asterix-conf.xsd
+++ b/asterix-installer/src/main/resources/schema/asterix-conf.xsd
@@ -7,6 +7,7 @@
 
 	<xs:element name="name" type="xs:string" />
 	<xs:element name="value" type="xs:string" />
+	<xs:element name="description" type="xs:string" />
 	
 	<!-- definition of complex elements -->
 	<xs:element name="property">
@@ -14,6 +15,7 @@
 			<xs:sequence>
 				<xs:element ref="mg:name" />
 				<xs:element ref="mg:value" />
+				<xs:element ref="mg:description" />
 			</xs:sequence>
 		</xs:complexType>
 	</xs:element>
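
Because the three child elements sit in an xs:sequence with the default minOccurs="1", <description> is now a required child of every <property>, which is why each property in asterix-conf.xml above gained one. A hedged sketch of reading the triples with the JDK's DOM parser (illustration only; the patch does not show AsterixDB's own configuration loader):

    import java.io.File;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    public class ConfReader {
        public static void main(String[] args) throws Exception {
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                    .parse(new File("asterix-conf.xml"));
            NodeList props = doc.getElementsByTagName("property");
            for (int i = 0; i < props.getLength(); i++) {
                Element p = (Element) props.item(i);
                String name = p.getElementsByTagName("name").item(0).getTextContent();
                String value = p.getElementsByTagName("value").item(0).getTextContent();
                String desc = p.getElementsByTagName("description").item(0).getTextContent();
                System.out.println(name + " = " + value + (desc.isEmpty() ? "" : " (" + desc + ")"));
            }
        }
    }
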
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
index 0618f28..f83118b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
@@ -17,9 +17,14 @@
 
     @Override
     public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
-        ps.print("rectangle(\"" + ADoubleSerializerDeserializer.getDouble(b, s + 1) + ",");
+        ps.print("rectangle(\"");
+        ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 1));
+        ps.print(",");
         ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 9));
-        ps.print(" | " + ADoubleSerializerDeserializer.getDouble(b, s + 17) + ",");
-        ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 25) + "\")");
+        ps.print(" ");
+        ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 17));
+        ps.print(",");
+        ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 25));
+        ps.print("\")");
     }
 }
\ No newline at end of file
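
The printer now separates the two corner points with a single space instead of " | ", so a printed rectangle matches the textual form the rectangle("...") constructor accepts (the updated .adm results earlier in this patch show the new shape). Illustrative string assembly mirroring the print calls above:

    // corners (5.0, 0.0) and (10.0, 5.0); Java's default double formatting applies
    String printed = "rectangle(\"" + 5.0 + "," + 0.0 + " " + 10.0 + "," + 5.0 + "\")";
    // -> rectangle("5.0,0.0 10.0,5.0")
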
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
index b5b7303..2e4f72c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
@@ -23,6 +23,7 @@
 import edu.uci.ics.asterix.om.base.APoint3D;
 import edu.uci.ics.asterix.om.base.APolygon;
 import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.ARectangle;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.base.ATime;
 import edu.uci.ics.asterix.om.base.AUnorderedList;
@@ -97,6 +98,9 @@
             case LINE: {
                 return ALineSerializerDeserializer.INSTANCE.deserialize(in);
             }
+            case RECTANGLE: {
+                return ARectangleSerializerDeserializer.INSTANCE.deserialize(in);
+            }
             case POLYGON: {
                 return APolygonSerializerDeserializer.INSTANCE.deserialize(in);
             }
@@ -199,6 +203,10 @@
                 ALineSerializerDeserializer.INSTANCE.serialize((ALine) instance, out);
                 break;
             }
+            case RECTANGLE: {
+                ARectangleSerializerDeserializer.INSTANCE.serialize((ARectangle) instance, out);
+                break;
+            }
             case POLYGON: {
                 APolygonSerializerDeserializer.INSTANCE.serialize((APolygon) instance, out);
                 break;
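
With the RECTANGLE branches added to both switches, rectangle values dispatch to ARectangleSerializerDeserializer like the other spatial types. A hedged round-trip sketch (the INSTANCE.serialize/deserialize calls are the ones used above; the ARectangle(APoint, APoint) and APoint(double, double) constructors are assumed, and imports and exception handling are omitted):

    ARectangle rect = new ARectangle(new APoint(0.0, 0.0), new APoint(5.0, 5.0));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ARectangleSerializerDeserializer.INSTANCE.serialize(rect, new DataOutputStream(baos));
    ARectangle copy = ARectangleSerializerDeserializer.INSTANCE.deserialize(
            new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
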
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index fa3179a..3cad3be 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -414,6 +414,24 @@
             "current-date", 0);
     public final static FunctionIdentifier CURRENT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "current-datetime", 0);
+    public final static FunctionIdentifier DURATION_EQUAL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "duration-equal", 2);
+    public final static FunctionIdentifier YEAR_MONTH_DURATION_GREATER_THAN = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "year-month-duration-greater-than", 2);
+    public final static FunctionIdentifier YEAR_MONTH_DURATION_LESS_THAN = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "year-month-duration-less-than", 2);
+    public final static FunctionIdentifier DAY_TIME_DURATION_GREATER_THAN = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "day-time-duration-greater-than", 2);
+    public final static FunctionIdentifier DAY_TIME_DURATION_LESS_THAN = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "day-time-duration-less-than", 2);
+    public final static FunctionIdentifier DURATION_FROM_MONTHS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "duration-from-months", 1);
+    public final static FunctionIdentifier MONTHS_OF_YEAR_MONTH_DURATION = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "months-of-year-month-duration", 1);
+    public final static FunctionIdentifier DURATION_FROM_MILLISECONDS = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "duration-from-ms", 1);
+    public final static FunctionIdentifier MILLISECONDS_OF_DAY_TIME_DURATION = new FunctionIdentifier(
+            FunctionConstants.ASTERIX_NS, "ms-of-day-time-duration", 1);
 
     // spatial
     public final static FunctionIdentifier CREATE_POINT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -815,6 +833,15 @@
         add(CURRENT_DATE, ADateTypeComputer.INSTANCE);
         add(CURRENT_TIME, ATimeTypeComputer.INSTANCE);
         add(CURRENT_DATETIME, ADateTimeTypeComputer.INSTANCE);
+        add(DAY_TIME_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE);
+        add(DAY_TIME_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE);
+        add(YEAR_MONTH_DURATION_GREATER_THAN, OptionalABooleanTypeComputer.INSTANCE);
+        add(YEAR_MONTH_DURATION_LESS_THAN, OptionalABooleanTypeComputer.INSTANCE);
+        add(DURATION_EQUAL, OptionalABooleanTypeComputer.INSTANCE);
+        add(DURATION_FROM_MONTHS, OptionalADurationTypeComputer.INSTANCE);
+        add(DURATION_FROM_MILLISECONDS, OptionalADurationTypeComputer.INSTANCE);
+        add(MONTHS_OF_YEAR_MONTH_DURATION, OptionalAInt32TypeComputer.INSTANCE);
+        add(MILLISECONDS_OF_DAY_TIME_DURATION, OptionalAInt64TypeComputer.INSTANCE);
 
         // interval constructors
         add(INTERVAL_CONSTRUCTOR_DATE, OptionalAIntervalTypeComputer.INSTANCE);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index dc0afcd..788010b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.base.IAObject;
 import edu.uci.ics.asterix.om.visitors.IOMVisitor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -232,12 +233,40 @@
      *            the name of the field to check
      * @return true if fieldName is a closed field, otherwise false
      * @throws IOException
-     *             if an error occurs while serializing fieldName
      */
     public boolean isClosedField(String fieldName) throws IOException {
         return findFieldPosition(fieldName) != -1;
     }
 
+    /**
+     * Validates the partitioning expressions that will be used to partition a dataset.
+     * 
+     * @param partitioningExprs
+     *            a list of partitioning expressions that will be validated
+     * @throws AlgebricksException
+     *             if one of the partitioning expressions is not valid
+     * @throws IOException
+     */
+    public void validatePartitioningExpressions(List<String> partitioningExprs) throws AlgebricksException, IOException {
+        for (String fieldName : partitioningExprs) {
+            IAType fieldType = getFieldType(fieldName);
+            if (fieldType == null) {
+                throw new AlgebricksException("A field with this name  \"" + fieldName + "\" could not be found.");
+            } else if (fieldType.getTypeTag() == ATypeTag.RECORD) {
+                throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be of type "
+                        + ATypeTag.RECORD + ".");
+            }
+        }
+    }
+
+    public boolean doesFieldExist(String fieldName) {
+        for (String f : fieldNames) {
+            if (f.equals(fieldName)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
     @Override
     public String getDisplayName() {
         return "ARecord";
@@ -274,6 +303,7 @@
         }
         return h;
     }
+
     @Override
     public JSONObject toJSON() throws JSONException {
         JSONObject type = new JSONObject();
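
validatePartitioningExpressions replaces the per-key isClosedField loop deleted from DatasetOperations above and is what makes the new issue_255 tests fail at DDL time. A hedged usage sketch mirroring issue_255_create_dataset_error_1 (the ARecordType(String, String[], IAType[], boolean) constructor is assumed; imports omitted):

    ARecordType opentype = new ARecordType("opentype",
            new String[] { "id" }, new IAType[] { BuiltinType.AINT32 }, true);
    // "name" is not a declared field, so this throws
    // AlgebricksException: A field with name "name" could not be found.
    opentype.validatePartitioningExpressions(Arrays.asList("name"));
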
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
new file mode 100644
index 0000000..bb83016
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DayTimeDurationComparatorDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier GREATER_THAN_FID = AsterixBuiltinFunctions.DAY_TIME_DURATION_GREATER_THAN;
+    public final static FunctionIdentifier LESS_THAN_FID = AsterixBuiltinFunctions.DAY_TIME_DURATION_LESS_THAN;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    private final boolean isGreaterThan;
+
+    private DayTimeDurationComparatorDescriptor(boolean isGreaterThan) {
+        this.isGreaterThan = isGreaterThan;
+    }
+
+    public final static IFunctionDescriptorFactory GREATER_THAN_FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DayTimeDurationComparatorDescriptor(true);
+        }
+    };
+
+    public final static IFunctionDescriptorFactory LESS_THAN_FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DayTimeDurationComparatorDescriptor(false);
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ABoolean> boolSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
+                                    || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(getIdentifier().getName()
+                                        + ": expects type NULL/DURATION, NULL/DURATION but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
+                                        + " and "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+                            }
+
+                            if ((ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1) != 0)
+                                    || (ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1) != 0)) {
+                                throw new AlgebricksException(getIdentifier().getName()
+                                        + ": only year-month durations are allowed.");
+                            }
+
+                            if (ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) > ADurationSerializerDeserializer
+                                    .getDayTime(argOut1.getByteArray(), 1)) {
+                                boolSerde.serialize(isGreaterThan ? ABoolean.TRUE : ABoolean.FALSE, out);
+                            } else {
+                                boolSerde.serialize(isGreaterThan ? ABoolean.FALSE : ABoolean.TRUE, out);
+                            }
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return isGreaterThan ? GREATER_THAN_FID : LESS_THAN_FID;
+    }
+
+}
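
A single descriptor backs both day-time comparison functions; the isGreaterThan flag selects the FunctionIdentifier and flips the serialized result. Condensed, the decision logic on the two day-time components (in milliseconds) is:

    static boolean compareDayTime(long dayTime0, long dayTime1, boolean isGreaterThan) {
        // greater-than is strict; less-than is its exact negation, so equal
        // day-time components satisfy less-than
        return isGreaterThan ? dayTime0 > dayTime1 : dayTime0 <= dayTime1;
    }
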
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
new file mode 100644
index 0000000..fceb144
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DurationEqualDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_EQUAL;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DurationEqualDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ABoolean> boolSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
+                                    || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(FID.getName()
+                                        + ": expects type NULL/DURATION, NULL/DURATION but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
+                                        + " and "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+                            }
+
+                            if ((ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) == ADurationSerializerDeserializer
+                                    .getDayTime(argOut1.getByteArray(), 1))
+                                    && (ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1) == ADurationSerializerDeserializer
+                                            .getYearMonth(argOut1.getByteArray(), 1))) {
+                                boolSerde.serialize(ABoolean.TRUE, out);
+                            } else {
+                                boolSerde.serialize(ABoolean.FALSE, out);
+                            }
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
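
For context, duration-equal compares the two stored halves of a duration independently: the year-month component (a month count, via getYearMonth) and the day-time component (a millisecond count, via getDayTime). A minimal standalone model of that semantics (class and method names are illustrative, not part of the patch):

public class DurationEqualitySketch {
    // Mirrors the check in DurationEqualDescriptor: equal iff both components match.
    static boolean durationEqual(int yearMonth0, long dayTime0, int yearMonth1, long dayTime1) {
        return yearMonth0 == yearMonth1 && dayTime0 == dayTime1;
    }

    public static void main(String[] args) {
        // P1Y vs P12M: both carry 12 months and no day-time part -> equal.
        System.out.println(durationEqual(12, 0L, 12, 0L));              // true
        // P1M vs P30D: a month count is never equated with day-time millis.
        System.out.println(durationEqual(1, 0L, 0, 30L * 86400000L));   // false
    }
}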
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java
new file mode 100644
index 0000000..267f4a7
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DurationFromMillisecondsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_FROM_MILLISECONDS;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+    private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+    private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+    private final static byte SER_INT64_TYPE_TAG = ATypeTag.INT64.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DurationFromMillisecondsDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+                                aDuration.setValue(0, AInt8SerializerDeserializer.getByte(argOut0.getByteArray(), 1));
+                            } else if (argOut0.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+                                aDuration.setValue(0, AInt16SerializerDeserializer.getShort(argOut0.getByteArray(), 1));
+                            } else if (argOut0.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+                                aDuration.setValue(0, AInt32SerializerDeserializer.getInt(argOut0.getByteArray(), 1));
+                            } else if (argOut0.getByteArray()[0] == SER_INT64_TYPE_TAG) {
+                                aDuration.setValue(0, AInt64SerializerDeserializer.getLong(argOut0.getByteArray(), 1));
+                            } else {
+                                throw new AlgebricksException(FID.getName()
+                                        + ": expects NULL/INT8/INT16/INT32/INT64, but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
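
What the four branches above amount to: whichever integer width arrives, its value is sign-extended into the 64-bit day-time (millisecond) component, and the year-month component stays zero. A small model of that widening (names and values are illustrative):

public class IntWideningSketch {
    public static void main(String[] args) {
        byte i8 = -1;            // INT8  -> -1 ms
        short i16 = 30000;       // INT16 -> 30 s
        int i32 = 86400000;      // INT32 -> 1 day
        long i64 = 31536000000L; // INT64 -> 365 days
        // Implicit sign-extending widening, as in the setValue(0, ...) calls above:
        long[] dayTimeMillis = { i8, i16, i32, i64 };
        for (long ms : dayTimeMillis) {
            System.out.println(ms);
        }
    }
}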
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
new file mode 100644
index 0000000..f8b7a3e
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADuration;
+import edu.uci.ics.asterix.om.base.AMutableDuration;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class DurationFromMonthsDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.DURATION_FROM_MONTHS;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_INT8_TYPE_TAG = ATypeTag.INT8.serialize();
+    private final static byte SER_INT16_TYPE_TAG = ATypeTag.INT16.serialize();
+    private final static byte SER_INT32_TYPE_TAG = ATypeTag.INT32.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new DurationFromMonthsDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ADuration> durationSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ADURATION);
+
+                    AMutableDuration aDuration = new AMutableDuration(0, 0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] == SER_INT8_TYPE_TAG) {
+                                aDuration.setValue(AInt8SerializerDeserializer.getByte(argOut0.getByteArray(), 1), 0);
+                            } else if (argOut0.getByteArray()[0] == SER_INT16_TYPE_TAG) {
+                                aDuration.setValue(AInt16SerializerDeserializer.getShort(argOut0.getByteArray(), 1), 0);
+                            } else if (argOut0.getByteArray()[0] == SER_INT32_TYPE_TAG) {
+                                aDuration.setValue(AInt32SerializerDeserializer.getInt(argOut0.getByteArray(), 1), 0);
+                            } else {
+                                throw new AlgebricksException(FID.getName()
+                                        + ": expects NULL/INT8/INT16/INT32, but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+                            }
+
+                            durationSerde.serialize(aDuration, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
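
Together with duration-from-milliseconds, this gives one constructor per half of a duration: from-months fills the 32-bit year-month component (hence no INT64 branch here), while from-milliseconds fills the 64-bit day-time component. A usage sketch against the AMutableDuration(months, milliseconds) argument order that the setValue calls in both descriptors imply:

import edu.uci.ics.asterix.om.base.AMutableDuration;

public class DurationConstructorSketch {
    public static void main(String[] args) {
        // duration-from-milliseconds: day-time half only (here 25 hours).
        AMutableDuration fromMs = new AMutableDuration(0, 25L * 3600000L);
        // duration-from-months: year-month half only (here P1Y2M = 14 months).
        AMutableDuration fromMonths = new AMutableDuration(14, 0);
        System.out.println(fromMs + " / " + fromMonths);
    }
}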
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsOfDayTimeDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsOfDayTimeDurationDescriptor.java
new file mode 100644
index 0000000..97fa94a
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MillisecondsOfDayTimeDurationDescriptor.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.base.AMutableInt64;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class MillisecondsOfDayTimeDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.MILLISECONDS_OF_DAY_TIME_DURATION;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new MillisecondsOfDayTimeDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT64);
+
+                    AMutableInt64 aInt64 = new AMutableInt64(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(FID.getName() + ": expects NULL/DURATION, but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+                            }
+
+                            aInt64.setValue(ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1));
+
+                            int64Serde.serialize(aInt64, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MonthsOfYearMonthDurationDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MonthsOfYearMonthDurationDescriptor.java
new file mode 100644
index 0000000..55173a5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/MonthsOfYearMonthDurationDescriptor.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class MonthsOfYearMonthDurationDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier FID = AsterixBuiltinFunctions.MONTHS_OF_YEAR_MONTH_DURATION;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+
+    public final static IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new MonthsOfYearMonthDurationDescriptor();
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.AINT32);
+
+                    AMutableInt32 aInt32 = new AMutableInt32(0);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(FID.getName() + ": expects NULL/DURATION, but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+                            }
+
+                            aInt32.setValue(ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1));
+
+                            int32Serde.serialize(aInt32, out);
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return FID;
+    }
+
+}
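
These two accessors are the projections that invert the constructor functions above: months-of-year-month-duration reads the 32-bit month count, milliseconds-of-day-time-duration reads the 64-bit millisecond count. A property sketch (helper names are local to this sketch):

public class DurationAccessorSketch {
    static int monthsOf(int yearMonth, long dayTime) {
        return yearMonth; // mirrors ADurationSerializerDeserializer.getYearMonth(bytes, 1)
    }

    static long millisecondsOf(int yearMonth, long dayTime) {
        return dayTime;   // mirrors ADurationSerializerDeserializer.getDayTime(bytes, 1)
    }

    public static void main(String[] args) {
        // Round trips with the constructors added earlier in this patch:
        System.out.println(monthsOf(14, 0L) == 14);                    // true
        System.out.println(millisecondsOf(0, 90061000L) == 90061000L); // true
    }
}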
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
new file mode 100644
index 0000000..22ab96c
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDecriptor.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.functions.temporal;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class YearMonthDurationComparatorDecriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+    private final static long serialVersionUID = 1L;
+    public final static FunctionIdentifier GREATER_THAN_FID = AsterixBuiltinFunctions.YEAR_MONTH_DURATION_GREATER_THAN;
+    public final static FunctionIdentifier LESS_THAN_FID = AsterixBuiltinFunctions.YEAR_MONTH_DURATION_LESS_THAN;
+
+    // allowed input types
+    private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+    private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
+    
+    private final boolean isGreaterThan;
+
+    private YearMonthDurationComparatorDecriptor(boolean isGreaterThan) {
+        this.isGreaterThan = isGreaterThan;
+    }
+
+    public final static IFunctionDescriptorFactory GREATER_THAN_FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new YearMonthDurationComparatorDecriptor(true);
+        }
+    };
+
+    public final static IFunctionDescriptorFactory LESS_THAN_FACTORY = new IFunctionDescriptorFactory() {
+
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new YearMonthDurationComparatorDecriptor(false);
+        }
+    };
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+        return new ICopyEvaluatorFactory() {
+
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+                return new ICopyEvaluator() {
+
+                    private DataOutput out = output.getDataOutput();
+                    private ArrayBackedValueStorage argOut0 = new ArrayBackedValueStorage();
+                    private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
+                    private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
+                    private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
+
+                    // possible output types
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ANULL);
+                    @SuppressWarnings("unchecked")
+                    private ISerializerDeserializer<ABoolean> boolSerde = AqlSerializerDeserializerProvider.INSTANCE
+                            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+
+                    @Override
+                    public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+                        argOut0.reset();
+                        eval0.evaluate(tuple);
+                        argOut1.reset();
+                        eval1.evaluate(tuple);
+
+                        try {
+                            if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
+                                    || argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
+                                nullSerde.serialize(ANull.NULL, out);
+                                return;
+                            }
+
+                            if (argOut0.getByteArray()[0] != SER_DURATION_TYPE_TAG
+                                    || argOut1.getByteArray()[0] != SER_DURATION_TYPE_TAG) {
+                                throw new AlgebricksException(getIdentifier().getName()
+                                        + ": expects type NULL/DURATION, NULL/DURATION but got "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0])
+                                        + " and "
+                                        + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+                            }
+
+                            if ((ADurationSerializerDeserializer.getDayTime(argOut0.getByteArray(), 1) != 0)
+                                    || (ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1) != 0)) {
+                                throw new AlgebricksException(getIdentifier().getName()
+                                        + ": only year-month durations are allowed.");
+                            }
+
+                            int yearMonths0 = ADurationSerializerDeserializer.getYearMonth(argOut0.getByteArray(), 1);
+                            int yearMonths1 = ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1);
+
+                            if (isGreaterThan) {
+                                boolSerde.serialize(yearMonths0 > yearMonths1 ? ABoolean.TRUE : ABoolean.FALSE, out);
+                            } else {
+                                boolSerde.serialize(yearMonths0 < yearMonths1 ? ABoolean.TRUE : ABoolean.FALSE, out);
+                            }
+
+                        } catch (HyracksDataException hex) {
+                            throw new AlgebricksException(hex);
+                        }
+                    }
+                };
+            }
+        };
+    }
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+     */
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return isGreaterThan ? GREATER_THAN_FID : LESS_THAN_FID;
+    }
+
+}
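
A compact model of the comparator pair, assuming strict ordering on the year-month component and rejection of any day-time part, as the checks above enforce (names are illustrative):

public class YearMonthComparatorSketch {
    static boolean compare(int yearMonth0, long dayTime0, int yearMonth1, long dayTime1,
            boolean isGreaterThan) {
        if (dayTime0 != 0 || dayTime1 != 0) {
            throw new IllegalArgumentException("only year-month durations are allowed");
        }
        return isGreaterThan ? yearMonth0 > yearMonth1 : yearMonth0 < yearMonth1;
    }

    public static void main(String[] args) {
        System.out.println(compare(13, 0L, 12, 0L, true));  // P1Y1M > P1Y -> true
        System.out.println(compare(12, 0L, 12, 0L, false)); // P1Y < P1Y   -> false (strict)
    }
}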
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index a61208d..84142be 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -203,6 +203,10 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DateFromUnixTimeInDaysDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromDateAndTimeDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DatetimeFromUnixTimeInMsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DayTimeDurationComparatorDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DurationEqualDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DurationFromMillisecondsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.DurationFromMonthsDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalAfterDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalBeforeDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalCoveredByDescriptor;
@@ -211,6 +215,8 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalEndsDecriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMeetsDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalMetByDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MillisecondsOfDayTimeDurationDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.MonthsOfYearMonthDurationDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.OverlapDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlappedByDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.IntervalOverlapsDescriptor;
@@ -221,6 +227,7 @@
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.SubtractTimeDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromDatetimeDescriptor;
 import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.TimeFromUnixTimeInMsDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.temporal.YearMonthDurationComparatorDecriptor;
 import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
 import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
 import edu.uci.ics.asterix.runtime.runningaggregates.std.TidRunningAggregateDescriptor;
@@ -502,6 +509,15 @@
         temp.add(CurrentDateDescriptor.FACTORY);
         temp.add(CurrentTimeDescriptor.FACTORY);
         temp.add(CurrentDateTimeDescriptor.FACTORY);
+        temp.add(DurationFromMillisecondsDescriptor.FACTORY);
+        temp.add(DurationFromMonthsDescriptor.FACTORY);
+        temp.add(YearMonthDurationComparatorDecriptor.GREATER_THAN_FACTORY);
+        temp.add(YearMonthDurationComparatorDecriptor.LESS_THAN_FACTORY);
+        temp.add(DayTimeDurationComparatorDescriptor.GREATER_THAN_FACTORY);
+        temp.add(DayTimeDurationComparatorDescriptor.LESS_THAN_FACTORY);
+        temp.add(MonthsOfYearMonthDurationDescriptor.FACTORY);
+        temp.add(MillisecondsOfDayTimeDurationDescriptor.FACTORY);
+        temp.add(DurationEqualDescriptor.FACTORY);
 
         // Interval constructor
         temp.add(AIntervalFromDateConstructorDescriptor.FACTORY);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
index 718ea3f..a78f2ff 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
@@ -48,7 +48,7 @@
     public void reconcile(ITupleReference tuple) throws HyracksDataException {
         int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
         try {
-            lockManager.instantLock(datasetId, pkHash, LockMode.S, txnCtx);
+            lockManager.lock(datasetId, pkHash, LockMode.S, txnCtx);
         } catch (ACIDException e) {
             throw new HyracksDataException(e);
         }
@@ -58,4 +58,14 @@
     public void cancel(ITupleReference tuple) throws HyracksDataException {
         //no op
     }
+
+    @Override
+    public void complete(ITupleReference tuple) throws HyracksDataException {
+        int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
+        try {
+            lockManager.unlock(datasetId, pkHash, txnCtx);
+        } catch (ACIDException e) {
+            throw new HyracksDataException(e);
+        }
+    }
 }
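
The change above turns reconcile() from an instant lock into a held lock, and the new complete() hook is where that lock is released. A driver-side sketch of the resulting pairing, assuming ISearchOperationCallback's boolean-returning proceed(); the consume step is hypothetical:

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallback;

class SearchCallbackProtocolSketch {
    void visit(ITupleReference tuple, ISearchOperationCallback cb) throws HyracksDataException {
        if (!cb.proceed(tuple)) { // optimistic check failed: fall back to locking
            cb.reconcile(tuple);  // with this patch: acquires and holds LockMode.S
            try {
                // ... consume the now-protected tuple (hypothetical) ...
            } finally {
                cb.complete(tuple); // releases the lock taken in reconcile()
            }
        }
    }
}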
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
index 4760307..62ec3c9 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
@@ -28,8 +28,8 @@
  */
 public class PrimaryIndexSearchOperationCallback extends AbstractOperationCallback implements ISearchOperationCallback {
 
-    public PrimaryIndexSearchOperationCallback(int datasetId, int[] entityIdFields,
-            ILockManager lockManager, TransactionContext txnCtx) {
+    public PrimaryIndexSearchOperationCallback(int datasetId, int[] entityIdFields, ILockManager lockManager,
+            TransactionContext txnCtx) {
         super(datasetId, entityIdFields, txnCtx, lockManager);
     }
 
@@ -62,4 +62,9 @@
             throw new HyracksDataException(e);
         }
     }
+
+    @Override
+    public void complete(ITupleReference tuple) throws HyracksDataException {
+        //no op
+    }
 }
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
index 092f99c..5b55e9a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
@@ -18,12 +18,14 @@
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
 import edu.uci.ics.asterix.transaction.management.service.locking.ILockManager;
 import edu.uci.ics.asterix.transaction.management.service.logging.IndexLogger;
+import edu.uci.ics.asterix.transaction.management.service.transaction.IResourceManager.ResourceType;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference;
 
 /**
  * Secondary-index modifications do not require any locking.
@@ -40,8 +42,8 @@
     protected final TransactionSubsystem txnSubsystem;
 
     public SecondaryIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields,
-            TransactionContext txnCtx, ILockManager lockManager,
-            TransactionSubsystem txnSubsystem, long resourceId, byte resourceType, IndexOperation indexOp) {
+            TransactionContext txnCtx, ILockManager lockManager, TransactionSubsystem txnSubsystem, long resourceId,
+            byte resourceType, IndexOperation indexOp) {
         super(datasetId, primaryKeyFields, txnCtx, lockManager);
         this.resourceId = resourceId;
         this.resourceType = resourceType;
@@ -60,8 +62,21 @@
         IndexLogger logger = txnSubsystem.getTreeLoggerRepository().getIndexLogger(resourceId, resourceType);
         int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields);
         try {
+            IndexOperation effectiveOldOp;
+            if (resourceType == ResourceType.LSM_BTREE) {
+                if (before == null) {
+                    effectiveOldOp = IndexOperation.NOOP;
+                } else if (((LSMBTreeTupleReference) before).isAntimatter()) {
+                    effectiveOldOp = IndexOperation.DELETE;
+                } else {
+                    effectiveOldOp = IndexOperation.INSERT;
+                }
+            } else {
+                effectiveOldOp = oldOp;
+            }
             logger.generateLogRecord(txnSubsystem, txnCtx, datasetId.getId(), pkHash, resourceId, indexOp, after,
-                    oldOp, before);
+                    effectiveOldOp, before);
         } catch (ACIDException e) {
             throw new HyracksDataException(e);
         }
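
The LSM_BTREE branch above derives the effective old operation for the log record from the state of the before-image in the LSM component. A condensed decision function, grounded in that branch (class and method names are local to this sketch):

import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference;

class EffectiveOldOpSketch {
    static IndexOperation classify(LSMBTreeTupleReference before) {
        if (before == null) {
            return IndexOperation.NOOP;  // key absent from the component: nothing to undo
        }
        return before.isAntimatter()     // antimatter = tombstone for a deleted key
                ? IndexOperation.DELETE
                : IndexOperation.INSERT; // a live entry was found
    }
}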
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
index c53b651..4c8a583 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
@@ -44,4 +44,9 @@
         // Do nothing.
     }
 
+    @Override
+    public void complete(ITupleReference tuple) throws HyracksDataException {
+        // Do nothing.
+    }
+
 }
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 838dc6d..0a12ba1 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -87,7 +87,8 @@
         //#. load all local resources. 
         File rootDirFile = new File(this.rootDir);
         if (!rootDirFile.exists()) {
-            throw new HyracksDataException(rootDirFile.getAbsolutePath() + "doesn't exist.");
+            //rootDir may not exist if this node is not the metadata node and doesn't have any user data.
+            return;
         }
 
         FilenameFilter filter = new FilenameFilter() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
index b8820c4..5d81e8a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
@@ -164,6 +164,7 @@
                 allocChild = pArray.size() - 1;
             }
         }
+
         occupiedSlots++;
         return pArray.get(allocChild).allocate() + allocChild * ChildEntityInfoArrayManager.NUM_OF_SLOTS;
     }
@@ -230,45 +231,35 @@
      */
     private void shrink() {
         int i;
-        boolean bContiguous = true;
-        int decreaseCount = 0;
+        int removeCount = 0;
         int size = pArray.size();
         int maxDecreaseCount = size / 2;
         ChildEntityInfoArrayManager child;
-        for (i = size - 1; i >= 0; i--) {
-            child = pArray.get(i);
-            if (child.isEmpty() || child.isDeinitialized()) {
-                if (bContiguous) {
-                    pArray.remove(i);
-                    if (++decreaseCount == maxDecreaseCount) {
-                        break;
-                    }
-                } else {
-                    bContiguous = false;
-                    if (child.isEmpty()) {
-                        child.deinitialize();
-                        if (++decreaseCount == maxDecreaseCount) {
-                            break;
-                        }
-                    }
-                }
-            } else {
-                bContiguous = false;
+
+        //The first buffer is never deinitialized.
+        for (i = 1; i < size; i++) {
+            if (pArray.get(i).isEmpty()) {
+                pArray.get(i).deinitialize();
             }
         }
 
-        //reset allocChild when the child is removed or deinitialized.
-        size = pArray.size();
-        if (allocChild >= size || pArray.get(allocChild).isDeinitialized()) {
-            //set allocChild to any initialized one.
-            //It is guaranteed that there is at least one initialized child.
-            for (i = 0; i < size; i++) {
-                if (!pArray.get(i).isDeinitialized()) {
-                    allocChild = i;
+        //remove the empty buffers from the end
+        for (i = size - 1; i >= 1; i--) {
+            child = pArray.get(i);
+            if (child.isDeinitialized()) {
+                pArray.remove(i);
+                if (++removeCount == maxDecreaseCount) {
                     break;
                 }
+            } else {
+                break;
             }
         }
+        
+        //reset allocChild to the first buffer
+        allocChild = 0;
+
+        isShrinkTimerOn = false;
     }
 
     public String prettyPrint() {
@@ -538,7 +529,7 @@
         freeSlotNum = getNextFreeSlot(currentSlot);
         occupiedSlots++;
         if (LockManager.IS_DEBUG_MODE) {
-            System.out.println(Thread.currentThread().getName()+" entity allocate: "+currentSlot);
+            System.out.println(Thread.currentThread().getName() + " entity allocate: " + currentSlot);
         }
         return currentSlot;
     }
@@ -548,7 +539,7 @@
         freeSlotNum = slotNum;
         occupiedSlots--;
         if (LockManager.IS_DEBUG_MODE) {
-            System.out.println(Thread.currentThread().getName()+" entity deallocate: "+slotNum);
+            System.out.println(Thread.currentThread().getName() + " entity deallocate: " + slotNum);
         }
     }
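
The identical shrink() rewrite is applied to EntityLockInfoManager below. Both now follow a two-phase policy: deinitialize every empty child buffer except the first, then remove deinitialized children from the tail only, at most half the list per call, so surviving slot numbers keep their meaning. A standalone model with an illustrative Child interface:

import java.util.List;

class ShrinkSketch {
    interface Child {
        boolean isEmpty();
        boolean isDeinitialized();
        void deinitialize();
    }

    static void shrink(List<Child> pArray) {
        int size = pArray.size();
        int maxDecreaseCount = size / 2; // never drop more than half at once
        // Phase 1: release the backing storage of empty children; slot 0 always stays live.
        for (int i = 1; i < size; i++) {
            if (pArray.get(i).isEmpty()) {
                pArray.get(i).deinitialize();
            }
        }
        // Phase 2: pop deinitialized children from the tail only, preserving the
        // slot numbers (and outstanding handles) of everything that remains.
        int removeCount = 0;
        for (int i = size - 1; i >= 1 && pArray.get(i).isDeinitialized(); i--) {
            pArray.remove(i);
            if (++removeCount == maxDecreaseCount) {
                break;
            }
        }
    }
}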
 
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
index 59c20f2..ca00aa2 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
@@ -230,45 +230,35 @@
      */
     private void shrink() {
         int i;
-        boolean bContiguous = true;
-        int decreaseCount = 0;
+        int removeCount = 0;
         int size = pArray.size();
         int maxDecreaseCount = size / 2;
         ChildEntityLockInfoArrayManager child;
-        for (i = size - 1; i >= 0; i--) {
-            child = pArray.get(i);
-            if (child.isEmpty() || child.isDeinitialized()) {
-                if (bContiguous) {
-                    pArray.remove(i);
-                    if (++decreaseCount == maxDecreaseCount) {
-                        break;
-                    }
-                } else {
-                    bContiguous = false;
-                    if (child.isEmpty()) {
-                        child.deinitialize();
-                        if (++decreaseCount == maxDecreaseCount) {
-                            break;
-                        }
-                    }
-                }
-            } else {
-                bContiguous = false;
+
+        //The first buffer is never deinitialized.
+        for (i = 1; i < size; i++) {
+            if (pArray.get(i).isEmpty()) {
+                pArray.get(i).deinitialize();
             }
         }
 
-        //reset allocChild when the child is removed or deinitialized.
-        size = pArray.size();
-        if (allocChild >= size || pArray.get(allocChild).isDeinitialized()) {
-            //set allocChild to any initialized one.
-            //It is guaranteed that there is at least one initialized child.
-            for (i = 0; i < size; i++) {
-                if (!pArray.get(i).isDeinitialized()) {
-                    allocChild = i;
+        //remove the empty buffers from the end
+        for (i = size - 1; i >= 1; i--) {
+            child = pArray.get(i);
+            if (child.isDeinitialized()) {
+                pArray.remove(i);
+                if (++removeCount == maxDecreaseCount) {
                     break;
                 }
+            } else {
+                break;
             }
         }
+        
+        //reset allocChild to the first buffer
+        allocChild = 0;
+
+        isShrinkTimerOn = false;
     }
 
     public String prettyPrint() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
index b1a28ca..a354d2a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
@@ -44,6 +44,11 @@
 public class LockManager implements ILockManager {
 
     public static final boolean IS_DEBUG_MODE = false;//true
+    //This flag indicates whether a dataset-granule X lock request is allowed while
+    //there are concurrent lock requests. As of 4/16/2013, we allow the dataset-granule
+    //X lock only during DDL operations, which are preceded by holding an X latch on the
+    //metadata. Therefore, concurrent lock requests are not allowed together with a
+    //dataset-granule X lock.
+    public static final boolean ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS = false;
 
     public static final boolean ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET = true;
     //Threshold must be greater than 1 and should be reasonably large enough not to escalate too soon.
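
The hunk below starts wrapping the body of lock() in a try block; the visible intent is that the lock-table latch gets released on one exit path instead of before each early return (note the ESCALATED case below loses its explicit unlatch). The matching finally clause falls outside this excerpt, so the sketch hedges it as an assumption:

public void lock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
        throws ACIDException {
    latchLockTable();
    try {
        validateJob(txnContext);
        // ... escalation check and dataset/entity lock bookkeeping, as in the hunk below;
        //     early returns (e.g., case ESCALATED) no longer unlatch explicitly.
    } finally {
        unlatchLockTable(); // assumed single release point, beyond this excerpt
    }
}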
@@ -119,73 +124,111 @@
         JobInfo jobInfo;
         byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
         boolean doEscalate = false;
+        boolean caughtLockMgrLatchException = false;
 
         latchLockTable();
-        validateJob(txnContext);
+        try {
+            validateJob(txnContext);
 
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Requested", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
-                    dLockInfo, eLockInfo);
-        }
-
-        dLockInfo = datasetResourceHT.get(datasetId);
-        jobInfo = jobHT.get(jobId);
-
-        if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-            if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
-                int escalateStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
-                switch (escalateStatus) {
-                    case DO_ESCALATE:
-                        entityHashValue = -1;
-                        doEscalate = true;
-                        break;
-
-                    case ESCALATED:
-                        unlatchLockTable();
-                        return;
-
-                    default:
-                        break;
-                }
-            }
-        }
-
-        //#. if the datasetLockInfo doesn't exist in datasetResourceHT 
-        if (dLockInfo == null || dLockInfo.isNoHolder()) {
-            if (dLockInfo == null) {
-                dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
-                datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datsetId obj should be created
-            }
-            entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
-
-            //if dataset-granule lock
-            if (entityHashValue == -1) { //-1 stands for dataset-granule
-                entityInfoManager.increaseDatasetLockCount(entityInfo);
-                dLockInfo.increaseLockCount(datasetLockMode);
-                dLockInfo.addHolder(entityInfo);
-            } else {
-                entityInfoManager.increaseDatasetLockCount(entityInfo);
-                dLockInfo.increaseLockCount(datasetLockMode);
-                //add entityLockInfo
-                eLockInfo = entityLockInfoManager.allocate();
-                dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
-                entityInfoManager.increaseEntityLockCount(entityInfo);
-                entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
-                entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Requested", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                        dLockInfo, eLockInfo);
             }
 
-            if (jobInfo == null) {
-                jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
-                jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
-            }
-            jobInfo.addHoldingResource(entityInfo);
+            dLockInfo = datasetResourceHT.get(datasetId);
+            jobInfo = jobHT.get(jobId);
 
             if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-                if (!isInstant && datasetLockMode == LockMode.IS) {
-                    jobInfo.increaseDatasetISLockCount(dId);
+                if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
+                    int escalateStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
+                    switch (escalateStatus) {
+                        case DO_ESCALATE:
+                            entityHashValue = -1;
+                            doEscalate = true;
+                            break;
+
+                        case ESCALATED:
+                            return;
+
+                        default:
+                            break;
+                    }
+                }
+            }
+
+            //#. if the datasetLockInfo doesn't exist in datasetResourceHT 
+            if (dLockInfo == null || dLockInfo.isNoHolder()) {
+                if (dLockInfo == null) {
+                    dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
+                    datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datasetId obj should be created
+                }
+                entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
+
+                //if dataset-granule lock
+                if (entityHashValue == -1) { //-1 stands for dataset-granule
+                    entityInfoManager.increaseDatasetLockCount(entityInfo);
+                    dLockInfo.increaseLockCount(datasetLockMode);
+                    dLockInfo.addHolder(entityInfo);
+                } else {
+                    entityInfoManager.increaseDatasetLockCount(entityInfo);
+                    dLockInfo.increaseLockCount(datasetLockMode);
+                    //add entityLockInfo
+                    eLockInfo = entityLockInfoManager.allocate();
+                    dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
+                    entityInfoManager.increaseEntityLockCount(entityInfo);
+                    entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
+                    entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+                }
+
+                if (jobInfo == null) {
+                    jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
+                    jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
+                }
+                jobInfo.addHoldingResource(entityInfo);
+
+                if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+                    if (!isInstant && datasetLockMode == LockMode.IS) {
+                        jobInfo.increaseDatasetISLockCount(dId);
+                        if (doEscalate) {
+                            throw new IllegalStateException(
+                                    "ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
+                                            + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+                        }
+                    }
+                }
+
+                if (IS_DEBUG_MODE) {
+                    trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                            dLockInfo, eLockInfo);
+                }
+
+                return;
+            }
+
+            //#. the datasetLockInfo exists in datasetResourceHT.
+            //1. handle dataset-granule lock
+            entityInfo = lockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
+
+            //2. handle entity-granule lock
+            if (entityHashValue != -1) {
+                lockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
+            }
+
+            if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+                if (!isInstant) {
                     if (doEscalate) {
-                        throw new IllegalStateException("ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
-                                + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+                        //jobInfo must not be null.
+                        assert jobInfo != null;
+                        jobInfo.increaseDatasetISLockCount(dId);
+                        //release pre-acquired locks
+                        releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
+                    } else if (datasetLockMode == LockMode.IS) {
+                        if (jobInfo == null) {
+                            jobInfo = jobHT.get(jobId);
+                            //jobInfo must not be null;
+                            assert jobInfo != null;
+                        }
+                        jobInfo.increaseDatasetISLockCount(dId);
                     }
                 }
             }
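The escalation branch above consults needEscalateFromEntityToDataset, which compares the job's per-dataset IS-lock count against ESCALATE_TRHESHOLD_ENTITY_TO_DATASET. A hedged sketch of that decision: the caller has already checked datasetLockMode == LockMode.IS, the getDatasetISLockCount() accessor matches the increase/decrease calls visible in this patch, and the DONT_ESCALATE constant name for the default case is an assumption.

private int needEscalateFromEntityToDataset(JobInfo jobInfo, int dId, byte lockMode) {
    int count = jobInfo.getDatasetISLockCount(dId);
    if (count == ESCALATE_TRHESHOLD_ENTITY_TO_DATASET) {
        return DO_ESCALATE;   //rewrite this request as a dataset-granule lock
    }
    if (count > ESCALATE_TRHESHOLD_ENTITY_TO_DATASET) {
        return ESCALATED;     //the job already escalated; grant without further work
    }
    return DONT_ESCALATE;
}

When DO_ESCALATE fires, the request is rewritten to entityHashValue == -1 (a dataset-granule request) and, once the dataset lock is granted, the pre-acquired entity-level locks are released via releaseDatasetISLocks.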
@@ -194,44 +237,18 @@
                 trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
                         dLockInfo, eLockInfo);
             }
-
-            unlatchLockTable();
-            return;
-        }
-
-        //#. the datasetLockInfo exists in datasetResourceHT.
-        //1. handle dataset-granule lock
-        entityInfo = lockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
-
-        //2. handle entity-granule lock
-        if (entityHashValue != -1) {
-            lockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
-        }
-
-        if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-            if (!isInstant) {
-                if (doEscalate) {
-                    //jobInfo must not be null.
-                    assert jobInfo != null;
-                    jobInfo.increaseDatasetISLockCount(dId);
-                    //release pre-acquired locks
-                    releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
-                } else if (datasetLockMode == LockMode.IS) {
-                    if (jobInfo == null) {
-                        jobInfo = jobHT.get(jobId);
-                        //jobInfo must not be null;
-                        assert jobInfo != null;
-                    }
-                    jobInfo.increaseDatasetISLockCount(dId);
-                }
+        } catch (LockMgrLatchHandlerException e) {
+            //the latch was handed off to the handler, so don't unlatch here.
+            //Catching only this exception type (rather than the broad Exception)
+            //keeps other exceptions, e.g. ACIDException from validateJob(), from being swallowed.
+            caughtLockMgrLatchException = true;
+            throw new ACIDException(e.getInternalException());
+        } finally {
+            if (!caughtLockMgrLatchException) {
+                unlatchLockTable();
             }
         }
 
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext, dLockInfo,
-                    eLockInfo);
-        }
-        unlatchLockTable();
         return;
     }
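The structural change in lock(), mirrored below in unlock(), releaseLocks(), and the tryLock variants, is that latchLockTable() is now paired with a finally block, so helpers such as validateJob() no longer have to unlatch on their error paths. A condensed sketch of the discipline, using only methods visible in this patch:

latchLockTable();
boolean caughtLockMgrLatchException = false;
try {
    validateJob(txnContext); //may throw ACIDException; the latch is still released
    //... examine dLockInfo/jobInfo, then grant, wait, or escalate ...
} catch (LockMgrLatchHandlerException e) {
    //the latch was handed off to the handler, so it must not be released here
    caughtLockMgrLatchException = true;
    throw new ACIDException(e.getInternalException());
} finally {
    if (!caughtLockMgrLatchException) {
        unlatchLockTable(); //single release point on every other path
    }
}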
 
@@ -280,14 +297,9 @@
 
     private void validateJob(TransactionContext txnContext) throws ACIDException {
         if (txnContext.getTxnState() == TransactionState.ABORTED) {
-            unlatchLockTable();
             throw new ACIDException("" + txnContext.getJobId() + " is in ABORTED state.");
         } else if (txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS) {
-            try {
-                requestAbort(txnContext);
-            } finally {
-                unlatchLockTable();
-            }
+            requestAbort(txnContext);
         }
     }
 
@@ -399,32 +411,26 @@
                 ///////////////////////////////////////////////////////////////////////////////////////////////
 
                 /////////////////////////////////////////////////////////////////////////////////////////////
-                //[Notice]
-                //There has been no same caller as (jId, dId, entityHashValue) triplet.
-                //But there could be the same caller as (jId, dId) pair.
-                //For example, two requests (J1, D1, E1) and (J1, D1, E2) are considered as duplicated call in dataset-granule perspective.
-                //Therefore, the above duplicated call case is covered in the following code.
-                //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
-                //if (jobInfo.isDatasetLockGranted(dId, datasetLockMode)) {
-                if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
-                    if (dLockInfo.isCompatible(datasetLockMode)) {
-                        //this is duplicated call
-                        entityInfoManager.increaseDatasetLockCount(entityInfo);
-                        if (entityHashValue == -1) {
-                            dLockInfo.increaseLockCount(datasetLockMode);
-                            dLockInfo.addHolder(entityInfo);
-                        } else {
-                            dLockInfo.increaseLockCount(datasetLockMode);
-                            //IS and IX holders are implicitly handled.
-                        }
-                        //add entityInfo to JobInfo's holding-resource list
-                        jobInfo.addHoldingResource(entityInfo);
+                if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+                    //The following case may only occur when the dataset-level X lock is requested
+                    //concurrently with other lock requests.
 
-                        return entityInfo;
-                    } else {
-                        //considered as upgrader
-                        waiterCount = handleLockWaiter(dLockInfo, -1, entityInfo, true, true, txnContext, jobInfo, -1);
-                        if (waiterCount > 0) {
+                    //[Notice]
+                    //No caller has used the same (jId, dId, entityHashValue) triplet before.
+                    //But there could be the same caller in terms of the (jId, dId) pair.
+                    //For example,
+                    //1) (J1, D1, E1) acquires IS on Dataset D1
+                    //2) (J2, D1, -1) requests X  on Dataset D1, but waits
+                    //3) (J1, D1, E2) requests IS on Dataset D1, but would have to wait
+                    //Step 3) may cause a deadlock if 1) and 3) run on the same thread.
+                    //Even if (J1, D1, E1) and (J1, D1, E2) are two different threads, instead of
+                    //aborting (J1, D1, E1) due to the deadlock, we give 3) higher priority than 2)
+                    //as long as the dataset-level lock on D1 is held by the same jobId.
+                    //The above consideration is covered by the following code, which finds the
+                    //matching dataset-granule lock request, i.e., the (J1, D1) pair in the example.
+                    if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+                        if (dLockInfo.isCompatible(datasetLockMode)) {
+                            //this is duplicated call
                             entityInfoManager.increaseDatasetLockCount(entityInfo);
                             if (entityHashValue == -1) {
                                 dLockInfo.increaseLockCount(datasetLockMode);
@@ -435,8 +441,26 @@
                             }
                             //add entityInfo to JobInfo's holding-resource list
                             jobInfo.addHoldingResource(entityInfo);
+
+                            return entityInfo;
+                        } else {
+                            //considered as upgrader
+                            waiterCount = handleLockWaiter(dLockInfo, -1, entityInfo, true, true, txnContext, jobInfo,
+                                    -1);
+                            if (waiterCount > 0) {
+                                entityInfoManager.increaseDatasetLockCount(entityInfo);
+                                if (entityHashValue == -1) {
+                                    dLockInfo.increaseLockCount(datasetLockMode);
+                                    dLockInfo.addHolder(entityInfo);
+                                } else {
+                                    dLockInfo.increaseLockCount(datasetLockMode);
+                                    //IS and IX holders are implicitly handled.
+                                }
+                                //add entityInfo to JobInfo's holding-resource list
+                                jobInfo.addHoldingResource(entityInfo);
+                            }
+                            return entityInfo;
                         }
-                        return entityInfo;
                     }
                 }
                 /////////////////////////////////////////////////////////////////////////////////////////////
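The guarded block above reduces to one fast path: if the requesting job already holds the dataset in IS mode and the requested dataset-level mode is still compatible, the request is treated as a duplicated (jId, dId) call and granted immediately instead of queueing behind the waiting X request. Condensed from the hunk above (the dataset-granule branch and the upgrader path are elided):

if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
    if (dLockInfo.isCompatible(datasetLockMode)) {
        //duplicated call in dataset-granule terms: grant without waiting
        entityInfoManager.increaseDatasetLockCount(entityInfo);
        dLockInfo.increaseLockCount(datasetLockMode); //IS and IX holders are implicit
        jobInfo.addHoldingResource(entityInfo);
        return entityInfo;
    }
    //otherwise the request is handed to handleLockWaiter(...) as an upgrader
}

Granting here is what prevents the self-deadlock described in the [Notice]: the job would otherwise wait behind an X request that cannot proceed until this same job finishes.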
@@ -617,8 +641,9 @@
             throws ACIDException {
         internalUnlock(datasetId, entityHashValue, txnContext, false, commitFlag);
     }
-    
-    private void instantUnlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext) throws ACIDException {
+
+    private void instantUnlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext)
+            throws ACIDException {
         internalUnlock(datasetId, entityHashValue, txnContext, true, false);
     }
 
@@ -639,127 +664,124 @@
         }
 
         latchLockTable();
-        validateJob(txnContext);
+        try {
+            validateJob(txnContext);
 
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Requested", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
-                    dLockInfo, eLockInfo);
-        }
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Requested", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
+                        dLockInfo, eLockInfo);
+            }
 
-        //find the resource to be unlocked
-        dLockInfo = datasetResourceHT.get(datasetId);
-        jobInfo = jobHT.get(jobId);
-        if (dLockInfo == null || jobInfo == null) {
-            unlatchLockTable();
-            throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
-        }
+            //find the resource to be unlocked
+            dLockInfo = datasetResourceHT.get(datasetId);
+            jobInfo = jobHT.get(jobId);
+            if (dLockInfo == null || jobInfo == null) {
+                throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
+            }
 
-        eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+            eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
 
-        if (eLockInfo == -1) {
-            unlatchLockTable();
-            throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
-        }
+            if (eLockInfo == -1) {
+                throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
+            }
 
-        //find the corresponding entityInfo
-        entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jobId.getId(), entityHashValue);
-        if (entityInfo == -1) {
-            unlatchLockTable();
-            throw new IllegalStateException("Invalid unlock request[" + jobId.getId() + "," + datasetId.getId() + ","
-                    + entityHashValue + "]: Corresponding lock info doesn't exist.");
-        }
+            //find the corresponding entityInfo
+            entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jobId.getId(), entityHashValue);
+            if (entityInfo == -1) {
+                throw new IllegalStateException("Invalid unlock request[" + jobId.getId() + "," + datasetId.getId()
+                        + "," + entityHashValue + "]: Corresponding lock info doesn't exist.");
+            }
 
-        datasetLockMode = entityInfoManager.getDatasetLockMode(entityInfo) == LockMode.S ? LockMode.IS : LockMode.IX;
+            datasetLockMode = entityInfoManager.getDatasetLockMode(entityInfo) == LockMode.S ? LockMode.IS
+                    : LockMode.IX;
 
-        //decrease the corresponding count of dLockInfo/eLockInfo/entityInfo
-        dLockInfo.decreaseLockCount(datasetLockMode);
-        entityLockInfoManager.decreaseLockCount(eLockInfo, entityInfoManager.getEntityLockMode(entityInfo));
-        entityInfoManager.decreaseDatasetLockCount(entityInfo);
-        entityInfoManager.decreaseEntityLockCount(entityInfo);
+            //decrease the corresponding count of dLockInfo/eLockInfo/entityInfo
+            dLockInfo.decreaseLockCount(datasetLockMode);
+            entityLockInfoManager.decreaseLockCount(eLockInfo, entityInfoManager.getEntityLockMode(entityInfo));
+            entityInfoManager.decreaseDatasetLockCount(entityInfo);
+            entityInfoManager.decreaseEntityLockCount(entityInfo);
 
-        if (entityInfoManager.getEntityLockCount(entityInfo) == 0
-                && entityInfoManager.getDatasetLockCount(entityInfo) == 0) {
-            int threadCount = 0; //number of threads(in the same job) waiting on the same resource 
-            int waiterObjId = jobInfo.getFirstWaitingResource();
-            int waitingEntityInfo;
-            LockWaiter waiterObj;
+            if (entityInfoManager.getEntityLockCount(entityInfo) == 0
+                    && entityInfoManager.getDatasetLockCount(entityInfo) == 0) {
+                int threadCount = 0; //number of threads (in the same job) waiting on the same resource
+                int waiterObjId = jobInfo.getFirstWaitingResource();
+                int waitingEntityInfo;
+                LockWaiter waiterObj;
 
-            //TODO
-            //This code should be taken care properly when there is a way to avoid doubling memory space for txnIds.
-            //This commit log is written here in order to avoid increasing the memory space for managing transactionIds
-            if (commitFlag) {
-                if (txnContext.getTransactionType().equals(TransactionContext.TransactionType.READ_WRITE)) {
-                    try {
-                        txnSubsystem.getLogManager().log(LogType.ENTITY_COMMIT, txnContext, datasetId.getId(),
-                                entityHashValue, -1, (byte) 0, 0, null, null, logicalLogLocator);
-                    } catch (ACIDException e) {
+                //TODO
+                //This code should be revisited once there is a way to avoid doubling the memory space for txnIds.
+                //The commit log is written here in order to avoid increasing the memory space for managing transactionIds.
+                if (commitFlag) {
+                    if (txnContext.getTransactionType().equals(TransactionContext.TransactionType.READ_WRITE)) {
                         try {
+                            txnSubsystem.getLogManager().log(LogType.ENTITY_COMMIT, txnContext, datasetId.getId(),
+                                    entityHashValue, -1, (byte) 0, 0, null, null, logicalLogLocator);
+                        } catch (ACIDException e) {
                             requestAbort(txnContext);
-                        } finally {
-                            unlatchLockTable();
                         }
                     }
+
+                    txnContext.updateLastLSNForIndexes(logicalLogLocator.getLsn());
                 }
 
-                txnContext.updateLastLSNForIndexes(logicalLogLocator.getLsn());
-            }
+                //1) wake up waiters and remove holder
+                //wake up waiters of dataset-granule lock
+                wakeUpDatasetLockWaiters(dLockInfo);
+                //wake up waiters of entity-granule lock
+                wakeUpEntityLockWaiters(eLockInfo);
+                //remove the holder from eLockInfo's holder list and remove the holding resource from jobInfo's holding resource list
+                //this can be done in the following single function call.
+                entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
 
-            //1) wake up waiters and remove holder
-            //wake up waiters of dataset-granule lock
-            wakeUpDatasetLockWaiters(dLockInfo);
-            //wake up waiters of entity-granule lock
-            wakeUpEntityLockWaiters(eLockInfo);
-            //remove the holder from eLockInfo's holder list and remove the holding resource from jobInfo's holding resource list
-            //this can be done in the following single function call.
-            entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
-
-            //2) if 
-            //      there is no waiting thread on the same resource (this can be checked through jobInfo)
-            //   then 
-            //      a) delete the corresponding entityInfo
-            //      b) write commit log for the unlocked resource(which is a committed txn).
-            while (waiterObjId != -1) {
-                waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
-                waitingEntityInfo = waiterObj.getEntityInfoSlot();
-                if (entityInfoManager.getDatasetId(waitingEntityInfo) == datasetId.getId()
-                        && entityInfoManager.getPKHashVal(waitingEntityInfo) == entityHashValue) {
-                    threadCount++;
-                    break;
+                //2) if 
+                //      there is no waiting thread on the same resource (this can be checked through jobInfo)
+                //   then 
+                //      a) delete the corresponding entityInfo
+                //      b) write a commit log for the unlocked resource (which is a committed txn).
+                while (waiterObjId != -1) {
+                    waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+                    waitingEntityInfo = waiterObj.getEntityInfoSlot();
+                    if (entityInfoManager.getDatasetId(waitingEntityInfo) == datasetId.getId()
+                            && entityInfoManager.getPKHashVal(waitingEntityInfo) == entityHashValue) {
+                        threadCount++;
+                        break;
+                    }
+                    waiterObjId = waiterObj.getNextWaiterObjId();
                 }
-                waiterObjId = waiterObj.getNextWaiterObjId();
-            }
-            if (threadCount == 0) {
-                if (entityInfoManager.getEntityLockMode(entityInfo) == LockMode.X) {
-                    //TODO
-                    //write a commit log for the unlocked resource
-                    //need to figure out that instantLock() also needs to write a commit log. 
+                if (threadCount == 0) {
+                    if (entityInfoManager.getEntityLockMode(entityInfo) == LockMode.X) {
+                        //TODO
+                        //write a commit log for the unlocked resource
+                        //need to figure out whether instantLock() also needs to write a commit log.
+                    }
+                    entityInfoManager.deallocate(entityInfo);
                 }
-                entityInfoManager.deallocate(entityInfo);
             }
-        }
 
-        //deallocate entityLockInfo's slot if there is no txn referring to the entityLockInfo.
-        if (entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
-                && entityLockInfoManager.getLastHolder(eLockInfo) == -1
-                && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
-            dLockInfo.getEntityResourceHT().remove(entityHashValue);
-            entityLockInfoManager.deallocate(eLockInfo);
-        }
-
-        //we don't deallocate datasetLockInfo even if there is no txn referring to the datasetLockInfo
-        //since the datasetLockInfo is likely to be referred to again.
-
-        if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-            if (!isInstant && datasetLockMode == LockMode.IS) {
-                jobInfo.decreaseDatasetISLockCount(datasetId.getId());
+            //deallocate entityLockInfo's slot if there is no txn referring to the entityLockInfo.
+            if (entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
+                    && entityLockInfoManager.getLastHolder(eLockInfo) == -1
+                    && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
+                dLockInfo.getEntityResourceHT().remove(entityHashValue);
+                entityLockInfoManager.deallocate(eLockInfo);
             }
-        }
 
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Granted", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
-                    dLockInfo, eLockInfo);
+            //we don't deallocate datasetLockInfo even if there is no txn referring to the datasetLockInfo
+            //since the datasetLockInfo is likely to be referred to again.
+
+            if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+                if (!isInstant && datasetLockMode == LockMode.IS) {
+                    jobInfo.decreaseDatasetISLockCount(datasetId.getId());
+                }
+            }
+
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Granted", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
+                        dLockInfo, eLockInfo);
+            }
+        } finally {
+            unlatchLockTable();
         }
-        unlatchLockTable();
     }
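unlock's bookkeeping above runs in a fixed order: decrement the four reference counts, optionally write an ENTITY_COMMIT log record, wake waiters, and deallocate slots only when nothing references them any more. Condensed from the hunk, with the deallocation rules restated as comments:

dLockInfo.decreaseLockCount(datasetLockMode);
entityLockInfoManager.decreaseLockCount(eLockInfo, entityInfoManager.getEntityLockMode(entityInfo));
entityInfoManager.decreaseDatasetLockCount(entityInfo);
entityInfoManager.decreaseEntityLockCount(entityInfo);

if (entityInfoManager.getEntityLockCount(entityInfo) == 0
        && entityInfoManager.getDatasetLockCount(entityInfo) == 0) {
    wakeUpDatasetLockWaiters(dLockInfo); //a released lock may unblock either granule
    wakeUpEntityLockWaiters(eLockInfo);
    entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
    //entityInfo is deallocated only if no other thread of the same job still
    //waits on the same (datasetId, entityHashValue) resource
}
//eLockInfo is deallocated once it has no holder, waiter, or upgrader;
//dLockInfo is intentionally kept, since the dataset is likely to be locked again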
 
     @Override
@@ -779,183 +801,178 @@
         JobId jobId = txnContext.getJobId();
 
         latchLockTable();
-
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Requested", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
-                    dLockInfo, eLockInfo);
-        }
-
-        JobInfo jobInfo = jobHT.get(jobId);
-        if (jobInfo == null) {
-            unlatchLockTable();
-            return;
-        }
-
-        //remove waiterObj of JobInfo 
-        //[Notice]
-        //waiterObjs may exist if aborted thread is the caller of this function.
-        //Even if there are the waiterObjs, there is no waiting thread on the objects. 
-        //If the caller of this function is an aborted thread, it is guaranteed that there is no waiting threads
-        //on the waiterObjs since when the aborted caller thread is waken up, all other waiting threads are
-        //also waken up at the same time through 'notifyAll()' call.
-        //In contrast, if the caller of this function is not an aborted thread, then there is no waiting object.
-        int waiterObjId = jobInfo.getFirstWaitingResource();
-        int nextWaiterObjId;
-        while (waiterObjId != -1) {
-            existWaiter = true;
-            waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
-            nextWaiterObjId = waiterObj.getNextWaitingResourceObjId();
-            entityInfo = waiterObj.getEntityInfoSlot();
+        try {
             if (IS_DEBUG_MODE) {
-                if (jobId.getId() != entityInfoManager.getJobId(entityInfo)) {
-                    throw new IllegalStateException("JobInfo(" + jobId + ") has diffrent Job(JID:"
-                            + entityInfoManager.getJobId(entityInfo) + "'s lock request!!!");
-                }
+                trackLockRequest("Requested", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
+                        dLockInfo, eLockInfo);
             }
 
-            //1. remove from waiter(or upgrader)'s list of dLockInfo or eLockInfo.
-            did = entityInfoManager.getDatasetId(entityInfo);
-            tempDatasetIdObj.setId(did);
-            dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
-
-            if (waiterObj.isWaitingOnEntityLock()) {
-                entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
-                eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
-                if (waiterObj.isWaiter()) {
-                    entityLockInfoManager.removeWaiter(eLockInfo, waiterObjId);
-                } else {
-                    entityLockInfoManager.removeUpgrader(eLockInfo, waiterObjId);
-                }
-            } else {
-                if (waiterObj.isWaiter()) {
-                    dLockInfo.removeWaiter(waiterObjId);
-                } else {
-                    dLockInfo.removeUpgrader(waiterObjId);
-                }
+            JobInfo jobInfo = jobHT.get(jobId);
+            if (jobInfo == null) {
+                return;
             }
 
-            //2. wake-up waiters
-            latchWaitNotify();
-            synchronized (waiterObj) {
-                unlatchWaitNotify();
-                waiterObj.setWait(false);
+            //remove waiterObj of JobInfo 
+            //[Notice]
+            //waiterObjs may exist if an aborted thread is the caller of this function.
+            //Even if the waiterObjs exist, no thread is waiting on them.
+            //If the caller of this function is an aborted thread, it is guaranteed that no threads are
+            //waiting on the waiterObjs, since when the aborted caller thread is woken up, all other
+            //waiting threads are also woken up at the same time through the 'notifyAll()' call.
+            //In contrast, if the caller of this function is not an aborted thread, then there are no waiting objects.
+            int waiterObjId = jobInfo.getFirstWaitingResource();
+            int nextWaiterObjId;
+            while (waiterObjId != -1) {
+                existWaiter = true;
+                waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+                nextWaiterObjId = waiterObj.getNextWaitingResourceObjId();
+                entityInfo = waiterObj.getEntityInfoSlot();
                 if (IS_DEBUG_MODE) {
-                    System.out.println("" + Thread.currentThread().getName() + "\twake-up(D): WID(" + waiterObjId
-                            + "),EID(" + waiterObj.getEntityInfoSlot() + ")");
-                }
-                waiterObj.notifyAll();
-            }
-
-            //3. deallocate waiterObj
-            lockWaiterManager.deallocate(waiterObjId);
-
-            //4. deallocate entityInfo only if this waiter is not an upgrader
-            if (entityInfoManager.getDatasetLockCount(entityInfo) == 0
-                    && entityInfoManager.getEntityLockCount(entityInfo) == 0) {
-                entityInfoManager.deallocate(entityInfo);
-            }
-            waiterObjId = nextWaiterObjId;
-        }
-
-        //release holding resources
-        entityInfo = jobInfo.getLastHoldingResource();
-        while (entityInfo != -1) {
-            prevEntityInfo = entityInfoManager.getPrevJobResource(entityInfo);
-
-            //decrease lock count of datasetLock and entityLock
-            did = entityInfoManager.getDatasetId(entityInfo);
-            tempDatasetIdObj.setId(did);
-            dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
-            entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
-
-            if (entityHashValue == -1) {
-                //decrease datasetLockCount
-                lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
-                datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
-                if (datasetLockCount != 0) {
-                    dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
-
-                    //wakeup waiters of datasetLock and remove holder from datasetLockInfo
-                    wakeUpDatasetLockWaiters(dLockInfo);
-
-                    //remove the holder from datasetLockInfo only if the lock is dataset-granule lock.
-                    //--> this also removes the holding resource from jobInfo               
-                    //(Because the IX and IS lock's holders are handled implicitly, 
-                    //those are not in the holder list of datasetLockInfo.)
-                    dLockInfo.removeHolder(entityInfo, jobInfo);
-                }
-            } else {
-                //decrease datasetLockCount
-                lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
-                lockMode = lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
-                datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
-
-                if (datasetLockCount != 0) {
-                    dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
-                }
-
-                //decrease entityLockCount
-                lockMode = entityInfoManager.getEntityLockMode(entityInfo);
-                entityLockCount = entityInfoManager.getEntityLockCount(entityInfo);
-                eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
-                if (IS_DEBUG_MODE) {
-                    if (eLockInfo < 0) {
-                        System.out.println("eLockInfo:" + eLockInfo);
+                    if (jobId.getId() != entityInfoManager.getJobId(entityInfo)) {
+                        throw new IllegalStateException("JobInfo(" + jobId + ") has a different job's (JID:"
+                                + entityInfoManager.getJobId(entityInfo) + ") lock request!!!");
                     }
                 }
 
-                if (entityLockCount != 0) {
-                    entityLockInfoManager.decreaseLockCount(eLockInfo, lockMode, (short) entityLockCount);
+                //1. remove from waiter(or upgrader)'s list of dLockInfo or eLockInfo.
+                did = entityInfoManager.getDatasetId(entityInfo);
+                tempDatasetIdObj.setId(did);
+                dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
+
+                if (waiterObj.isWaitingOnEntityLock()) {
+                    entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
+                    eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+                    if (waiterObj.isWaiter()) {
+                        entityLockInfoManager.removeWaiter(eLockInfo, waiterObjId);
+                    } else {
+                        entityLockInfoManager.removeUpgrader(eLockInfo, waiterObjId);
+                    }
+                } else {
+                    if (waiterObj.isWaiter()) {
+                        dLockInfo.removeWaiter(waiterObjId);
+                    } else {
+                        dLockInfo.removeUpgrader(waiterObjId);
+                    }
                 }
 
-                if (datasetLockCount != 0) {
-                    //wakeup waiters of datasetLock and don't remove holder from datasetLockInfo
-                    wakeUpDatasetLockWaiters(dLockInfo);
+                //2. wake-up waiters
+                latchWaitNotify();
+                synchronized (waiterObj) {
+                    unlatchWaitNotify();
+                    waiterObj.setWait(false);
+                    if (IS_DEBUG_MODE) {
+                        System.out.println("" + Thread.currentThread().getName() + "\twake-up(D): WID(" + waiterObjId
+                                + "),EID(" + waiterObj.getEntityInfoSlot() + ")");
+                    }
+                    waiterObj.notifyAll();
                 }
 
-                if (entityLockCount != 0) {
-                    //wakeup waiters of entityLock
-                    wakeUpEntityLockWaiters(eLockInfo);
+                //3. deallocate waiterObj
+                lockWaiterManager.deallocate(waiterObjId);
 
-                    //remove the holder from entityLockInfo 
-                    //--> this also removes the holding resource from jobInfo
-                    entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
+                //4. deallocate entityInfo only if this waiter is not an upgrader
+                if (entityInfoManager.getDatasetLockCount(entityInfo) == 0
+                        && entityInfoManager.getEntityLockCount(entityInfo) == 0) {
+                    entityInfoManager.deallocate(entityInfo);
                 }
-
-                //deallocate entityLockInfo if there is no holder and waiter.
-                if (entityLockInfoManager.getLastHolder(eLockInfo) == -1
-                        && entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
-                        && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
-                    dLockInfo.getEntityResourceHT().remove(entityHashValue);
-                    entityLockInfoManager.deallocate(eLockInfo);
-                    //                    if (IS_DEBUG_MODE) {
-                    //                        System.out.println("removed PK["+entityHashValue+"]");
-                    //                    }
-                }
+                waiterObjId = nextWaiterObjId;
             }
 
-            //deallocate entityInfo
-            entityInfoManager.deallocate(entityInfo);
-            //            if (IS_DEBUG_MODE) {
-            //                System.out.println("dellocate EntityInfo["+entityInfo+"]");
-            //            }
+            //release holding resources
+            entityInfo = jobInfo.getLastHoldingResource();
+            while (entityInfo != -1) {
+                prevEntityInfo = entityInfoManager.getPrevJobResource(entityInfo);
 
-            entityInfo = prevEntityInfo;
+                //decrease lock count of datasetLock and entityLock
+                did = entityInfoManager.getDatasetId(entityInfo);
+                tempDatasetIdObj.setId(did);
+                dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
+                entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
+
+                if (entityHashValue == -1) {
+                    //decrease datasetLockCount
+                    lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
+                    datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
+                    if (datasetLockCount != 0) {
+                        dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
+
+                        //wakeup waiters of datasetLock and remove holder from datasetLockInfo
+                        wakeUpDatasetLockWaiters(dLockInfo);
+
+                        //remove the holder from datasetLockInfo only if the lock is dataset-granule lock.
+                        //--> this also removes the holding resource from jobInfo               
+                        //(Because the IX and IS lock's holders are handled implicitly, 
+                        //those are not in the holder list of datasetLockInfo.)
+                        dLockInfo.removeHolder(entityInfo, jobInfo);
+                    }
+                } else {
+                    //decrease datasetLockCount
+                    lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
+                    lockMode = lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
+                    datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
+
+                    if (datasetLockCount != 0) {
+                        dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
+                    }
+
+                    //decrease entityLockCount
+                    lockMode = entityInfoManager.getEntityLockMode(entityInfo);
+                    entityLockCount = entityInfoManager.getEntityLockCount(entityInfo);
+                    eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+                    if (IS_DEBUG_MODE) {
+                        if (eLockInfo < 0) {
+                            System.out.println("eLockInfo:" + eLockInfo);
+                        }
+                    }
+
+                    if (entityLockCount != 0) {
+                        entityLockInfoManager.decreaseLockCount(eLockInfo, lockMode, (short) entityLockCount);
+                    }
+
+                    if (datasetLockCount != 0) {
+                        //wakeup waiters of datasetLock and don't remove holder from datasetLockInfo
+                        wakeUpDatasetLockWaiters(dLockInfo);
+                    }
+
+                    if (entityLockCount != 0) {
+                        //wakeup waiters of entityLock
+                        wakeUpEntityLockWaiters(eLockInfo);
+
+                        //remove the holder from entityLockInfo 
+                        //--> this also removes the holding resource from jobInfo
+                        entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
+                    }
+
+                    //deallocate entityLockInfo if there is no holder and waiter.
+                    if (entityLockInfoManager.getLastHolder(eLockInfo) == -1
+                            && entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
+                            && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
+                        dLockInfo.getEntityResourceHT().remove(entityHashValue);
+                        entityLockInfoManager.deallocate(eLockInfo);
+                    }
+                }
+
+                //deallocate entityInfo
+                entityInfoManager.deallocate(entityInfo);
+
+                entityInfo = prevEntityInfo;
+            }
+
+            //remove JobInfo
+            jobHT.remove(jobId);
+
+            if (existWaiter) {
+                txnContext.setStatus(TransactionContext.TIMED_OUT_STATUS);
+                txnContext.setTxnState(TransactionState.ABORTED);
+            }
+
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Granted", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
+                        dLockInfo, eLockInfo);
+            }
+        } finally {
+            unlatchLockTable();
         }
-
-        //remove JobInfo
-        jobHT.remove(jobId);
-
-        if (existWaiter) {
-            txnContext.setStatus(TransactionContext.TIMED_OUT_STATUS);
-            txnContext.setTxnState(TransactionState.ABORTED);
-        }
-
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Granted", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
-                    dLockInfo, eLockInfo);
-        }
-        unlatchLockTable();
     }
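The wake-up sequence in releaseLocks (latchWaitNotify, synchronized(waiterObj), unlatchWaitNotify, setWait(false), notifyAll) is ordered so that the wait flag is always flipped under the waiter's monitor before the notification. A sketch of the waiter-side loop it pairs with; the real waiting code lives elsewhere in LockManager, and needWait() is an illustrative stand-in for reading the flag that setWait() writes:

synchronized (waiterObj) {
    while (waiterObj.needWait()) { //illustrative accessor for the setWait() flag
        try {
            waiterObj.wait();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); //preserve interrupt status, keep waiting
        }
    }
}

Because the releaser holds the same monitor when it clears the flag and calls notifyAll(), a waiter that re-checks the flag under that monitor can never miss the signal.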
 
     @Override
@@ -981,20 +998,210 @@
     @Override
     public boolean instantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
             throws ACIDException {
-        boolean isGranted = false;
-        //        try {
-        //            isGranted = internalTryLock(datasetId, entityHashValue, lockMode, txnContext);
-        //            return isGranted;
-        //        } finally {
-        //            if (isGranted) {
-        //                unlock(datasetId, entityHashValue, txnContext);
-        //            }
-        //        }
-        isGranted = internalTryLock(datasetId, entityHashValue, lockMode, txnContext, true);
-        if (isGranted) {
-            instantUnlock(datasetId, entityHashValue, txnContext);
+        return internalInstantTryLock(datasetId, entityHashValue, lockMode, txnContext);
+    }
+
+    private boolean internalInstantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
+            TransactionContext txnContext) throws ACIDException {
+        DatasetLockInfo dLockInfo = null;
+        boolean isSuccess = true;
+
+        latchLockTable();
+        try {
+            validateJob(txnContext);
+
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Requested", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+                        txnContext, dLockInfo, -1);
+            }
+
+            dLockInfo = datasetResourceHT.get(datasetId);
+
+            //#. if the datasetLockInfo doesn't exist in datasetResourceHT 
+            if (dLockInfo == null || dLockInfo.isNoHolder()) {
+                if (IS_DEBUG_MODE) {
+                    trackLockRequest("Granted", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+                            txnContext, dLockInfo, -1);
+                }
+                return true;
+            }
+
+            //#. the datasetLockInfo exists in datasetResourceHT.
+            //1. handle dataset-granule lock
+            byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS
+                    : LockMode.IX;
+            if (datasetLockMode == LockMode.IS) {
+                //[Notice]
+                //Skip checking the dataset-level lock compatibility if the requested LockMode is an IS lock.
+                //We know that this internalInstantTryLock() call with IS lock mode will always be granted
+                //because we don't allow an X lock at the dataset level except during DDL operations.
+                //During a DDL operation, all other operations are pending, so there is no conflict.
+                isSuccess = true;
+            } else {
+                isSuccess = instantTryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext, dLockInfo,
+                        datasetLockMode);
+            }
+
+            if (isSuccess && entityHashValue != -1) {
+                //2. handle entity-granule lock
+                isSuccess = instantTryLockEntityGranule(datasetId, entityHashValue, lockMode, txnContext, dLockInfo);
+            }
+
+            if (IS_DEBUG_MODE) {
+                if (isSuccess) {
+                    trackLockRequest("Granted", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+                            txnContext, dLockInfo, -1);
+                } else {
+                    trackLockRequest("Failed", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+                            txnContext, dLockInfo, -1);
+                }
+            }
+
+        } finally {
+            unlatchLockTable();
         }
-        return isGranted;
+
+        return isSuccess;
+    }
+
+    private boolean instantTryLockDatasetGranule(DatasetId datasetId, int entityHashValue, byte lockMode,
+            TransactionContext txnContext, DatasetLockInfo dLockInfo, byte datasetLockMode) throws ACIDException {
+        JobId jobId = txnContext.getJobId();
+        int jId = jobId.getId(); //int-type jobId
+        int dId = datasetId.getId(); //int-type datasetId
+        int waiterObjId;
+        int entityInfo = -1;
+        JobInfo jobInfo;
+        boolean isUpgrade = false;
+
+        jobInfo = jobHT.get(jobId);
+
+        //check duplicated call
+
+        //1. a lock request causing duplicated upgrading requests from different threads in the same job
+        waiterObjId = dLockInfo.findUpgraderFromUpgraderList(jId, entityHashValue);
+        if (waiterObjId != -1) {
+            return false;
+        }
+
+        //2. a lock request causing duplicated waiting requests from different threads in the same job
+        waiterObjId = dLockInfo.findWaiterFromWaiterList(jId, entityHashValue);
+        if (waiterObjId != -1) {
+            return false;
+        }
+
+        //3. a lock request causing duplicated holding requests from different threads or a single thread in the same job
+        entityInfo = dLockInfo.findEntityInfoFromHolderList(jId, entityHashValue);
+        if (entityInfo == -1) { //new call from this job -> doesn't mean that eLockInfo doesn't exist since another thread might have created the eLockInfo already.
+
+            //fail if any upgrader or waiter exists, or the requested lock mode is not compatible
+            if (dLockInfo.getFirstUpgrader() != -1 || dLockInfo.getFirstWaiter() != -1
+                    || !dLockInfo.isCompatible(datasetLockMode)) {
+
+                if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+                    //The following case may only occur when the dataset-level X lock is requested
+                    //concurrently with other lock requests.
+
+                    //[Notice]
+                    //No caller has used the same (jId, dId, entityHashValue) triplet before.
+                    //But there could be the same caller in terms of the (jId, dId) pair.
+                    //For example,
+                    //1) (J1, D1, E1) acquires IS on Dataset D1
+                    //2) (J2, D1, -1) requests X  on Dataset D1, but waits
+                    //3) (J1, D1, E2) requests IS on Dataset D1, but would have to wait
+                    //Step 3) may cause a deadlock if 1) and 3) run on the same thread.
+                    //Even if (J1, D1, E1) and (J1, D1, E2) are two different threads, instead of
+                    //aborting (J1, D1, E1) due to the deadlock, we give 3) higher priority than 2)
+                    //as long as the dataset-level lock on D1 is held by the same jobId.
+                    //The above consideration is covered by the following code, which finds the
+                    //matching dataset-granule lock request, i.e., the (J1, D1) pair in the example.
+                    if (jobInfo != null && jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+                        if (dLockInfo.isCompatible(datasetLockMode)) {
+                            //this is duplicated call
+                            return true;
+                        }
+                    }
+                }
+
+                return false;
+            }
+        } else {
+            isUpgrade = isLockUpgrade(entityInfoManager.getDatasetLockMode(entityInfo), lockMode);
+            if (isUpgrade) { //upgrade call 
+                //fail if any upgrader exists or the upgrading lock mode is not compatible
+                if (dLockInfo.getFirstUpgrader() != -1 || !dLockInfo.isUpgradeCompatible(datasetLockMode, entityInfo)) {
+                    return false;
+                }
+            }
+            /************************************
+             * else { //duplicated call
+             * //do nothing
+             * }
+             *************************************/
+        }
+
+        return true;
+    }
+
+    private boolean instantTryLockEntityGranule(DatasetId datasetId, int entityHashValue, byte lockMode,
+            TransactionContext txnContext, DatasetLockInfo dLockInfo) throws ACIDException {
+        JobId jobId = txnContext.getJobId();
+        int jId = jobId.getId(); //int-type jobId
+        int waiterObjId;
+        int eLockInfo = -1;
+        int entityInfo;
+        boolean isUpgrade = false;
+
+        dLockInfo = datasetResourceHT.get(datasetId);
+        eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+
+        if (eLockInfo != -1) {
+            //check duplicated call
+
+            //1. a lock request causing duplicated upgrading requests from different threads in the same job
+            waiterObjId = entityLockInfoManager.findUpgraderFromUpgraderList(eLockInfo, jId, entityHashValue);
+            if (waiterObjId != -1) {
+                return false;
+            }
+
+            //2. a lock request causing duplicated waiting requests from different threads in the same job
+            waiterObjId = entityLockInfoManager.findWaiterFromWaiterList(eLockInfo, jId, entityHashValue);
+            if (waiterObjId != -1) {
+                return false;
+            }
+
+            //3. a lock request causing duplicated holding requests from different threads or a single thread in the same job
+            entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jId, entityHashValue);
+            if (entityInfo != -1) {//duplicated call or upgrader
+
+                isUpgrade = isLockUpgrade(entityInfoManager.getEntityLockMode(entityInfo), lockMode);
+                if (isUpgrade) {//upgrade call
+                    //fail if any upgrader exists or the upgrading lock mode is not compatible
+                    if (entityLockInfoManager.getUpgrader(eLockInfo) != -1
+                            || !entityLockInfoManager.isUpgradeCompatible(eLockInfo, lockMode, entityInfo)) {
+                        return false;
+                    }
+                }
+                /***************************
+                 * else {//duplicated call
+                 * //do nothing
+                 * }
+                 ****************************/
+            } else {//new call from this job, but still eLockInfo exists since other threads hold it or wait on it
+                if (entityLockInfoManager.getUpgrader(eLockInfo) != -1
+                        || entityLockInfoManager.getFirstWaiter(eLockInfo) != -1
+                        || !entityLockInfoManager.isCompatible(eLockInfo, lockMode)) {
+                    return false;
+                }
+            }
+        }
+        /*******************************
+         * else {//eLockInfo doesn't exist, so this lock request is the first request and can be granted without waiting.
+         * //do nothing
+         * }
+         *********************************/
+
+        return true;
     }
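internalInstantTryLock installs no lock state at all: it either proves the request would be granted right now or returns false, so a successful call needs no matching unlock. Its decision order, condensed from the hunks above:

latchLockTable();
try {
    validateJob(txnContext);
    DatasetLockInfo dLockInfo = datasetResourceHT.get(datasetId);
    if (dLockInfo == null || dLockInfo.isNoHolder()) {
        return true; //nothing is held: trivially grantable
    }
    byte datasetLockMode = entityHashValue == -1 ? lockMode
            : lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
    //IS requests skip the dataset-level check: X is only taken during DDL,
    //and during DDL all other operations are pending
    boolean ok = datasetLockMode == LockMode.IS
            || instantTryLockDatasetGranule(datasetId, entityHashValue, lockMode,
                    txnContext, dLockInfo, datasetLockMode);
    if (ok && entityHashValue != -1) {
        ok = instantTryLockEntityGranule(datasetId, entityHashValue, lockMode, txnContext, dLockInfo);
    }
    return ok;
} finally {
    unlatchLockTable();
}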
 
     private boolean internalTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
@@ -1007,137 +1214,140 @@
         DatasetLockInfo dLockInfo = null;
         JobInfo jobInfo;
         byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
-        boolean isSuccess = false;
+        boolean isSuccess = true;
         boolean doEscalate = false;
 
         latchLockTable();
-        validateJob(txnContext);
+        try {
+            validateJob(txnContext);
 
-        if (IS_DEBUG_MODE) {
-            trackLockRequest("Requested", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
-                    dLockInfo, eLockInfo);
-        }
-
-        dLockInfo = datasetResourceHT.get(datasetId);
-        jobInfo = jobHT.get(jobId);
-
-        if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-            if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
-                int upgradeStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
-                switch (upgradeStatus) {
-                    case DO_ESCALATE:
-                        entityHashValue = -1;
-                        doEscalate = true;
-                        break;
-
-                    case ESCALATED:
-                        unlatchLockTable();
-                        return true;
-
-                    default:
-                        break;
-                }
-            }
-        }
-
-        //#. if the datasetLockInfo doesn't exist in datasetResourceHT 
-        if (dLockInfo == null || dLockInfo.isNoHolder()) {
-            if (dLockInfo == null) {
-                dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
-                datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datsetId obj should be created
-            }
-            entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
-
-            //if dataset-granule lock
-            if (entityHashValue == -1) { //-1 stands for dataset-granule
-                entityInfoManager.increaseDatasetLockCount(entityInfo);
-                dLockInfo.increaseLockCount(datasetLockMode);
-                dLockInfo.addHolder(entityInfo);
-            } else {
-                entityInfoManager.increaseDatasetLockCount(entityInfo);
-                dLockInfo.increaseLockCount(datasetLockMode);
-                //add entityLockInfo
-                eLockInfo = entityLockInfoManager.allocate();
-                dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
-                entityInfoManager.increaseEntityLockCount(entityInfo);
-                entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
-                entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+            if (IS_DEBUG_MODE) {
+                trackLockRequest("Requested", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                        dLockInfo, eLockInfo);
             }
 
-            if (jobInfo == null) {
-                jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
-                jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
-            }
-            jobInfo.addHoldingResource(entityInfo);
+            dLockInfo = datasetResourceHT.get(datasetId);
+            jobInfo = jobHT.get(jobId);
 
             if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-                if (!isInstant && datasetLockMode == LockMode.IS) {
-                    jobInfo.increaseDatasetISLockCount(dId);
+                if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
+                    int upgradeStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
+                    switch (upgradeStatus) {
+                        case DO_ESCALATE:
+                            entityHashValue = -1;
+                            doEscalate = true;
+                            break;
+
+                        case ESCALATED:
+                            return true;
+
+                        default:
+                            break;
+                    }
+                }
+            }
+
+            //#. if the datasetLockInfo doesn't exist in datasetResourceHT 
+            if (dLockInfo == null || dLockInfo.isNoHolder()) {
+                if (dLockInfo == null) {
+                    dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
+                    datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datasetId obj should be created
+                }
+                entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
+
+                //if dataset-granule lock
+                if (entityHashValue == -1) { //-1 stands for dataset-granule
+                    entityInfoManager.increaseDatasetLockCount(entityInfo);
+                    dLockInfo.increaseLockCount(datasetLockMode);
+                    dLockInfo.addHolder(entityInfo);
+                } else {
+                    entityInfoManager.increaseDatasetLockCount(entityInfo);
+                    dLockInfo.increaseLockCount(datasetLockMode);
+                    //add entityLockInfo
+                    eLockInfo = entityLockInfoManager.allocate();
+                    dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
+                    entityInfoManager.increaseEntityLockCount(entityInfo);
+                    entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
+                    entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+                }
+
+                if (jobInfo == null) {
+                    jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
+                    jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
+                }
+                jobInfo.addHoldingResource(entityInfo);
+
+                if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+                    if (!isInstant && datasetLockMode == LockMode.IS) {
+                        jobInfo.increaseDatasetISLockCount(dId);
+                        if (doEscalate) {
+                            //This exception is thrown when the threshold value is set to 1.
+                            //We don't want to allow lock escalation on the very first lock request for a dataset.
+                            throw new IllegalStateException(
+                                    "ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
+                                            + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+                        }
+                    }
+                }
+
+                if (IS_DEBUG_MODE) {
+                    trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                            dLockInfo, eLockInfo);
+                }
+
+                return true;
+            }
+
+            //#. the datasetLockInfo exists in datasetResourceHT.
+            //1. handle dataset-granule lock
+            tryLockDatasetGranuleRevertOperation = 0;
+            entityInfo = tryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
+            if (entityInfo == -2) {//-2 represents failure
+                isSuccess = false;
+            } else {
+                //2. handle entity-granule lock
+                if (entityHashValue != -1) {
+                    isSuccess = tryLockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
+                    if (!isSuccess) {
+                        revertTryLockDatasetGranuleOperation(datasetId, entityHashValue, lockMode, entityInfo,
+                                txnContext);
+                    }
+                }
+            }
+
+            if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+                if (!isInstant) {
                     if (doEscalate) {
-                        //This exception is thrown when the threshold value is set to 1.
-                        //We don't want to allow the lock escalation when there is a first lock request on a dataset. 
-                        throw new IllegalStateException("ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
-                                + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+                        //jobInfo must not be null.
+                        assert jobInfo != null;
+                        jobInfo.increaseDatasetISLockCount(dId);
+                        //release pre-acquired locks
+                        releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
+                    } else if (datasetLockMode == LockMode.IS) {
+                        if (jobInfo == null) {
+                            jobInfo = jobHT.get(jobId);
+                            //jobInfo must not be null;
+                            assert jobInfo != null;
+                        }
+                        jobInfo.increaseDatasetISLockCount(dId);
                     }
                 }
             }
 
             if (IS_DEBUG_MODE) {
-                trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
-                        dLockInfo, eLockInfo);
+                if (isSuccess) {
+                    trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                            dLockInfo, eLockInfo);
+                } else {
+                    trackLockRequest("Failed", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+                            dLockInfo, eLockInfo);
+                }
             }
 
+        } finally {
             unlatchLockTable();
-            return true;
         }
 
-        //#. the datasetLockInfo exists in datasetResourceHT.
-        //1. handle dataset-granule lock
-        tryLockDatasetGranuleRevertOperation = 0;
-        entityInfo = tryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
-        if (entityInfo == -2) {//-2 represents fail
-            isSuccess = false;
-        } else {
-            //2. handle entity-granule lock
-            if (entityHashValue != -1) {
-                isSuccess = tryLockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
-                if (!isSuccess) {
-                    revertTryLockDatasetGranuleOperation(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
-                }
-            }
-        }
-
-        if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
-            if (!isInstant) {
-                if (doEscalate) {
-                    //jobInfo must not be null.
-                    assert jobInfo != null;
-                    jobInfo.increaseDatasetISLockCount(dId);
-                    //release pre-acquired locks
-                    releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
-                } else if (datasetLockMode == LockMode.IS) {
-                    if (jobInfo == null) {
-                        jobInfo = jobHT.get(jobId);
-                        //jobInfo must not be null;
-                        assert jobInfo != null;
-                    }
-                    jobInfo.increaseDatasetISLockCount(dId);
-                }
-            }
-        }
-
-        if (IS_DEBUG_MODE) {
-            if (isSuccess) {
-                trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
-                        dLockInfo, eLockInfo);
-            } else {
-                trackLockRequest("Failed", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
-                        dLockInfo, eLockInfo);
-            }
-        }
-
-        unlatchLockTable();
-
         return isSuccess;
     }
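
The substantive change in this hunk is structural: the whole body of internalTryLock now runs inside try { ... } finally { unlatchLockTable(); }, so the lock-table latch is released on every exit path, including the early ESCALATED return and any exception thrown from validateJob. A minimal sketch of the discipline, assuming the latch behaves like an exclusive lock:

    import java.util.concurrent.locks.ReentrantLock;

    final class LatchDiscipline {
        private final ReentrantLock latch = new ReentrantLock(); // stands in for the lock-table latch

        boolean guarded(java.util.function.BooleanSupplier criticalSection) {
            latch.lock();                                   // latchLockTable()
            try {
                return criticalSection.getAsBoolean();      // may return early or throw
            } finally {
                latch.unlock();                             // unlatchLockTable() on every exit path
            }
        }
    }
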
 
@@ -1319,29 +1529,41 @@
             if (dLockInfo.getFirstUpgrader() != -1 || dLockInfo.getFirstWaiter() != -1
                     || !dLockInfo.isCompatible(datasetLockMode)) {
 
-                //[Notice]
-                //There has been no same caller as (jId, dId, entityHashValue) triplet.
-                //But there could be the same caller as (jId, dId) pair.
-                //For example, two requests (J1, D1, E1) and (J1, D1, E2) are considered as duplicated call in dataset-granule perspective.
-                //Therefore, the above duplicated call case is covered in the following code.
-                //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
-                if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
-                    if (dLockInfo.isCompatible(datasetLockMode)) {
-                        //this is duplicated call
-                        entityInfoManager.increaseDatasetLockCount(entityInfo);
-                        if (entityHashValue == -1) {
-                            dLockInfo.increaseLockCount(datasetLockMode);
-                            dLockInfo.addHolder(entityInfo);
-                        } else {
-                            dLockInfo.increaseLockCount(datasetLockMode);
-                            //IS and IX holders are implicitly handled.
+                if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+                    //The following case may only occur when a dataset-level X lock is requested
+                    //concurrently with other lock requests.
+
+                    //[Notice]
+                    //There has been no identical caller for the (jId, dId, entityHashValue) triplet.
+                    //But there could be the same caller in terms of the (jId, dId) pair.
+                    //For example, 
+                    //1) (J1, D1, E1) acquires IS in Dataset D1
+                    //2) (J2, D1, -1) requests X  in Dataset D1, but waits
+                    //3) (J1, D1, E2) requests IS in Dataset D1, but should wait 
+                    //Request 3) may cause a deadlock if 1) and 3) run in the same thread.
+                    //Even if (J1, D1, E1) and (J1, D1, E2) come from two different threads, instead of
+                    //aborting (J1, D1, E1) when the deadlock is detected, we give higher priority to 3) than to 2)
+                    //as long as the dataset-level lock on D1 is held by the same jobId.
+                    //The above consideration is covered in the following code.
+                    //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
+                    if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+                        if (dLockInfo.isCompatible(datasetLockMode)) {
+                            //this is duplicated call
+                            entityInfoManager.increaseDatasetLockCount(entityInfo);
+                            if (entityHashValue == -1) {
+                                dLockInfo.increaseLockCount(datasetLockMode);
+                                dLockInfo.addHolder(entityInfo);
+                            } else {
+                                dLockInfo.increaseLockCount(datasetLockMode);
+                                //IS and IX holders are implicitly handled.
+                            }
+                            //add entityInfo to JobInfo's holding-resource list
+                            jobInfo.addHoldingResource(entityInfo);
+
+                            tryLockDatasetGranuleRevertOperation = 1;
+
+                            return entityInfo;
                         }
-                        //add entityInfo to JobInfo's holding-resource list
-                        jobInfo.addHoldingResource(entityInfo);
-
-                        tryLockDatasetGranuleRevertOperation = 1;
-
-                        return entityInfo;
                     }
                 }
 
@@ -1560,38 +1782,38 @@
 
             latchWaitNotify();
             unlatchLockTable();
-            synchronized (waiter) {
-                unlatchWaitNotify();
-                while (waiter.needWait()) {
-                    try {
-                        if (IS_DEBUG_MODE) {
-                            System.out.println("" + Thread.currentThread().getName() + "\twaits("
-                                    + waiter.getWaiterCount() + "): WID(" + waiterId + "),EID("
-                                    + waiter.getEntityInfoSlot() + ")");
+            try {
+                synchronized (waiter) {
+                    unlatchWaitNotify();
+                    while (waiter.needWait()) {
+                        try {
+                            if (IS_DEBUG_MODE) {
+                                System.out.println("" + Thread.currentThread().getName() + "\twaits("
+                                        + waiter.getWaiterCount() + "): WID(" + waiterId + "),EID("
+                                        + waiter.getEntityInfoSlot() + ")");
+                            }
+                            waiter.wait();
+                        } catch (InterruptedException e) {
+                            //TODO: figure out the appropriate way to handle this exception
+                            e.printStackTrace();
+                            isInterruptedExceptionOccurred = true;
+                            waiter.setWait(false);
                         }
-                        waiter.wait();
-                    } catch (InterruptedException e) {
-                        //TODO figure-out what is the appropriate way to handle this exception
-                        e.printStackTrace();
-                        isInterruptedExceptionOccurred = true;
-                        waiter.setWait(false);
                     }
                 }
-            }
 
-            if (isInterruptedExceptionOccurred) {
-                throw new ACIDException("InterruptedException is caught");
+                if (isInterruptedExceptionOccurred) {
+                    throw new ACIDException("InterruptedException is caught");
+                }
+            } catch (Exception e) {
+                throw new LockMgrLatchHandlerException(e);
             }
 
             //waiter woke up -> remove/deallocate waiter object and abort if timeout
             latchLockTable();
 
             if (txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS || waiter.isVictim()) {
-                try {
-                    requestAbort(txnContext);
-                } finally {
-                    unlatchLockTable();
-                }
+                requestAbort(txnContext);
             }
 
             if (waiter.isFirstGetUp()) {
@@ -1635,11 +1857,7 @@
                 //deallocate the entityInfo
                 entityInfoManager.deallocate(entityInfo);
             }
-            try {
-                requestAbort(txnContext);
-            } finally {
-                unlatchLockTable();
-            }
+            requestAbort(txnContext);
         }
 
         return waiterCount;
@@ -1810,20 +2028,22 @@
         LockWaiter waiterObj;
 
         latchLockTable();
+        try {
 
-        Iterator<Entry<JobId, JobInfo>> iter = jobHT.entrySet().iterator();
-        while (iter.hasNext()) {
-            Map.Entry<JobId, JobInfo> pair = (Map.Entry<JobId, JobInfo>) iter.next();
-            jobInfo = pair.getValue();
-            waiterObjId = jobInfo.getFirstWaitingResource();
-            while (waiterObjId != -1) {
-                waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
-                toutDetector.checkAndSetVictim(waiterObj);
-                waiterObjId = waiterObj.getNextWaiterObjId();
+            Iterator<Entry<JobId, JobInfo>> iter = jobHT.entrySet().iterator();
+            while (iter.hasNext()) {
+                Map.Entry<JobId, JobInfo> pair = (Map.Entry<JobId, JobInfo>) iter.next();
+                jobInfo = pair.getValue();
+                waiterObjId = jobInfo.getFirstWaitingResource();
+                while (waiterObjId != -1) {
+                    waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+                    toutDetector.checkAndSetVictim(waiterObj);
+                    waiterObjId = waiterObj.getNextWaiterObjId();
+                }
             }
+        } finally {
+            unlatchLockTable();
         }
-
-        unlatchLockTable();
     }
 }
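
The sweep above walks, for every job, an intrusive waiter chain threaded through integer slot ids and terminated by -1, now under try/finally so the latch cannot leak. A self-contained sketch of the traversal; the two maps are hypothetical stand-ins for jobHT and lockWaiterManager:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.IntConsumer;
    import java.util.function.IntPredicate;

    final class TimeoutSweepSketch {
        final Map<Integer, Integer> firstWaiterOfJob = new HashMap<>(); // jobId -> first waiter slot, or -1
        final Map<Integer, Integer> nextWaiter = new HashMap<>();       // waiter slot -> next slot, or -1

        // Visit every waiter of every job and mark timed-out waiters as victims,
        // mirroring toutDetector.checkAndSetVictim(waiterObj) above.
        void sweep(IntPredicate isTimedOut, IntConsumer setVictim) {
            for (int first : firstWaiterOfJob.values()) {
                for (int w = first; w != -1; w = nextWaiter.getOrDefault(w, -1)) {
                    if (isTimedOut.test(w)) {
                        setVictim.accept(w);
                    }
                }
            }
        }
    }
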
 
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java
new file mode 100644
index 0000000..05a582c
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2012-2014 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.transaction.management.service.locking;
+
+import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
+
+public class LockMgrLatchHandlerException extends ACIDException {
+
+    private static final long serialVersionUID = 1203182080428864199L;
+    private final Exception internalException;
+
+    public LockMgrLatchHandlerException(Exception e) {
+        super(e);
+        this.internalException = e;
+    }
+
+    public Exception getInternalException() {
+        return internalException;
+    }
+}
\ No newline at end of file
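
A hedged usage sketch of the new exception, mirroring the waiter-loop change above: any failure raised while the thread blocks outside the lock-table latch is wrapped so callers can distinguish latch-handling failures from ordinary ACIDExceptions. Here stillMustWait stands in for waiter.needWait(), the class is assumed to sit in the same package as the exception, and the sketch omits the production loop's retry after an InterruptedException:

    final class WaiterSketch {
        boolean stillMustWait(Object w) { return false; }   // stand-in for waiter.needWait()

        void awaitWakeUp(Object waiter) throws ACIDException {
            try {
                synchronized (waiter) {
                    while (stillMustWait(waiter)) {
                        waiter.wait();                      // may throw InterruptedException
                    }
                }
            } catch (Exception e) {
                // wrap so the caller can tell latch-handling failures apart,
                // recovering the cause via getInternalException()
                throw new LockMgrLatchHandlerException(e);
            }
        }
    }
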
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
index 4e96d2e..9b8f09c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
@@ -226,8 +226,7 @@
     }
 
     public int getLogPageIndex(long lsnValue) {
-        return (int) ((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages;
-
+        return (int) (((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages);
     }
 
     /*
@@ -236,7 +235,6 @@
      */
     public int getLogFileId(long lsnValue) {
         return (int) ((lsnValue) / logManagerProperties.getLogPartitionSize());
-
     }
 
     /*
@@ -244,7 +242,7 @@
      * record is (to be) placed.
      */
     public int getLogPageOffset(long lsnValue) {
-        return (int) (lsnValue - startingLSN) % logManagerProperties.getLogPageSize();
+        return (int) ((lsnValue - startingLSN) % logManagerProperties.getLogPageSize());
     }
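
The two parenthesization changes above are bug fixes, not cosmetics: a cast binds tighter than %, so the old code truncated the 64-bit LSN arithmetic to int before taking the modulo. A quick demonstration, assuming startingLSN = 0 for the example:

    public final class PrecedenceDemo {
        public static void main(String[] args) {
            long startingLSN = 0L;
            long lsn = 2_147_483_653L;                      // just past Integer.MAX_VALUE
            int pageSize = 128 * 1024;                      // DEFAULT_LOG_PAGE_SIZE

            int buggy = (int) (lsn - startingLSN) % pageSize;   // cast first: -131067
            int fixed = (int) ((lsn - startingLSN) % pageSize); // modulo first: 5
            System.out.println(buggy + " vs " + fixed);
        }
    }
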
 
     /*
@@ -524,9 +522,9 @@
      * This method resets the log page and is called by the log flusher thread
      * after a page has been flushed to disk.
      */
-    public void resetLogPage(long nextWritePosition, int pageIndex) throws IOException {
+    public void resetLogPage(long lsn, long nextWritePosition, int pageIndex) throws IOException {
 
-        String filePath = LogUtil.getLogFilePath(logManagerProperties, getLogFileId(nextWritePosition));
+        String filePath = LogUtil.getLogFilePath(logManagerProperties, getLogFileId(lsn));
 
         logPages[pageIndex].reset(filePath, LogUtil.getFileOffset(this, nextWritePosition),
                 logManagerProperties.getLogPageSize());
@@ -906,19 +904,6 @@
 
                 synchronized (logManager.getLogPage(pageToFlush)) {
 
-                    // lock the internal state of the log manager and create a
-                    // log file if necessary.
-                    int prevLogFileId = logManager.getLogFileId(logManager.getLastFlushedLsn().get());
-                    int nextLogFileId = logManager.getLogFileId(logManager.getLastFlushedLsn().get()
-                            + logManager.getLogManagerProperties().getLogPageSize());
-                    if (prevLogFileId != nextLogFileId) {
-                        String filePath = LogUtil.getLogFilePath(logManager.getLogManagerProperties(), nextLogFileId);
-                        FileUtil.createFileIfNotExists(filePath);
-                        logManager.getLogPage(pageToFlush).reset(
-                                LogUtil.getLogFilePath(logManager.getLogManagerProperties(), nextLogFileId), 0,
-                                logManager.getLogManagerProperties().getLogPageSize());
-                    }
-
                     // #. sleep during the groupCommitWaitTime
                     sleep(groupCommitWaitPeriod);
 
@@ -941,6 +926,13 @@
                     // the log page)
                     logManager.getLogPage(pageToFlush).flush();
 
+                    // Map the log page to a new region in the log file.
+                    long nextWritePosition = logManager.getLogPages()[pageToFlush].getNextWritePosition()
+                            + logManager.getLogManagerProperties().getLogBufferSize();
+
+                    logManager.resetLogPage(logManager.getLastFlushedLsn().get() + 1
+                            + logManager.getLogManagerProperties().getLogBufferSize(), nextWritePosition, pageToFlush);
+
                     // increment the last flushed lsn and lastFlushedPage
                     logManager.incrementLastFlushedLsn(logManager.getLogManagerProperties().getLogPageSize());
 
@@ -950,12 +942,6 @@
                     // reset the count to 1
                     logManager.getLogPageOwnershipCount(pageToFlush).set(PageOwnershipStatus.LOG_WRITER);
 
-                    // Map the log page to a new region in the log file.
-                    long nextWritePosition = logManager.getLogPages()[pageToFlush].getNextWritePosition()
-                            + logManager.getLogManagerProperties().getLogBufferSize();
-
-                    logManager.resetLogPage(nextWritePosition, pageToFlush);
-
                     // mark the page as ACTIVE
                     logManager.getLogPageStatus(pageToFlush).set(LogManager.PageState.ACTIVE);
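
The flusher change does two things: it drops the speculative create-next-log-file block at the top of the loop, and it remaps the just-flushed page immediately after the flush, deriving the target log file from the first LSN the recycled page will hold rather than from the raw write position. A sketch of that LSN arithmetic; the helper is illustrative, with page and buffer sizes as in LogManagerProperties:

    final class RemapSketch {
        static long nextLsnForRecycledPage(long lastFlushedLsn, int logPageSize, int numLogPages) {
            int logBufferSize = logPageSize * numLogPages;  // the in-memory ring of pages
            // The recycled page's first LSN lies one full buffer ahead of the
            // LSN right after the last flushed one; resetLogPage(lsn, ...) now
            // derives the target log file from this value, so a page that wraps
            // into the next log partition lands in the correct file.
            return lastFlushedLsn + 1 + logBufferSize;
        }
    }
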
 
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
index 0211f69..5040fa9 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
@@ -32,7 +32,7 @@
     private static final int DEFAULT_LOG_PAGE_SIZE = 128 * 1024; //128KB
     private static final int DEFAULT_NUM_LOG_PAGES = 8;
     private static final long DEFAULT_LOG_PARTITION_SIZE = (long) 1024 * 1024 * 1024 * 2; //2GB 
-    private static final long DEFAULT_GROUP_COMMIT_WAIT_PERIOD = 1; // time in millisec.
+    private static final long DEFAULT_GROUP_COMMIT_WAIT_PERIOD = 200; // time in millisec.
     private static final String DEFAULT_LOG_FILE_PREFIX = "asterix_transaction_log";
     private static final String DEFAULT_LOG_DIRECTORY = "asterix_logs/";
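
Raising DEFAULT_GROUP_COMMIT_WAIT_PERIOD from 1 ms to 200 ms widens the batching window: the flusher sleeps for the period so that many transactions' commit records share one disk force instead of paying one force each. An illustrative loop; the LogPage interface here is hypothetical, not the real LogManager API:

    final class GroupCommitSketch {
        interface LogPage {                                 // hypothetical page API
            boolean hasPendingCommits();
            void flush();                                   // one force amortized over the batch
            void notifyWaitingCommitters();
        }

        void run(LogPage page, long groupCommitWaitPeriod) throws InterruptedException {
            while (!Thread.currentThread().isInterrupted()) {
                Thread.sleep(groupCommitWaitPeriod);        // batch window: was 1 ms, now 200 ms
                if (page.hasPendingCommits()) {
                    page.flush();
                    page.notifyWaitingCommitters();
                }
            }
        }
    }
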
 
diff --git a/pom.xml b/pom.xml
index 2d0922f..b424195 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,8 +7,8 @@
 	<packaging>pom</packaging>
 
     <properties>
-    	<algebricks.version>0.2.4</algebricks.version>
-    	<hyracks.version>0.2.4</hyracks.version>
+    	<algebricks.version>0.2.5-SNAPSHOT</algebricks.version>
+    	<hyracks.version>0.2.5-SNAPSHOT</hyracks.version>
     </properties>
 
 	<build>