commit from asterix_lsm_stabilization
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c806aca
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,6 @@
+target
+.classpath
+.settings
+.project
+ClusterControllerService
+asterix-app/rttest
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 976ec7c..d941289 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -518,6 +518,7 @@
String dataverseName = null;
String datasetName = null;
String indexName = null;
+ JobSpecification spec = null;
try {
CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
dataverseName = stmtCreateIndex.getDataverseName() == null ? activeDefaultDataverse == null ? null
@@ -557,7 +558,7 @@
//#. create the index artifact in NC.
CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
- JobSpecification spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
+ spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
if (spec == null) {
throw new AsterixException("Failed to create job spec for creating index '"
+ stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
@@ -599,37 +600,38 @@
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //#. execute compensation operations
- // remove the index in NC
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- bActiveTxn = true;
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
- try {
- JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- bActiveTxn = false;
+ if (spec != null) {
+ //#. execute compensation operations
+ // remove the index in NC
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ bActiveTxn = true;
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+ try {
+ JobSpecification jobSpec = IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ bActiveTxn = false;
- runJob(hcc, jobSpec, true);
- } catch (Exception e3) {
- if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ runJob(hcc, jobSpec, true);
+ } catch (Exception e3) {
+ if (bActiveTxn) {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ }
+ //do not throw exception since the metadata still needs to be compensated.
}
- //do no throw exception since still the metadata needs to be compensated.
- }
- // remove the record from the metadata.
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- try {
- MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
- datasetName, indexName);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- } catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ // remove the record from the metadata.
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ try {
+ MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+ datasetName, indexName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ throw new AlgebricksException(e2);
+ }
}
-
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
diff --git a/asterix-app/src/main/resources/log.properties b/asterix-app/src/main/resources/log.properties
new file mode 100644
index 0000000..ee8040a
--- /dev/null
+++ b/asterix-app/src/main/resources/log.properties
@@ -0,0 +1 @@
+group_commit_wait_period=1
\ No newline at end of file
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index 0dbf6d4..299c43c 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -231,7 +231,7 @@
List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
for (CompilationUnit cUnit : cUnits) {
- LOGGER.info("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
+ LOGGER.severe("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
testFileCtxs = tcCtx.getTestFiles(cUnit);
expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
new file mode 100644
index 0000000..6ba4d86
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_1/issue_272_create_index_error_1.1.ddl.aql
@@ -0,0 +1,13 @@
+/*
+ * Description : create an index on a non-existent dataset
+ * Expected Res : Failure
+ * Date : 14 April 2013
+ * Issue : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+create index loc_index on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
new file mode 100644
index 0000000..a1f9771
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_272_create_index_error_2/issue_272_create_index_error_2.1.ddl.aql
@@ -0,0 +1,14 @@
+/*
+ * Description : create an index on a non-existent dataset
+ * Expected Res : Failure
+ * Date : 14 April 2013
+ * Issue : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create index loc_index if not exists on Foo(name);
+
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index d4fb915..17d7bf2 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -212,6 +212,18 @@
<expected-error>AsterixException</expected-error>
</compilation-unit>
</test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_272_create_index_error_1">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_272_create_index_error_2">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="transaction">
<test-case FilePath="transaction">
@@ -233,4 +245,4 @@
</compilation-unit>
</test-case>
</test-group>
-</test-suite>
+</test-suite>
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/interval/interval.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/interval/interval.3.query.aql
index ebb3d2d..f30fdd7 100644
--- a/asterix-app/src/test/resources/runtimets/queries/constructor/interval/interval.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/interval/interval.3.query.aql
@@ -1,11 +1,41 @@
use dataverse test;
-let $itv1 := interval-from-date("2010-10-30", "2012-10-21")
-let $itv2 := interval-from-time("03:04:05.678-11:00", "232425267+0200")
-let $itv3 := interval-from-datetime("-1987-11-19T02:43:57.938+08:00", "19991112T124935948-0700")
-let $itv4 := interval-start-from-date("0001-12-27", "P3Y394DT48H398.483S")
-let $itv5 := interval-start-from-time("20:03:20.948", "P60DT48M389.938S")
-let $itv6 := interval-start-from-datetime("-2043-11-19T15:32:39.293", "P439Y3M20DT20H39M58.949S")
+let $itv11 := interval-from-date(date("2010-10-30"), date("2012-10-21"))
+let $itv12 := interval-from-date("2010-10-30", date("2012-10-21"))
+let $itv13 := interval-from-date(date("2010-10-30"), "2012-10-21")
+let $itv14 := interval-from-date("2010-10-30", "2012-10-21")
+let $itv15 := interval-from-date(null, "2012-10-21")
+let $itv16 := interval-from-date("2010-10-30", null)
+let $itv21 := interval-from-time(time("03:04:05.678-11:00"), time("232425267+0200"))
+let $itv22 := interval-from-time("03:04:05.678-11:00", time("232425267+0200"))
+let $itv23 := interval-from-time(time("03:04:05.678-11:00"), "232425267+0200")
+let $itv24 := interval-from-time("03:04:05.678-11:00", "232425267+0200")
+let $itv25 := interval-from-time(null, time("232425267+0200"))
+let $itv26 := interval-from-time(time("03:04:05.678-11:00"), null)
+let $itv31 := interval-from-datetime(datetime("-1987-11-19T02:43:57.938+08:00"), datetime("19991112T124935948-0700"))
+let $itv32 := interval-from-datetime("-1987-11-19T02:43:57.938+08:00", datetime("19991112T124935948-0700"))
+let $itv33 := interval-from-datetime(datetime("-1987-11-19T02:43:57.938+08:00"), "19991112T124935948-0700")
+let $itv34 := interval-from-datetime("-1987-11-19T02:43:57.938+08:00", "19991112T124935948-0700")
+let $itv35 := interval-from-datetime(null, datetime("19991112T124935948-0700"))
+let $itv36 := interval-from-datetime(datetime("-1987-11-19T02:43:57.938+08:00"), null)
+let $itv41 := interval-start-from-date(date("0001-12-27"), duration("P3Y394DT48H398.483S"))
+let $itv42 := interval-start-from-date("0001-12-27", duration("P3Y394DT48H398.483S"))
+let $itv43 := interval-start-from-date(date("0001-12-27"), "P3Y394DT48H398.483S")
+let $itv44 := interval-start-from-date("0001-12-27", "P3Y394DT48H398.483S")
+let $itv45 := interval-start-from-date(null, duration("P3Y394DT48H398.483S"))
+let $itv46 := interval-start-from-date(date("0001-12-27"), null)
+let $itv51 := interval-start-from-time(time("20:03:20.948"), duration("P60DT48M389.938S"))
+let $itv52 := interval-start-from-time("20:03:20.948", duration("P60DT48M389.938S"))
+let $itv53 := interval-start-from-time(time("20:03:20.948"), "P60DT48M389.938S")
+let $itv54 := interval-start-from-time("20:03:20.948", "P60DT48M389.938S")
+let $itv55 := interval-start-from-time(null, duration("P60DT48M389.938S"))
+let $itv56 := interval-start-from-time(time("20:03:20.948"), null)
+let $itv61 := interval-start-from-datetime(datetime("-2043-11-19T15:32:39.293"), duration("P439Y3M20DT20H39M58.949S"))
+let $itv62 := interval-start-from-datetime("-2043-11-19T15:32:39.293", duration("P439Y3M20DT20H39M58.949S"))
+let $itv63 := interval-start-from-datetime(datetime("-2043-11-19T15:32:39.293"), "P439Y3M20DT20H39M58.949S")
+let $itv64 := interval-start-from-datetime("-2043-11-19T15:32:39.293", "P439Y3M20DT20H39M58.949S")
+let $itv65 := interval-start-from-datetime(null, duration("P439Y3M20DT20H39M58.949S"))
+let $itv66 := interval-start-from-datetime(datetime("-2043-11-19T15:32:39.293"), null)
-return {"interval1": $itv1, "interval2": $itv2, "interval3": $itv3, "interval4": $itv4, "interval5": $itv5, "interval6": $itv6}
+return {"interval11": $itv11, "interval12": $itv12, "interval13": $itv13, "interval14": $itv14, "interval15": $itv15, "interval16": $itv16, "interval21": $itv21, "interval22": $itv22, "interval23": $itv23, "interval24": $itv24, "interval25": $itv25, "interval26": $itv26, "interval31": $itv31, "interval32": $itv32, "interval33": $itv33, "interval34": $itv34, "interval35": $itv35, "interval36": $itv36, "interval41": $itv41, "interval42": $itv42, "interval43": $itv43, "interval44": $itv44, "interval45": $itv45, "interval46": $itv46, "interval51": $itv51, "interval52": $itv52, "interval53": $itv53, "interval54": $itv54, "interval55": $itv55, "interval56": $itv56, "interval61": $itv61, "interval62": $itv62, "interval63": $itv63, "interval64": $itv64, "interval65": $itv65, "interval66": $itv66}
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql
new file mode 100644
index 0000000..efde712
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.1.ddl.aql
@@ -0,0 +1,10 @@
+/*
+ * Description : create a rectangle constructor
+ * Expected Res : Success
+ * Date : 18 April 2013
+ * Issue : 272
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql
new file mode 100644
index 0000000..f7e538e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.2.update.aql
@@ -0,0 +1,7 @@
+/*
+ * Description : create a rectangle constructor
+ * Expected Res : Success
+ * Date : 18 April 2013
+ * Issue : 272
+ */
+
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql
new file mode 100644
index 0000000..696746f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/constructor/rectangle_01/rectangle_01.3.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Description : create a rectangle constructor
+ * Expected Res : Success
+ * Date : 18 April 2013
+ * Issue : 272
+ */
+
+use dataverse test;
+
+let $r1 := rectangle("5.1,11.8 87.6,15.6548")
+let $r2 := rectangle("0.1234,-1.00e-10 5.5487,0.48765")
+return {"rectangle1": $r1,"rectangle2": $r2}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
index d8ab247..381ad3f 100644
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
@@ -14,4 +14,6 @@
create dataset MyData(MyRecord)
primary key id;
+
+create index rtree_index_loc on MyData(loc) type rtree;
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors/accessors.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors/accessors.3.query.aql
index 2586d2a..c41d37d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors/accessors.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors/accessors.3.query.aql
@@ -9,4 +9,4 @@
let $c7 := string("-0003-01-09T23:12:12.39-07:00")
let $c8 := duration("P3Y73M632DT49H743M3948.94S")
-return {"year1": year($c1), "year2": year($c2), "year3": year($c3), "year4": year($c4), "year5": year($c5), "year6": year($c7), "year7": year($c8), "month1": month($c1), "month2": month($c2), "month3": month($c3), "month4": month($c4), "month5": month($c5), "month6": month($c8), "day1": day($c1), "day2": day($c2), "day3": day($c3), "day4": day($c4), "day5": day($c5), "day6": day($c8), "hour1": hour($c2), "hour2": hour($c5), "hour3": hour($c6), "hour4": hour($c8), "min1": minute($c2), "min2": minute($c5), "min3": minute($c6), "min4": minute($c8), "second1": second($c2), "second2": second($c5), "second3": second($c6), "second4": second($c8), "ms1": millisecond($c2), "ms2": millisecond($c5), "ms3": millisecond($c6), "ms4": millisecond($c8)}
+return {"year1": year($c1), "year2": year($c2), "year3": year($c3), "year4": year($c4), "year5": year($c5), "year6": year($c7), "year7": year($c8), "year-null": year(null), "month1": month($c1), "month2": month($c2), "month3": month($c3), "month4": month($c4), "month5": month($c5), "month6": month($c8), "month-null": month(null), "day1": day($c1), "day2": day($c2), "day3": day($c3), "day4": day($c4), "day5": day($c5), "day6": day($c8), "day-null": day(null), "hour1": hour($c2), "hour2": hour($c5), "hour3": hour($c6), "hour4": hour($c8), "hour-null": hour(null), "min1": minute($c2), "min2": minute($c5), "min3": minute($c6), "min4": minute($c8), "min-null": minute(null), "second1": second($c2), "second2": second($c5), "second3": second($c6), "second4": second($c8), "second-null": second(null), "ms1": millisecond($c2), "ms2": millisecond($c5), "ms3": millisecond($c6), "ms4": millisecond($c8), "ms-null": millisecond(null) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.1.ddl.aql
new file mode 100644
index 0000000..e6a3879
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.1.ddl.aql
@@ -0,0 +1,2 @@
+drop dataverse test if exists;
+create dataverse test;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.3.query.aql
new file mode 100644
index 0000000..b703377
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval/accessors_interval.3.query.aql
@@ -0,0 +1,7 @@
+use dataverse test;
+
+let $interval1 := interval-from-date(date("2010-10-30"), "2013-04-01")
+let $interval2 := interval-from-time("08:09:10.234Z", time("203040567+0800"))
+let $interval3 := interval-from-datetime("2009-09-01T00:00:00.000+08:00", datetime-from-date-time(date("2013-04-04"), time("00:00:00.000+08:00")))
+
+return {"start1": get-interval-start($interval1), "end1": get-interval-end($interval1), "start2": get-interval-start($interval2), "end2": get-interval-end($interval2), "start3": get-interval-start($interval3), "end3": get-interval-end($interval3) }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.1.ddl.aql
new file mode 100644
index 0000000..e6a3879
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.1.ddl.aql
@@ -0,0 +1,2 @@
+drop dataverse test if exists;
+create dataverse test;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.2.update.aql
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.2.update.aql
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.3.query.aql
new file mode 100644
index 0000000..98bead8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/accessors_interval_null/accessors_interval_null.3.query.aql
@@ -0,0 +1,3 @@
+use dataverse test;
+
+{"start-null-interval": get-interval-start(null), "end-null-interval": get-interval-end(null) }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/adjust_timezone/adjust_timezone.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/adjust_timezone/adjust_timezone.3.query.aql
index a3959b0..c099d14 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/adjust_timezone/adjust_timezone.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/adjust_timezone/adjust_timezone.3.query.aql
@@ -4,4 +4,4 @@
let $dt1 := datetime("2010-10-23T01:12:13.329Z")
let $s1 := adjust-time-for-timezone($t1, "+0800")
let $s2 := adjust-datetime-for-timezone($dt1, "-0615")
-return { "string1" : $s1, "string2" : $s2 }
\ No newline at end of file
+return { "time" : $s1, "datetime" : $s2, "null1": adjust-time-for-timezone(null, "+0800"), "null2": adjust-time-for-timezone($t1, null), "null3": adjust-datetime-for-timezone(null, "-0800"), "null4": adjust-datetime-for-timezone($dt1, null) }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/calendar_duration/calendar_duration.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/calendar_duration/calendar_duration.3.query.aql
index cbf4b7f..291d296 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/calendar_duration/calendar_duration.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/calendar_duration/calendar_duration.3.query.aql
@@ -39,4 +39,4 @@
let $dtt8 := add-date-duration($t2, $cdr8)
let $c8 := $dt8 = $dtt8
-return { "cduration1":$cdr1, "c1":$c1, "cduration2":$cdr2, "c2":$c2, "cduration3":$cdr3, "c3":$c3, "cduration4":$cdr4, "c4":$c4, "cduration5":$cdr5, "c5":$c5, "cduration6":$cdr6, "c6":$c6, "cduration7":$cdr7, "c7":$c7, "cduration8":$cdr8, "c8":$c8 }
+return { "cduration1":$cdr1, "c1":$c1, "cduration2":$cdr2, "c2":$c2, "cduration3":$cdr3, "c3":$c3, "cduration4":$cdr4, "c4":$c4, "cduration5":$cdr5, "c5":$c5, "cduration6":$cdr6, "c6":$c6, "cduration7":$cdr7, "c7":$c7, "cduration8":$cdr8, "c8":$c8, "cduration-null-1": calendar-duration-from-datetime(null, $dr1), "cduration-null-2": calendar-duration-from-datetime($t1, null), "cduration-null-3": calendar-duration-from-date(null, $dr1), "cduration-null-4": calendar-duration-from-date($t2, null) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/date_functions/date_functions.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/date_functions/date_functions.3.query.aql
index 93e1366..b748429 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/date_functions/date_functions.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/date_functions/date_functions.3.query.aql
@@ -1,17 +1,23 @@
use dataverse test;
let $d1 := date-from-unix-time-in-days(15600)
+let $null1 := date-from-unix-time-in-days(null)
let $dt1 := datetime("1327-12-02T23:35:49.938Z")
let $d2 := date-from-datetime($dt1)
+let $null2 := date-from-datetime(null)
let $dt2 := datetime("2012-10-11T02:30:23+03:00")
let $d3 := date-from-datetime($dt2)
let $dr1 := duration("-P2Y1M90DT30H")
let $d4 := add-date-duration($d1, $dr1)
+let $null3 := add-date-duration(null, $dr1)
+let $null4 := add-date-duration($d1, null)
let $c1 := $d1 = add-date-duration($d4, subtract-date($d1, $d4))
let $dr2 := duration("P300Y900MT360000M")
let $d5 := add-date-duration($d2, $dr2)
let $c2 := $d2 = add-date-duration($d5, subtract-date($d2, $d5))
let $dr3 := subtract-date($d5, $d2)
let $dr4 := subtract-date($d4, $d1)
+let $null5 := subtract-date(null, $d2)
+let $null6 := subtract-date($d5, null)
-return { "date1" : $d1, "date2" : $d2, "date3" : $d3, "date4" : $d4, "date5" : $d5, "duration1" : $dr3, "duration2" : $dr4, "c1" : $c1, "c2" : $c2 }
\ No newline at end of file
+return { "date1": $d1, "date2": $d2, "date3": $d3, "date4": $d4, "date5": $d5, "duration1": $dr3, "duration2": $dr4, "c1": $c1, "c2": $c2, "null1": $null1, "null2": $null2, "null3": $null3, "null4": $null4, "null5": $null5, "null6": $null6 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/datetime_functions/datetime_functions.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/datetime_functions/datetime_functions.3.query.aql
index 6f3dde3..ea6c2a8 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/datetime_functions/datetime_functions.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/datetime_functions/datetime_functions.3.query.aql
@@ -1,11 +1,18 @@
use dataverse test;
let $dt1 := datetime-from-unix-time-in-ms(956007429)
+let $null1 := datetime-from-unix-time-in-ms(null)
let $d1 := date("1327-12-02")
let $t1 := time("15:35:49.938-0800")
let $dt2 := datetime-from-date-time($d1, $t1)
+let $null2 := datetime-from-date-time(null, $t1)
+let $null3 := datetime-from-date-time($d1, null)
let $dr1 := subtract-datetime($dt2, $dt1)
+let $null4 := subtract-datetime(null, $dt1)
+let $null5 := subtract-datetime($dt2, null)
let $dt3 := add-datetime-duration($dt1, $dr1)
+let $null6 := add-datetime-duration(null, $dr1)
+let $null7 := add-datetime-duration($dt1, null)
let $c1 := $dt1 = add-datetime-duration($dt3, subtract-datetime($dt1, $dt3))
-return { "datetime1" : $dt1, "datetime2" : $dt2, "datetime3" : $dt3, "duration1" : $dr1, "c1" : $c1 }
+return { "datetime1" : $dt1, "datetime2" : $dt2, "datetime3" : $dt3, "duration1" : $dr1, "c1" : $c1, "null1" : $null1, "null2" : $null2, "null3" : $null3, "null4" : $null4, "null5" : $null5, "null6" : $null6, "null7" : $null7 }
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/interval_functions/interval_functions.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/interval_functions/interval_functions.3.query.aql
index 1c773bc..1eb5f53 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/interval_functions/interval_functions.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/interval_functions/interval_functions.3.query.aql
@@ -46,4 +46,8 @@
let $blnEndedBy1 := interval-ended-by($itv7, $itv11)
let $blnEndedBy2 := interval-ended-by($itv8, $itv11)
-return { "before1" : $blnBefore1, "before2" : $blnBefore2, "after1" : $blnAfter1, "after2" : $blnAfter2, "meet1" : $blnMeet1, "meet2" : $blnMeet2, "metby1" : $blnMetBy1, "metby2" : $blnMetBy2, "overlaps1" : $blnOverlaps1, "overlaps2" : $blnOverlaps2, "overlapped1" : $blnOverlapped1, "overlapped2" : $blnOverlapped2, "overlap1" : $blnOverlap1, "overlap2" : $blnOverlap2, "starts1" : $blnStarts1, "starts2" : $blnStarts2, "startedby1" : $blnStartedBy1, "startedby2" : $blnStartedBy2, "covers1" : $blnCovers1, "covers2" : $blnCovers2, "coveredby1" : $blnCoveredBy1, "coveredby2" : $blnCoveredBy2, "ends1" : $blnEnds1, "ends2" : $blnEnds2, "endedby1" : $blnEndedBy1, "endedby2" : $blnEndedBy2 }
\ No newline at end of file
+let $null1 := interval-before(null, $itv2)
+let $null2 := interval-covered-by($itv11, null)
+let $null3 := overlap(null, null)
+
+return { "before1" : $blnBefore1, "before2" : $blnBefore2, "after1" : $blnAfter1, "after2" : $blnAfter2, "meet1" : $blnMeet1, "meet2" : $blnMeet2, "metby1" : $blnMetBy1, "metby2" : $blnMetBy2, "overlaps1" : $blnOverlaps1, "overlaps2" : $blnOverlaps2, "overlapped1" : $blnOverlapped1, "overlapped2" : $blnOverlapped2, "overlap1" : $blnOverlap1, "overlap2" : $blnOverlap2, "starts1" : $blnStarts1, "starts2" : $blnStarts2, "startedby1" : $blnStartedBy1, "startedby2" : $blnStartedBy2, "covers1" : $blnCovers1, "covers2" : $blnCovers2, "coveredby1" : $blnCoveredBy1, "coveredby2" : $blnCoveredBy2, "ends1" : $blnEnds1, "ends2" : $blnEnds2, "endedby1" : $blnEndedBy1, "endedby2" : $blnEndedBy2, "null1": $null1, "null2": $null2, "null3": $null3 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/temporal/time_functions/time_functions.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temporal/time_functions/time_functions.3.query.aql
index ca25b8b..c042df0 100644
--- a/asterix-app/src/test/resources/runtimets/queries/temporal/time_functions/time_functions.3.query.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/temporal/time_functions/time_functions.3.query.aql
@@ -1,20 +1,26 @@
use dataverse test;
let $t1 := time-from-unix-time-in-ms(1560074)
+let $null1 := time-from-unix-time-in-ms(null)
let $dt1 := datetime("1327-12-02T23:35:49.938Z")
let $t2 := time-from-datetime($dt1)
+let $null2 := time-from-datetime(null)
let $dt2 := datetime("2012-10-11T02:30:23+03:00")
let $t3 := time-from-datetime($dt2)
let $dr1 := duration("-PT30H")
let $t4 := add-time-duration($t1, $dr1)
+let $null3 := add-time-duration(null, $dr1)
+let $null4 := add-time-duration($t1, null)
let $c1 := $t1 = add-time-duration($t4, subtract-time($t1, $t4))
let $dr2 := duration("PT36M")
let $t5 := add-time-duration($t2, $dr2)
let $c2 := $t2 = add-time-duration($t5, subtract-time($t2, $t5))
let $dr3 := subtract-time($t5, $t2)
let $dr4 := subtract-time($t4, $t1)
+let $null5 := subtract-time(null, $t1)
+let $null6 := subtract-time($t4, null)
let $ct := current-time()
let $cd := current-date()
let $cdt := current-datetime()
-return { "time1" : $t1, "time2" : $t2, "time3" : $t3, "time4" : $t4, "time5" : $t5, "duration1" : $dr3, "duration2" : $dr4, "c1" : $c1, "c2" : $c2 }
+return { "time1" : $t1, "time2" : $t2, "time3" : $t3, "time4" : $t4, "time5" : $t5, "duration1" : $dr3, "duration2" : $dr4, "c1" : $c1, "c2" : $c2, "null1": $null1, "null2": $null2, "null3": $null3, "null4": $null4, "null5": $null5, "null6": $null6 }
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.1.ddl.aql
new file mode 100644
index 0000000..62bbd54
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.1.ddl.aql
@@ -0,0 +1,18 @@
+/*
+ * Description : Create UDF and terminate the statement with a ';'
+ * Expected Res : Success
+ * Date : Sep 6th 2012
+ */
+
+// this test is not giving expected results.
+// issue 194 reported to track this
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create function test.f1(){
+100
+};
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.2.query.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.2.query.aql
new file mode 100644
index 0000000..2e2e27c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf28/udf28.2.query.aql
@@ -0,0 +1,10 @@
+/*
+ * Description : Create a UDF but use ';' for terminating the create function statement. Look up metadata
+ * Expected Res : Success
+ * Date : Apr 5th 2013
+ */
+
+use dataverse test;
+for $x in dataset Metadata.Function
+where $x.DataverseName='test'
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.1.ddl.aql
new file mode 100644
index 0000000..edede00
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description : Declare UDF and terminate the statement with a ';'
+ * Expected Res : Success
+ * Date : Apr 10th 2013
+ */
+
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+declare function test.f1(){
+100
+};
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.2.query.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.2.query.aql
new file mode 100644
index 0000000..2f9e763
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/udf29/udf29.2.query.aql
@@ -0,0 +1,9 @@
+/*
+ * Description : Declare a UDF but use ';' for terminating the declare function statement. Invoke the function
+ * Expected Res : Success
+ * Date : Apr 10th 2013
+ */
+
+use dataverse test;
+let $x:=f1()
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/interval/interval.1.adm b/asterix-app/src/test/resources/runtimets/results/constructor/interval/interval.1.adm
index 893c3ff..c99ade9 100644
--- a/asterix-app/src/test/resources/runtimets/results/constructor/interval/interval.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/interval/interval.1.adm
@@ -1 +1 @@
-{ "interval1": interval-date("2010-10-30, 2012-10-21"), "interval2": interval-time("14:04:05.678Z, 21:24:25.267Z"), "interval3": interval-datetime("-1987-11-18T18:43:57.938Z, 1999-11-12T19:49:35.948Z"), "interval4": interval-date("0001-12-27, 0006-01-27"), "interval5": interval-time("20:03:20.948Z, 20:57:50.886Z"), "interval6": interval-datetime("-2043-11-19T15:32:39.293Z, -1603-03-12T12:12:38.242Z") }
\ No newline at end of file
+{ "interval11": interval-date("2010-10-30, 2012-10-21"), "interval12": interval-date("2010-10-30, 2012-10-21"), "interval13": interval-date("2010-10-30, 2012-10-21"), "interval14": interval-date("2010-10-30, 2012-10-21"), "interval15": null, "interval16": null, "interval21": interval-time("14:04:05.678Z, 21:24:25.267Z"), "interval22": interval-time("14:04:05.678Z, 21:24:25.267Z"), "interval23": interval-time("14:04:05.678Z, 21:24:25.267Z"), "interval24": interval-time("14:04:05.678Z, 21:24:25.267Z"), "interval25": null, "interval26": null, "interval31": interval-datetime("-1987-11-18T18:43:57.938Z, 1999-11-12T19:49:35.948Z"), "interval32": interval-datetime("-1987-11-18T18:43:57.938Z, 1999-11-12T19:49:35.948Z"), "interval33": interval-datetime("-1987-11-18T18:43:57.938Z, 1999-11-12T19:49:35.948Z"), "interval34": interval-datetime("-1987-11-18T18:43:57.938Z, 1999-11-12T19:49:35.948Z"), "interval35": null, "interval36": null, "interval41": interval-date("0001-12-27, 0006-01-27"), "interval42": interval-date("0001-12-27, 0006-01-27"), "interval43": interval-date("0001-12-27, 0006-01-27"), "interval44": interval-date("0001-12-27, 0006-01-27"), "interval45": null, "interval46": null, "interval51": interval-time("20:03:20.948Z, 20:57:50.886Z"), "interval52": interval-time("20:03:20.948Z, 20:57:50.886Z"), "interval53": interval-time("20:03:20.948Z, 20:57:50.886Z"), "interval54": interval-time("20:03:20.948Z, 20:57:50.886Z"), "interval55": null, "interval56": null, "interval61": interval-datetime("-2043-11-19T15:32:39.293Z, -1603-03-12T12:12:38.242Z"), "interval62": interval-datetime("-2043-11-19T15:32:39.293Z, -1603-03-12T12:12:38.242Z"), "interval63": interval-datetime("-2043-11-19T15:32:39.293Z, -1603-03-12T12:12:38.242Z"), "interval64": interval-datetime("-2043-11-19T15:32:39.293Z, -1603-03-12T12:12:38.242Z"), "interval65": null, "interval66": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm b/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm
new file mode 100644
index 0000000..32f14b3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/constructor/rectangle_01/rectangle_01.1.adm
@@ -0,0 +1 @@
+{ "rectangle1": rectangle("5.1,11.8 87.6,15.6548"), "rectangle2": rectangle("0.1234,-1.0E-10 5.5487,0.48765") }
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
index e95bac0..25859de 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.1.adm
@@ -1,2 +1,2 @@
-{ "cell": rectangle("33.5,-101.5 | 36.5,-98.5"), "count": 1 }
-{ "cell": rectangle("33.5,-98.5 | 36.5,-95.5"), "count": 2 }
\ No newline at end of file
+{ "cell": rectangle("33.5,-101.5 36.5,-98.5"), "count": 1 }
+{ "cell": rectangle("33.5,-98.5 36.5,-95.5"), "count": 2 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
index 137ed1c..0014d49 100644
--- a/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/spatial/cell-aggregation/cell-aggregation.1.adm
@@ -1,3 +1,3 @@
-{ "cell": rectangle("5.0,5.0 | 10.0,10.0"), "count": 1 }
-{ "cell": rectangle("5.0,0.0 | 10.0,5.0"), "count": 3 }
-{ "cell": rectangle("0.0,0.0 | 5.0,5.0"), "count": 12 }
\ No newline at end of file
+{ "cell": rectangle("5.0,5.0 10.0,10.0"), "count": 1 }
+{ "cell": rectangle("5.0,0.0 10.0,5.0"), "count": 3 }
+{ "cell": rectangle("0.0,0.0 5.0,5.0"), "count": 12 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/accessors/accessors.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/accessors/accessors.1.adm
index 4f36f91..89e471a 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/accessors/accessors.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/accessors/accessors.1.adm
@@ -1 +1 @@
-{ "year1": 2010, "year2": 1987, "year3": -1987, "year4": 928, "year5": 1937, "year6": -3, "year7": 9, "month1": 10, "month2": 11, "month3": 11, "month4": 3, "month5": 12, "month6": 1, "day1": 30, "day2": 19, "day3": 19, "day4": 29, "day5": 29, "day6": 634, "hour1": 23, "hour2": 20, "hour3": 5, "hour4": 14, "min1": 49, "min2": 3, "min3": 23, "min4": 28, "second1": 23, "second2": 6, "second3": 34, "second4": 48, "ms1": 938, "ms2": 280, "ms3": 930, "ms4": 94 }
\ No newline at end of file
+{ "year1": 2010, "year2": 1987, "year3": -1987, "year4": 928, "year5": 1937, "year6": -3, "year7": 9, "year-null": null, "month1": 10, "month2": 11, "month3": 11, "month4": 3, "month5": 12, "month6": 1, "month-null": null, "day1": 30, "day2": 19, "day3": 19, "day4": 29, "day5": 29, "day6": 634, "day-null": null, "hour1": 23, "hour2": 20, "hour3": 5, "hour4": 14, "hour-null": null, "min1": 49, "min2": 3, "min3": 23, "min4": 28, "min-null": null, "second1": 23, "second2": 6, "second3": 34, "second4": 48, "second-null": null, "ms1": 938, "ms2": 280, "ms3": 930, "ms4": 94, "ms-null": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval/accessors_interval.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval/accessors_interval.1.adm
new file mode 100644
index 0000000..df34c86
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval/accessors_interval.1.adm
@@ -0,0 +1 @@
+{ "start1": date("2010-10-30"), "end1": date("2013-04-01"), "start2": time("08:09:10.234Z"), "end2": time("12:30:40.567Z"), "start3": datetime("2009-08-31T16:00:00.000Z"), "end3": datetime("2013-04-04T16:00:00.000Z") }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval_null/accessors_interval_null.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval_null/accessors_interval_null.1.adm
new file mode 100644
index 0000000..7a50c5f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/accessors_interval_null/accessors_interval_null.1.adm
@@ -0,0 +1 @@
+{ "start-null-interval": null, "end-null-interval": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/adjust_timezone/adjust_timezone.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/adjust_timezone/adjust_timezone.1.adm
index 1f80fd9..c51cd50 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/adjust_timezone/adjust_timezone.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/adjust_timezone/adjust_timezone.1.adm
@@ -1 +1 @@
-{ "string1": "04:15:10.327+08:00", "string2": "2010-10-22T18:57:13.329-06:15" }
\ No newline at end of file
+{ "time": "04:15:10.327+08:00", "datetime": "2010-10-22T18:57:13.329-06:15", "null1": null, "null2": null, "null3": null, "null4": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/calendar_duration/calendar_duration.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/calendar_duration/calendar_duration.1.adm
index 957388f..fd18463 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/calendar_duration/calendar_duration.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/calendar_duration/calendar_duration.1.adm
@@ -1 +1 @@
-{ "cduration1": duration("P20Y3M12DT7H48M21.329S"), "c1": true, "cduration2": duration("-P9M6DT4H45M39.328S"), "c2": true, "cduration3": duration("P8Y6M"), "c3": true, "cduration4": duration("-P21Y7M10DT13H9M42.983S"), "c4": true, "cduration5": duration("P20Y3M12DT7H48M21.329S"), "c5": true, "cduration6": duration("-P9M5DT4H45M39.328S"), "c6": true, "cduration7": duration("P8Y6M"), "c7": true, "cduration8": duration("-P21Y7M10DT13H9M42.983S"), "c8": true }
\ No newline at end of file
+{ "cduration1": duration("P20Y3M12DT7H48M21.329S"), "c1": true, "cduration2": duration("-P9M6DT4H45M39.328S"), "c2": true, "cduration3": duration("P8Y6M"), "c3": true, "cduration4": duration("-P21Y7M10DT13H9M42.983S"), "c4": true, "cduration5": duration("P20Y3M12DT7H48M21.329S"), "c5": true, "cduration6": duration("-P9M5DT4H45M39.328S"), "c6": true, "cduration7": duration("P8Y6M"), "c7": true, "cduration8": duration("-P21Y7M10DT13H9M42.983S"), "c8": true, "cduration-null-1": null, "cduration-null-2": null, "cduration-null-3": null, "cduration-null-4": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/date_functions/date_functions.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/date_functions/date_functions.1.adm
index a5285d8..811d5e5 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/date_functions/date_functions.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/date_functions/date_functions.1.adm
@@ -1 +1 @@
-{ "date1": date("2012-09-17"), "date2": date("1327-12-02"), "date3": date("2012-10-10"), "date4": date("2010-05-17"), "date5": date("1703-08-09"), "duration1": duration("P137216D"), "duration2": duration("-P854D"), "c1": true, "c2": true }
\ No newline at end of file
+{ "date1": date("2012-09-17"), "date2": date("1327-12-02"), "date3": date("2012-10-10"), "date4": date("2010-05-17"), "date5": date("1703-08-09"), "duration1": duration("P137216D"), "duration2": duration("-P854D"), "c1": true, "c2": true, "null1": null, "null2": null, "null3": null, "null4": null, "null5": null, "null6": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/datetime_functions/datetime_functions.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/datetime_functions/datetime_functions.1.adm
index 01f3758..4911179 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/datetime_functions/datetime_functions.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/datetime_functions/datetime_functions.1.adm
@@ -1 +1 @@
-{ "datetime1": datetime("1970-01-12T01:33:27.429Z"), "datetime2": datetime("1327-12-02T23:35:49.938Z"), "datetime3": datetime("1327-12-02T23:35:49.938Z"), "duration1": duration("-P234526DT1H57M37.491S"), "c1": true }
\ No newline at end of file
+{ "datetime1": datetime("1970-01-12T01:33:27.429Z"), "datetime2": datetime("1327-12-02T23:35:49.938Z"), "datetime3": datetime("1327-12-02T23:35:49.938Z"), "duration1": duration("-P234526DT1H57M37.491S"), "c1": true, "null1": null, "null2": null, "null3": null, "null4": null, "null5": null, "null6": null, "null7": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/interval_functions/interval_functions.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/interval_functions/interval_functions.1.adm
index 9f9c9d3..0d86592 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/interval_functions/interval_functions.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/interval_functions/interval_functions.1.adm
@@ -1 +1 @@
-{ "before1": true, "before2": false, "after1": true, "after2": false, "meet1": true, "meet2": false, "metby1": true, "metby2": false, "overlaps1": true, "overlaps2": false, "overlapped1": true, "overlapped2": false, "overlap1": true, "overlap2": false, "starts1": true, "starts2": false, "startedby1": true, "startedby2": false, "covers1": true, "covers2": false, "coveredby1": true, "coveredby2": false, "ends1": true, "ends2": false, "endedby1": true, "endedby2": false }
\ No newline at end of file
+{ "before1": true, "before2": false, "after1": true, "after2": false, "meet1": true, "meet2": false, "metby1": true, "metby2": false, "overlaps1": true, "overlaps2": false, "overlapped1": true, "overlapped2": false, "overlap1": true, "overlap2": false, "starts1": true, "starts2": false, "startedby1": true, "startedby2": false, "covers1": true, "covers2": false, "coveredby1": true, "coveredby2": false, "ends1": true, "ends2": false, "endedby1": true, "endedby2": false, "null1": null, "null2": null, "null3": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/temporal/time_functions/time_functions.1.adm b/asterix-app/src/test/resources/runtimets/results/temporal/time_functions/time_functions.1.adm
index 791d652..d98d702 100644
--- a/asterix-app/src/test/resources/runtimets/results/temporal/time_functions/time_functions.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/temporal/time_functions/time_functions.1.adm
@@ -1 +1 @@
-{ "time1": time("00:26:00.074Z"), "time2": time("23:35:49.938Z"), "time3": time("23:30:23.000Z"), "time4": time("18:26:00.074Z"), "time5": time("00:11:49.938Z"), "duration1": duration("-PT23H24M"), "duration2": duration("PT18H"), "c1": true, "c2": true }
\ No newline at end of file
+{ "time1": time("00:26:00.074Z"), "time2": time("23:35:49.938Z"), "time3": time("23:30:23.000Z"), "time4": time("18:26:00.074Z"), "time5": time("00:11:49.938Z"), "duration1": duration("-PT23H24M"), "duration2": duration("PT18H"), "c1": true, "c2": true, "null1": null, "null2": null, "null3": null, "null4": null, "null5": null, "null6": null }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf28/udf28.1.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf28/udf28.1.adm
new file mode 100644
index 0000000..b106dbe
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf28/udf28.1.adm
@@ -0,0 +1 @@
+{ "DataverseName": "test", "Name": "f1", "Arity": "0", "Params": [ ], "ReturnType": "VOID", "Definition": "100", "Language": "AQL", "Kind": "SCALAR" }
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf29/udf29.1.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf29/udf29.1.adm
new file mode 100644
index 0000000..29d6383
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf29/udf29.1.adm
@@ -0,0 +1 @@
+100
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index b730b41..17ec2bb 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -551,6 +551,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="constructor">
+ <compilation-unit name="rectangle_01">
+ <output-dir compare="Text">rectangle_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="constructor">
<compilation-unit name="point_01">
<output-dir compare="Text">point_01</output-dir>
</compilation-unit>
@@ -774,6 +779,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="dml">
+ <compilation-unit name="drop-index">
+ <output-dir compare="Text">drop-index</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
<compilation-unit name="create-drop-cltype">
<output-dir compare="Text">create-drop-cltype</output-dir>
</compilation-unit>
@@ -4038,6 +4048,16 @@
</compilation-unit>
</test-case>
<test-case FilePath="user-defined-functions">
+ <compilation-unit name="udf28">
+ <output-dir compare="Text">udf28</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="user-defined-functions">
+ <compilation-unit name="udf29">
+ <output-dir compare="Text">udf29</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="user-defined-functions">
<compilation-unit name="f01">
<output-dir compare="Text">f01</output-dir>
<expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
@@ -4124,12 +4144,22 @@
</compilation-unit>
</test-case>
</test-group>
- <test-group name="temporal">
- <test-case FilePath="temporal">
- <compilation-unit name="accessors">
+ <test-group name="temporal">
+ <test-case FilePath="temporal">
+ <compilation-unit name="accessors">
<output-dir compare="Text">accessors</output-dir>
</compilation-unit>
- </test-case>
+ </test-case>
+ <test-case FilePath="temporal">
+ <compilation-unit name="accessors_interval">
+ <output-dir compare="Text">accessors_interval</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="temporal">
+ <compilation-unit name="accessors_interval_null">
+ <output-dir compare="Text">accessors_interval_null</output-dir>
+ </compilation-unit>
+ </test-case>
<test-case FilePath="temporal">
<compilation-unit name="adjust_timezone">
<output-dir compare="Text">adjust_timezone</output-dir>
@@ -4188,5 +4218,4 @@
</compilation-unit>
</test-case>
</test-group>
-</test-suite>
-
+</test-suite>
\ No newline at end of file
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index a69bbe6..5f5915c 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -1371,7 +1371,7 @@
getCurrentScope().addNewVarSymbolToScope(var);
arity++;
})*)? <RIGHTPAREN> "{" funcBody = Expression() "}"
-
+ (";")?
{
signature = new FunctionSignature(defaultDataverse, functionName, arity);
getCurrentScope().addFunctionDescriptor(signature, false);
@@ -1428,11 +1428,12 @@
beginPos = getToken(0);
}
functionBodyExpr = Expression()
- "}"
+ "}"
{
endPos = getToken(0);
functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
}
+ (";")?
{
signature = new FunctionSignature(dataverse, functionName, paramList.size());
getCurrentScope().addFunctionDescriptor(signature, false);
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
index b8f0fc6..9b0a41a 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/driver/EventDriver.java
@@ -69,10 +69,7 @@
}
public static Cluster initializeCluster(String path) throws JAXBException, IOException {
- File file = new File(path);
- JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
- Unmarshaller unmarshaller = ctx.createUnmarshaller();
- Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
+ Cluster cluster = EventUtil.getCluster(path);
for (Property p : cluster.getEnv().getProperty()) {
env.put(p.getKey(), p.getValue());
}
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
index d484947..1c51022 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/management/EventUtil.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.asterix.event.management;
+import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
import java.text.DateFormat;
@@ -21,6 +22,10 @@
import java.util.ArrayList;
import java.util.List;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+
import edu.uci.ics.asterix.event.driver.EventDriver;
import edu.uci.ics.asterix.event.management.ValueType.Type;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
@@ -41,6 +46,24 @@
private static final String SCRIPT = "SCRIPT";
private static final String ARGS = "ARGS";
private static final String EXECUTE_SCRIPT = "events/execute.sh";
+ private static final String LOCALHOST = "localhost";
+ private static final String LOCALHOST_IP = "127.0.0.1";
+
+ public static Cluster getCluster(String clusterConfigurationPath) throws JAXBException {
+ File file = new File(clusterConfigurationPath);
+ JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
+ Unmarshaller unmarshaller = ctx.createUnmarshaller();
+ Cluster cluster = (Cluster) unmarshaller.unmarshal(file);
+ if (cluster.getMasterNode().getClusterIp().equals(LOCALHOST)) {
+ cluster.getMasterNode().setClusterIp(LOCALHOST_IP);
+ }
+ for (Node node : cluster.getNode()) {
+ if (node.getClusterIp().equals(LOCALHOST)) {
+ node.setClusterIp(LOCALHOST_IP);
+ }
+ }
+ return cluster;
+ }
public static long parseTimeInterval(ValueType v, String unit) throws IllegalArgumentException {
int val = 0;
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
index 297a168..261de22 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/BackupCommand.java
@@ -57,12 +57,11 @@
@Override
protected String getUsageDescription() {
- return "\nIn an undesirable event of data loss either due to a disk/system"
- + "\nfailure or accidental execution of a DDL statement (drop dataverse/dataset),"
- + "\nyou may need to recover the lost data. The backup command allows you to take a"
+ return "\nThe backup command allows you to take a"
+ "\nbackup of the data stored with an ASTERIX instance. "
- + "\nThe backed up snapshot is stored in HDFS." + "\n\nAvailable arguments/options:"
- + "\n-n name of the Asterix instance";
+ + "\nThe backed up snapshot is stored either in HDFS or on the local file system of each node in the ASTERIX cluster."
+ + "\nThe target location of backup can be configured in $MANAGIX_HOME/conf/managix-conf.xml"
+ + "\n\nAvailable arguments/options:" + "\n-n name of the Asterix instance";
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
index ff85d03..7f12f01 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ConfigureCommand.java
@@ -7,6 +7,7 @@
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
+import edu.uci.ics.asterix.event.management.EventUtil;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.WorkingDir;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
@@ -19,10 +20,7 @@
String localClusterPath = InstallerDriver.getManagixHome() + File.separator + "clusters" + File.separator
+ "local" + File.separator + "local.xml";
- JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
- Unmarshaller unmarshaller = ctx.createUnmarshaller();
- Cluster cluster = (Cluster) unmarshaller.unmarshal(new File(localClusterPath));
-
+ Cluster cluster = EventUtil.getCluster(localClusterPath);
String workingDir = InstallerDriver.getManagixHome() + File.separator + "clusters" + File.separator + "local"
+ File.separator + "working_dir";
cluster.setWorkingDir(new WorkingDir(workingDir, true));
@@ -32,13 +30,14 @@
cluster.setJavaHome(System.getProperty("java.home"));
cluster.setJavaOpts("-Xmx1024m");
+ JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
Marshaller marshaller = ctx.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
marshaller.marshal(cluster, new FileOutputStream(localClusterPath));
String installerConfPath = InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_CONF_XML;
ctx = JAXBContext.newInstance(Configuration.class);
- unmarshaller = ctx.createUnmarshaller();
+ Unmarshaller unmarshaller = ctx.createUnmarshaller();
Configuration configuration = (Configuration) unmarshaller.unmarshal(new File(installerConfPath));
configuration.getBackup().setBackupDir(workingDir + File.separator + "backup");
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
index 301134e..81d6481 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
@@ -23,6 +23,8 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.management.EventUtil;
+import edu.uci.ics.asterix.event.management.EventrixClient;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.Env;
import edu.uci.ics.asterix.event.schema.cluster.Property;
@@ -51,9 +53,7 @@
asterixInstanceName = ((CreateConfig) config).name;
InstallerUtil.validateAsterixInstanceNotExists(asterixInstanceName);
CreateConfig createConfig = (CreateConfig) config;
- JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
- Unmarshaller unmarshaller = ctx.createUnmarshaller();
- cluster = (Cluster) unmarshaller.unmarshal(new File(createConfig.clusterPath));
+ cluster = EventUtil.getCluster(createConfig.clusterPath);
AsterixInstance asterixInstance = InstallerUtil.createAsterixInstance(asterixInstanceName, cluster);
InstallerUtil.evaluateConflictWithOtherInstances(asterixInstance);
InstallerUtil.createAsterixZip(asterixInstance, true);
@@ -72,9 +72,14 @@
clusterProperties.add(new Property("WORKING_DIR", cluster.getWorkingDir().getDir()));
cluster.setEnv(new Env(clusterProperties));
+ EventrixClient eventrixClient = InstallerUtil.getEventrixClient(cluster);
PatternCreator pc = new PatternCreator();
+
+ Patterns asterixBinarytrasnferPattern = pc.getAsterixBinaryTransferPattern(asterixInstanceName, cluster);
+ eventrixClient.submit(asterixBinarytrasnferPattern);
+
Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, cluster);
- InstallerUtil.getEventrixClient(cluster).submit(patterns);
+ eventrixClient.submit(patterns);
AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(asterixInstance);
VerificationUtil.updateInstanceWithRuntimeDescription(asterixInstance, runtimeState, true);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
index 38bcf33..01fdda4 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/StartCommand.java
@@ -18,6 +18,7 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.management.EventrixClient;
import edu.uci.ics.asterix.event.schema.pattern.Patterns;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
import edu.uci.ics.asterix.installer.driver.InstallerUtil;
@@ -37,8 +38,13 @@
AsterixInstance instance = InstallerUtil.validateAsterixInstanceExists(asterixInstanceName, State.INACTIVE);
InstallerUtil.createAsterixZip(instance, false);
PatternCreator pc = new PatternCreator();
+ EventrixClient client = InstallerUtil.getEventrixClient(instance.getCluster());
+ Patterns asterixBinaryTransferPattern = pc.getAsterixBinaryTransferPattern(asterixInstanceName,
+ instance.getCluster());
+ client.submit(asterixBinaryTransferPattern);
+
Patterns patterns = pc.getStartAsterixPattern(asterixInstanceName, instance.getCluster());
- InstallerUtil.getEventrixClient(instance.getCluster()).submit(patterns);
+ client.submit(patterns);
InstallerUtil.deleteDirectory(InstallerDriver.getManagixHome() + File.separator + InstallerDriver.ASTERIX_DIR
+ File.separator + asterixInstanceName);
AsterixRuntimeState runtimeState = VerificationUtil.getAsterixRuntimeState(instance);
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
index 7c803f6..dcc63df 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/ValidateCommand.java
@@ -26,10 +26,12 @@
import org.kohsuke.args4j.Option;
+import edu.uci.ics.asterix.event.management.EventUtil;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.MasterNode;
import edu.uci.ics.asterix.event.schema.cluster.Node;
import edu.uci.ics.asterix.installer.driver.InstallerDriver;
+import edu.uci.ics.asterix.installer.driver.InstallerUtil;
import edu.uci.ics.asterix.installer.schema.conf.Configuration;
import edu.uci.ics.asterix.installer.schema.conf.Zookeeper;
@@ -46,7 +48,7 @@
if (((ValidateConfig) config).cluster != null) {
logValidationResult("Cluster configuration", validateCluster(vConfig.cluster));
} else {
- logValidationResult("Installer Configuration", validateConfiguration());
+ logValidationResult("Managix Configuration", validateConfiguration());
}
}
@@ -86,13 +88,12 @@
boolean valid = true;
Cluster cluster = null;
File f = new File(clusterPath);
+ List<String> ipAddresses = new ArrayList<String>();
if (!f.exists() || !f.isFile()) {
LOGGER.error(" Invalid path " + f.getAbsolutePath() + ERROR);
valid = false;
} else {
- JAXBContext ctx = JAXBContext.newInstance(Cluster.class);
- Unmarshaller unmarshaller = ctx.createUnmarshaller();
- cluster = (Cluster) unmarshaller.unmarshal(new File(clusterPath));
+ cluster = EventUtil.getCluster(clusterPath);
validateClusterProperties(cluster);
Set<String> servers = new HashSet<String>();
@@ -103,6 +104,7 @@
MasterNode masterNode = cluster.getMasterNode();
Node master = new Node(masterNode.getId(), masterNode.getClusterIp(), masterNode.getJavaOpts(),
masterNode.getJavaHome(), masterNode.getLogdir(), null, null, null);
+ ipAddresses.add(masterNode.getClusterIp());
valid = valid & validateNodeConfiguration(master, cluster);
@@ -113,13 +115,44 @@
LOGGER.error("Duplicate node id :" + node.getId() + ERROR);
} else {
valid = valid & validateNodeConfiguration(node, cluster);
+ if (!ipAddresses.contains(node.getClusterIp())) {
+ ipAddresses.add(node.getClusterIp());
+ }
}
}
}
+ if (valid) {
+ String username = cluster.getUsername();
+ if (username == null) {
+ username = System.getProperty("user.name");
+ }
+ valid = checkPasswordLessSSHLogin(username, ipAddresses);
+ }
return valid;
}
+ private boolean checkPasswordLessSSHLogin(String username, List<String> ipAddresses) throws Exception {
+ String script = InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_INTERNAL_DIR
+ + File.separator + "scripts" + File.separator + "validate_ssh.sh";
+ List<String> args = ipAddresses;
+ args.add(0, username);
+ String output = InstallerUtil.executeLocalScript(script, args);
+ ipAddresses.remove(0);
+ for (String line : output.split("\n")) {
+ ipAddresses.remove(line);
+ }
+ if (ipAddresses.size() > 0) {
+ LOGGER.error(" Password-less SSH (from user account: " + username + " )"
+ + " not configured for the following hosts");
+ for (String failedIp : ipAddresses) {
+ System.out.println(failedIp);
+ }
+ return false;
+ }
+ return true;
+ }
+
private void validateClusterProperties(Cluster cluster) {
List<String> tempDirs = new ArrayList<String>();
if (cluster.getLogdir() != null && checkTemporaryPath(cluster.getLogdir())) {
@@ -177,8 +210,7 @@
}
private boolean checkTemporaryPath(String logdir) {
- return logdir.startsWith("/tmp/");
-
+ return logdir.startsWith(System.getProperty("java.io.tmpdir"));
}
public boolean validateConfiguration() throws Exception {
@@ -201,14 +233,23 @@
LOGGER.warn("Zookeeper home dir is subject to be cleaned up by OS" + WARNING);
}
- if (zk.getServers().getServer().isEmpty()) {
+ if (zk.getServers().getServer() == null || zk.getServers().getServer().isEmpty()) {
valid = false;
LOGGER.fatal("Zookeeper servers not configured" + ERROR);
}
- boolean validEnsemble = true;
+ if (zk.getServers().getJavaHome() == null || zk.getServers().getJavaHome().length() == 0) {
+ valid = false;
+ LOGGER.fatal("Java home not set for Zookeeper server in " + InstallerDriver.getManagixHome()
+ + File.separator + InstallerDriver.MANAGIX_CONF_XML);
+ }
+
for (String server : zk.getServers().getServer()) {
- validEnsemble = validEnsemble && checkNodeReachability(server);
+ valid = valid && checkNodeReachability(server);
+ }
+
+ if (valid) {
+ valid = valid & checkPasswordLessSSHLogin(System.getProperty("user.name"), zk.getServers().getServer());
}
return valid;
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
index 2210ba1..0068c08 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/events/PatternCreator.java
@@ -51,27 +51,37 @@
p.setDelay(d);
}
- public Patterns getStartAsterixPattern(String asterixInstanceName, Cluster cluster) throws Exception {
- String ccLocationId = cluster.getMasterNode().getId();
+ public Patterns getAsterixBinaryTransferPattern(String asterixInstanceName, Cluster cluster) throws Exception {
String ccLocationIp = cluster.getMasterNode().getClusterIp();
-
String destDir = cluster.getWorkingDir().getDir() + File.separator + "asterix";
List<Pattern> ps = new ArrayList<Pattern>();
Pattern copyHyracks = createCopyHyracksPattern(asterixInstanceName, cluster, ccLocationIp, destDir);
ps.add(copyHyracks);
- Pattern createCC = createCCStartPattern(ccLocationId);
- addInitialDelay(createCC, 3, "sec");
- ps.add(createCC);
-
boolean copyHyracksToNC = !cluster.getWorkingDir().isNFS();
+
for (Node node : cluster.getNode()) {
if (copyHyracksToNC) {
Pattern copyHyracksForNC = createCopyHyracksPattern(asterixInstanceName, cluster, node.getClusterIp(),
destDir);
ps.add(copyHyracksForNC);
}
+ }
+ ps.addAll(createHadoopLibraryTransferPattern(cluster).getPattern());
+ Patterns patterns = new Patterns(ps);
+ return patterns;
+ }
+
+ public Patterns getStartAsterixPattern(String asterixInstanceName, Cluster cluster) throws Exception {
+ String ccLocationId = cluster.getMasterNode().getId();
+ List<Pattern> ps = new ArrayList<Pattern>();
+
+ Pattern createCC = createCCStartPattern(ccLocationId);
+ addInitialDelay(createCC, 3, "sec");
+ ps.add(createCC);
+
+ for (Node node : cluster.getNode()) {
String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
Pattern createNC = createNCStartPattern(cluster.getMasterNode().getClusterIp(), node.getId(),
asterixInstanceName + "_" + node.getId(), iodevices);
@@ -80,7 +90,6 @@
}
Patterns patterns = new Patterns(ps);
- patterns.getPattern().addAll(createHadoopLibraryTransferPattern(cluster).getPattern());
return patterns;
}
@@ -296,6 +305,26 @@
}
patternList.addAll(createRemoveAsterixLogDirPattern(instance).getPattern());
patternList.addAll(createRemoveAsterixRootMetadata(instance).getPattern());
+ patternList.addAll(createRemoveAsterixTxnLogs(instance).getPattern());
+ Patterns patterns = new Patterns(patternList);
+ return patterns;
+ }
+
+ private Patterns createRemoveAsterixTxnLogs(AsterixInstance instance) throws Exception {
+ List<Pattern> patternList = new ArrayList<Pattern>();
+ Cluster cluster = instance.getCluster();
+ Nodeid nodeid = null;
+ String pargs = null;
+ Event event = null;
+ for (Node node : cluster.getNode()) {
+ String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
+ String primaryIODevice = iodevices.split(",")[0].trim();
+ pargs = primaryIODevice + File.separator + InstallerUtil.TXN_LOG_DIR;
+ nodeid = new Nodeid(new Value(null, node.getId()));
+ event = new Event("file_delete", nodeid, pargs);
+ patternList.add(new Pattern(null, 1, null, event));
+ }
+
Patterns patterns = new Patterns(patternList);
return patterns;
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
index 9f8b28d..9a2d43d 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/model/AsterixInstance.java
@@ -25,159 +25,158 @@
public class AsterixInstance implements Serializable {
- private static final long serialVersionUID = 2874439550187520449L;
+ private static final long serialVersionUID = 2874439550187520449L;
- public enum State {
- ACTIVE, INACTIVE, UNUSABLE
- }
+ public enum State {
+ ACTIVE,
+ INACTIVE,
+ UNUSABLE
+ }
- private final Cluster cluster;
- private final String name;
- private final Date createdTimestamp;
- private Date stateChangeTimestamp;
- private Date modifiedTimestamp;
- private Properties configuration;
- private State state;
- private final String metadataNodeId;
- private final String asterixVersion;
- private final List<BackupInfo> backupInfo;
- private final String webInterfaceUrl;
- private AsterixRuntimeState runtimeState;
- private State previousState;
+ private final Cluster cluster;
+ private final String name;
+ private final Date createdTimestamp;
+ private Date stateChangeTimestamp;
+ private Date modifiedTimestamp;
+ private Properties configuration;
+ private State state;
+ private final String metadataNodeId;
+ private final String asterixVersion;
+ private final List<BackupInfo> backupInfo;
+ private final String webInterfaceUrl;
+ private AsterixRuntimeState runtimeState;
+ private State previousState;
- public AsterixInstance(String name, Cluster cluster,
- Properties configuration, String metadataNodeId,
- String asterixVersion) {
- this.name = name;
- this.cluster = cluster;
- this.configuration = configuration;
- this.metadataNodeId = metadataNodeId;
- this.state = State.ACTIVE;
- this.previousState = State.UNUSABLE;
- this.asterixVersion = asterixVersion;
- this.createdTimestamp = new Date();
- this.backupInfo = new ArrayList<BackupInfo>();
- this.webInterfaceUrl = "http://"
- + cluster.getMasterNode().getClusterIp() + ":" + 19001;
- }
+ public AsterixInstance(String name, Cluster cluster, Properties configuration, String metadataNodeId,
+ String asterixVersion) {
+ this.name = name;
+ this.cluster = cluster;
+ this.configuration = configuration;
+ this.metadataNodeId = metadataNodeId;
+ this.state = State.ACTIVE;
+ this.previousState = State.UNUSABLE;
+ this.asterixVersion = asterixVersion;
+ this.createdTimestamp = new Date();
+ this.backupInfo = new ArrayList<BackupInfo>();
+ this.webInterfaceUrl = "http://" + cluster.getMasterNode().getClientIp() + ":" + 19001;
+ }
- public Date getModifiedTimestamp() {
- return stateChangeTimestamp;
- }
+ public Date getModifiedTimestamp() {
+ return stateChangeTimestamp;
+ }
- public Properties getConfiguration() {
- return configuration;
- }
+ public Properties getConfiguration() {
+ return configuration;
+ }
- public void setConfiguration(Properties properties) {
- this.configuration = properties;
- }
+ public void setConfiguration(Properties properties) {
+ this.configuration = properties;
+ }
- public State getState() {
- return state;
- }
+ public State getState() {
+ return state;
+ }
- public void setState(State state) {
- this.previousState = this.state;
- this.state = state;
- }
+ public void setState(State state) {
+ this.previousState = this.state;
+ this.state = state;
+ }
- public Cluster getCluster() {
- return cluster;
- }
+ public Cluster getCluster() {
+ return cluster;
+ }
- public String getName() {
- return name;
- }
+ public String getName() {
+ return name;
+ }
- public Date getCreatedTimestamp() {
- return createdTimestamp;
- }
+ public Date getCreatedTimestamp() {
+ return createdTimestamp;
+ }
- public Date getStateChangeTimestamp() {
- return stateChangeTimestamp;
- }
+ public Date getStateChangeTimestamp() {
+ return stateChangeTimestamp;
+ }
- public void setStateChangeTimestamp(Date stateChangeTimestamp) {
- this.stateChangeTimestamp = stateChangeTimestamp;
- }
+ public void setStateChangeTimestamp(Date stateChangeTimestamp) {
+ this.stateChangeTimestamp = stateChangeTimestamp;
+ }
- public void setModifiedTimestamp(Date modifiedTimestamp) {
- this.modifiedTimestamp = modifiedTimestamp;
- }
+ public void setModifiedTimestamp(Date modifiedTimestamp) {
+ this.modifiedTimestamp = modifiedTimestamp;
+ }
- public String getMetadataNodeId() {
- return metadataNodeId;
- }
+ public String getMetadataNodeId() {
+ return metadataNodeId;
+ }
- public String getAsterixVersion() {
- return asterixVersion;
- }
+ public String getAsterixVersion() {
+ return asterixVersion;
+ }
- public String getDescription(boolean detailed) {
- StringBuffer buffer = new StringBuffer();
- buffer.append("Name:" + name + "\n");
- buffer.append("Created:" + createdTimestamp + "\n");
- buffer.append("Web-Url:" + webInterfaceUrl + "\n");
- buffer.append("State:" + state);
- if (!state.equals(State.UNUSABLE) && stateChangeTimestamp != null) {
- buffer.append(" (" + stateChangeTimestamp + ")" + "\n");
- } else {
- buffer.append("\n");
- }
- if (modifiedTimestamp != null) {
- buffer.append("Last modified timestamp:" + modifiedTimestamp + "\n");
- }
+ public String getDescription(boolean detailed) {
+ StringBuffer buffer = new StringBuffer();
+ buffer.append("Name:" + name + "\n");
+ buffer.append("Created:" + createdTimestamp + "\n");
+ buffer.append("Web-Url:" + webInterfaceUrl + "\n");
+ buffer.append("State:" + state);
+ if (!state.equals(State.UNUSABLE) && stateChangeTimestamp != null) {
+ buffer.append(" (" + stateChangeTimestamp + ")" + "\n");
+ } else {
+ buffer.append("\n");
+ }
+ if (modifiedTimestamp != null) {
+ buffer.append("Last modified timestamp:" + modifiedTimestamp + "\n");
+ }
- if (runtimeState.getSummary() != null
- && runtimeState.getSummary().length() > 0) {
- buffer.append("\nWARNING!:" + runtimeState.getSummary() + "\n");
- }
- if (detailed) {
- addDetailedInformation(buffer);
- }
- return buffer.toString();
- }
+ if (runtimeState.getSummary() != null && runtimeState.getSummary().length() > 0) {
+ buffer.append("\nWARNING!:" + runtimeState.getSummary() + "\n");
+ }
+ if (detailed) {
+ addDetailedInformation(buffer);
+ }
+ return buffer.toString();
+ }
- public List<BackupInfo> getBackupInfo() {
- return backupInfo;
- }
+ public List<BackupInfo> getBackupInfo() {
+ return backupInfo;
+ }
- public String getWebInterfaceUrl() {
- return webInterfaceUrl;
- }
+ public String getWebInterfaceUrl() {
+ return webInterfaceUrl;
+ }
- public AsterixRuntimeState getAsterixRuntimeState() {
- return runtimeState;
- }
+ public AsterixRuntimeState getAsterixRuntimeState() {
+ return runtimeState;
+ }
- public void setAsterixRuntimeStates(AsterixRuntimeState runtimeState) {
- this.runtimeState = runtimeState;
- }
+ public void setAsterixRuntimeStates(AsterixRuntimeState runtimeState) {
+ this.runtimeState = runtimeState;
+ }
- private void addDetailedInformation(StringBuffer buffer) {
- buffer.append("Master node:" + cluster.getMasterNode().getId() + ":"
- + cluster.getMasterNode().getClusterIp() + "\n");
- for (Node node : cluster.getNode()) {
- buffer.append(node.getId() + ":" + node.getClusterIp() + "\n");
- }
+ private void addDetailedInformation(StringBuffer buffer) {
+ buffer.append("Master node:" + cluster.getMasterNode().getId() + ":" + cluster.getMasterNode().getClusterIp()
+ + "\n");
+ for (Node node : cluster.getNode()) {
+ buffer.append(node.getId() + ":" + node.getClusterIp() + "\n");
+ }
- if (backupInfo != null && backupInfo.size() > 0) {
- for (BackupInfo info : backupInfo) {
- buffer.append(info + "\n");
- }
- }
- buffer.append("\n");
- buffer.append("Asterix version:" + asterixVersion + "\n");
- buffer.append("Metadata Node:" + metadataNodeId + "\n");
- buffer.append("Processes" + "\n");
- for (ProcessInfo pInfo : runtimeState.getProcesses()) {
- buffer.append(pInfo + "\n");
- }
+ if (backupInfo != null && backupInfo.size() > 0) {
+ for (BackupInfo info : backupInfo) {
+ buffer.append(info + "\n");
+ }
+ }
+ buffer.append("\n");
+ buffer.append("Asterix version:" + asterixVersion + "\n");
+ buffer.append("Metadata Node:" + metadataNodeId + "\n");
+ buffer.append("Processes" + "\n");
+ for (ProcessInfo pInfo : runtimeState.getProcesses()) {
+ buffer.append(pInfo + "\n");
+ }
- }
+ }
- public State getPreviousState() {
- return previousState;
- }
+ public State getPreviousState() {
+ return previousState;
+ }
}
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
index 81adee0..f09e4b6 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/service/ZooKeeperService.java
@@ -107,9 +107,13 @@
zk = new ZooKeeper(zkConnectionString, ZOOKEEPER_SESSION_TIME_OUT, watcher);
String head = msgQ.poll(10, TimeUnit.SECONDS);
if (head == null) {
- String msg = "Unable to start Zookeeper Service. Please verify the configuration at "
- + InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_CONF_XML;
- throw new Exception(msg);
+ StringBuilder msg = new StringBuilder(
+ "Unable to start Zookeeper Service. This could be because of the following reasons.\n");
+ msg.append("1) Managix is incorrectly configured. Please run " + InstallerDriver.getManagixHome()
+ + "/bin/managix validate" + " to run a validation test and correct the errors reported.");
+ msg.append("\n2) If validation in (1) is successful, ensure that java_home parameter is set correctly in Managix configuration ("
+ + InstallerDriver.getManagixHome() + File.separator + InstallerDriver.MANAGIX_CONF_XML + ")");
+ throw new Exception(msg.toString());
}
msgQ.take();
createRootIfNotExist();
diff --git a/asterix-installer/src/main/resources/scripts/managix b/asterix-installer/src/main/resources/scripts/managix
index cd0794e..3d2ac10 100755
--- a/asterix-installer/src/main/resources/scripts/managix
+++ b/asterix-installer/src/main/resources/scripts/managix
@@ -4,7 +4,6 @@
exit 1
fi
-VERSION=0.0.4-SNAPSHOT
for jar in `ls $MANAGIX_HOME/lib/*.jar`
do
diff --git a/asterix-installer/src/main/resources/scripts/validate_ssh.sh b/asterix-installer/src/main/resources/scripts/validate_ssh.sh
new file mode 100755
index 0000000..de6d79c
--- /dev/null
+++ b/asterix-installer/src/main/resources/scripts/validate_ssh.sh
@@ -0,0 +1,9 @@
+USERNAME=$1
+shift 1
+numargs=$#
+for ((i=1 ; i <= numargs ; i=i+1))
+do
+ host=$1
+ ssh -l $USERNAME -oNumberOfPasswordPrompts=0 $host "echo $host"
+ shift 1
+done
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
index 0618f28..f83118b 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
@@ -17,9 +17,14 @@
@Override
public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
- ps.print("rectangle(\"" + ADoubleSerializerDeserializer.getDouble(b, s + 1) + ",");
+ ps.print("rectangle(\"");
+ ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 1));
+ ps.print(",");
ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 9));
- ps.print(" | " + ADoubleSerializerDeserializer.getDouble(b, s + 17) + ",");
- ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 25) + "\")");
+ ps.print(" ");
+ ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 17));
+ ps.print(",");
+ ps.print(ADoubleSerializerDeserializer.getDouble(b, s + 25));
+ ps.print("\")");
}
}
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
index b5b7303..2e4f72c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AObjectSerializerDeserializer.java
@@ -23,6 +23,7 @@
import edu.uci.ics.asterix.om.base.APoint3D;
import edu.uci.ics.asterix.om.base.APolygon;
import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.ARectangle;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.base.ATime;
import edu.uci.ics.asterix.om.base.AUnorderedList;
@@ -97,6 +98,9 @@
case LINE: {
return ALineSerializerDeserializer.INSTANCE.deserialize(in);
}
+ case RECTANGLE: {
+ return ARectangleSerializerDeserializer.INSTANCE.deserialize(in);
+ }
case POLYGON: {
return APolygonSerializerDeserializer.INSTANCE.deserialize(in);
}
@@ -199,6 +203,10 @@
ALineSerializerDeserializer.INSTANCE.serialize((ALine) instance, out);
break;
}
+ case RECTANGLE: {
+ ARectangleSerializerDeserializer.INSTANCE.serialize((ARectangle) instance, out);
+ break;
+ }
case POLYGON: {
APolygonSerializerDeserializer.INSTANCE.serialize((APolygon) instance, out);
break;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index 24649d9..fa3179a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -61,6 +61,7 @@
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAPolygonTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalARectangleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalAStringTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.OptionalATemporalInstanceTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalATimeTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OrderedListConstructorResultType;
import edu.uci.ics.asterix.om.typecomputer.impl.OrderedListOfAInt32TypeComputer;
@@ -459,6 +460,10 @@
"second", 1);
public static final FunctionIdentifier ACCESSOR_TEMPORAL_MILLISEC = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "millisecond", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-interval-start", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-interval-end", 1);
// Temporal functions
public static final FunctionIdentifier DATE_FROM_UNIX_TIME_IN_DAYS = new FunctionIdentifier(
@@ -774,6 +779,8 @@
add(ACCESSOR_TEMPORAL_MIN, OptionalAInt32TypeComputer.INSTANCE);
add(ACCESSOR_TEMPORAL_SEC, OptionalAInt32TypeComputer.INSTANCE);
add(ACCESSOR_TEMPORAL_MILLISEC, OptionalAInt32TypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_INTERVAL_START, OptionalATemporalInstanceTypeComputer.INSTANCE);
+ add(ACCESSOR_TEMPORAL_INTERVAL_END, OptionalATemporalInstanceTypeComputer.INSTANCE);
// temporal functions
add(DATE_FROM_UNIX_TIME_IN_DAYS, OptionalADateTypeComputer.INSTANCE);
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalATemporalInstanceTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalATemporalInstanceTypeComputer.java
new file mode 100644
index 0000000..eda385f
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/OptionalATemporalInstanceTypeComputer.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class OptionalATemporalInstanceTypeComputer implements IResultTypeComputer {
+
+ public static final OptionalATemporalInstanceTypeComputer INSTANCE = new OptionalATemporalInstanceTypeComputer();
+
+ private OptionalATemporalInstanceTypeComputer() {
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer#computeType(edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression, edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment, edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider)
+ */
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ List<IAType> unionList = new ArrayList<IAType>();
+ unionList.add(BuiltinType.ANULL);
+ unionList.add(BuiltinType.ADATE);
+ unionList.add(BuiltinType.ATIME);
+ unionList.add(BuiltinType.ADATETIME);
+ return new AUnionType(unionList, "OptionalTemporalInstance");
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
index 05391de..6b7a9d2 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_DAY;
// allowed input types
private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
index eba012f..38e83db 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hour", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_HOUR;
// allowed input types
private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
new file mode 100644
index 0000000..e8ea6d5
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalIntervalEndAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END;
+
+ private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalIntervalEndAccessor();
+ }
+ };
+
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible output
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATE);
+ private AMutableDate aDate = new AMutableDate(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private AMutableDateTime aDateTime = new AMutableDateTime(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ATIME);
+ private AMutableTime aTime = new AMutableTime(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+ if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
+ long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, 1);
+ if (timeType == SER_DATE_TYPE_TAG) {
+ aDate.setValue((int) (endTime));
+ dateSerde.serialize(aDate, out);
+ } else if (timeType == SER_TIME_TYPE_TAG) {
+ aTime.setValue((int) (endTime));
+ timeSerde.serialize(aTime, out);
+ } else if (timeType == SER_DATETIME_TYPE_TAG) {
+ aDateTime.setValue(endTime);
+ datetimeSerde.serialize(aDateTime, out);
+ }
+ } else {
+ throw new AlgebricksException(FID.getName() + ": expects NULL/INTERVAL, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
new file mode 100644
index 0000000..ef76934
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.evaluators.accessors;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AIntervalSerializerDeserializer;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ADate;
+import edu.uci.ics.asterix.om.base.ADateTime;
+import edu.uci.ics.asterix.om.base.AMutableDate;
+import edu.uci.ics.asterix.om.base.AMutableDateTime;
+import edu.uci.ics.asterix.om.base.AMutableTime;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.ATime;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+public class TemporalIntervalStartAccessor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START;
+
+ private static final byte SER_INTERVAL_TYPE_TAG = ATypeTag.INTERVAL.serialize();
+ private static final byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private static final byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+
+ @Override
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new TemporalIntervalStartAccessor();
+ }
+ };
+
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) throws AlgebricksException {
+ return new ICopyEvaluatorFactory() {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+
+ private DataOutput out = output.getDataOutput();
+
+ private ArrayBackedValueStorage argOut = new ArrayBackedValueStorage();
+
+ private ICopyEvaluator eval = args[0].createEvaluator(argOut);
+
+ // possible output
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADate> dateSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATE);
+ private AMutableDate aDate = new AMutableDate(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ADateTime> datetimeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private AMutableDateTime aDateTime = new AMutableDateTime(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ATime> timeSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ATIME);
+ private AMutableTime aTime = new AMutableTime(0);
+ @SuppressWarnings("unchecked")
+ private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+ argOut.reset();
+ eval.evaluate(tuple);
+ byte[] bytes = argOut.getByteArray();
+
+ try {
+ if (bytes[0] == SER_NULL_TYPE_TAG) {
+ nullSerde.serialize(ANull.NULL, out);
+ return;
+ } else if (bytes[0] == SER_INTERVAL_TYPE_TAG) {
+ byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes, 1);
+ long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes, 1);
+ if (timeType == SER_DATE_TYPE_TAG) {
+ aDate.setValue((int) (startTime));
+ dateSerde.serialize(aDate, out);
+ } else if (timeType == SER_TIME_TYPE_TAG) {
+ aTime.setValue((int) (startTime));
+ timeSerde.serialize(aTime, out);
+ } else if (timeType == SER_DATETIME_TYPE_TAG) {
+ aDateTime.setValue(startTime);
+ datetimeSerde.serialize(aDateTime, out);
+ }
+ } else {
+ throw new AlgebricksException(FID.getName() + ": expects NULL/INTERVAL, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes[0]));
+ }
+ } catch (IOException e) {
+ throw new AlgebricksException(e);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ /* (non-Javadoc)
+ * @see edu.uci.ics.asterix.om.functions.AbstractFunctionDescriptor#getIdentifier()
+ */
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return FID;
+ }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
index ecc1a35..0d6e99b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "millisecond", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MILLISEC;
// allowed input types
private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
index f436016..b61c843 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "minute", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MIN;
// allowed input types
private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
index fb68f7d..b18afe3 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "month", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_MONTH;
// allowed input types
private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
index 3b9ee95..8a2f240 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -44,7 +44,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "second", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_SEC;
// allowed input types
private static final byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
index 41dacfd..a940500 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.IOException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
@@ -26,6 +25,7 @@
import edu.uci.ics.asterix.om.base.AMutableInt32;
import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.temporal.GregorianCalendarSystem;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -45,7 +45,7 @@
private static final long serialVersionUID = 1L;
- private static final FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year", 1);
+ private static final FunctionIdentifier FID = AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_YEAR;
// allowed input types
private static final byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
index 0414874..8daf9a6 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateConstructorDescriptor.java
@@ -18,6 +18,7 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableInterval;
@@ -28,6 +29,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -45,6 +47,7 @@
"interval-from-date", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -90,34 +93,45 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ return;
+ }
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ intervalStart = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
-
- // start date
- long intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
stringLength) / GregorianCalendarSystem.CHRONON_OF_DAY;
-
- // end date
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
- + (argOut1.getByteArray()[2] & 0xff << 0);
-
- long intervalEnd = ADateParserFactory.parseDatePart(argOut1.getByteArray(), 3,
- stringLength) / GregorianCalendarSystem.CHRONON_OF_DAY;
-
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
- throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATE for the first argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
}
+ if (argOut1.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ intervalEnd = ADateSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ + (argOut1.getByteArray()[2] & 0xff << 0);
+ intervalEnd = ADateParserFactory.parseDatePart(argOut1.getByteArray(), 3, stringLength)
+ / GregorianCalendarSystem.CHRONON_OF_DAY;
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATE for the second argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
+ }
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ intervalSerde.serialize(aInterval, out);
+
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
} catch (Exception e2) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
index 6b3e2b6..78d2e4a 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromDateTimeConstructorDescriptor.java
@@ -18,6 +18,7 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableInterval;
@@ -28,6 +29,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -45,6 +47,7 @@
"interval-from-datetime", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -67,7 +70,7 @@
private ArrayBackedValueStorage argOut1 = new ArrayBackedValueStorage();
private ICopyEvaluator eval0 = args[0].createEvaluator(argOut0);
private ICopyEvaluator eval1 = args[1].createEvaluator(argOut1);
- private String errorMessage = "This can not be an instance of interval (from Date)";
+ private String errorMessage = "This can not be an instance of interval (from DateTime)";
//TODO: Where to move and fix these?
private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
@SuppressWarnings("unchecked")
@@ -90,12 +93,17 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ return;
+ }
+
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ intervalStart = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// start datetime
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
-
// get offset for time part: +1 if it is negative (-)
short timeOffset = (short) ((argOut0.getByteArray()[3] == '-') ? 1 : 0);
timeOffset += 8;
@@ -105,18 +113,23 @@
throw new AlgebricksException(errorMessage + ": missing T");
}
}
-
- long intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
- timeOffset);
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3, timeOffset);
intervalStart += ATimeParserFactory.parseTimePart(argOut0.getByteArray(),
3 + timeOffset + 1, stringLength - timeOffset - 1);
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATETIME for the first argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+ }
- // end datetime
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ if (argOut1.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ intervalEnd = ADateTimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // end datetime
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
-
// get offset for time part: +1 if it is negative (-)
- timeOffset = (short) ((argOut1.getByteArray()[3] == '-') ? 1 : 0);
+ short timeOffset = (short) ((argOut1.getByteArray()[3] == '-') ? 1 : 0);
timeOffset += 8;
if (argOut1.getByteArray()[3 + timeOffset] != 'T') {
timeOffset += 2;
@@ -124,22 +137,22 @@
throw new AlgebricksException(errorMessage + ": missing T");
}
}
-
- long intervalEnd = ADateParserFactory.parseDatePart(argOut1.getByteArray(), 3,
- timeOffset);
+ intervalEnd = ADateParserFactory.parseDatePart(argOut1.getByteArray(), 3, timeOffset);
intervalEnd += ATimeParserFactory.parseTimePart(argOut1.getByteArray(),
3 + timeOffset + 1, stringLength - timeOffset - 1);
-
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
- throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATETIME for the second argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ intervalSerde.serialize(aInterval, out);
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
index 042a33b..8816160 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalFromTimeConstructorDescriptor.java
@@ -18,6 +18,7 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableInterval;
@@ -28,6 +29,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -45,6 +47,7 @@
"interval-from-time", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -90,39 +93,57 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ return;
+ }
+
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ intervalStart = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// start date
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
- long intervalStart = ATimeParserFactory.parseTimePart(argOut0.getByteArray(), 3,
+ intervalStart = ATimeParserFactory.parseTimePart(argOut0.getByteArray(), 3,
stringLength);
- if (intervalStart < 0) {
- intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
- }
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/TIME for the first argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+ }
- // end date
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ if (intervalStart < 0) {
+ intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ if (argOut1.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ intervalEnd = ATimeSerializerDeserializer.getChronon(argOut1.getByteArray(), 1);
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ // end time
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
- long intervalEnd = ATimeParserFactory.parseTimePart(argOut1.getByteArray(), 3,
- stringLength);
- if (intervalEnd < 0) {
- intervalEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
- }
+ intervalEnd = ATimeParserFactory.parseTimePart(argOut1.getByteArray(), 3, stringLength);
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
- throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/TIME for the second argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
+ if (intervalEnd < 0) {
+ intervalEnd += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
} catch (Exception e2) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
index d5a0db9..7ea8e90 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
@@ -18,6 +18,8 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableDuration;
@@ -31,6 +33,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -48,6 +51,8 @@
"interval-start-from-date", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATE_TYPE_TAG = ATypeTag.DATE.serialize();
+ private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -94,40 +99,56 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
+ return;
+ }
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_DATE_TYPE_TAG) {
+ intervalStart = ADateSerializerDeserializer.getChronon(argOut0.getByteArray(), 1)
+ * GregorianCalendarSystem.CHRONON_OF_DAY;
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// start date
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
-
- long intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
stringLength);
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATE for the first argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+ }
+ if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1),
+ ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1));
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// duration
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
ADurationParserFactory
.parseDuration(argOut1.getByteArray(), 3, stringLength, aDuration);
-
- long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
aDuration.getMonths(), aDuration.getMilliseconds());
-
- intervalStart = GregorianCalendarSystem.getChrononInDays(intervalStart);
- intervalEnd = GregorianCalendarSystem.getChrononInDays(intervalEnd);
-
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
- throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DURATION for the second argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
+ intervalStart = GregorianCalendarSystem.getChrononInDays(intervalStart);
+ intervalEnd = GregorianCalendarSystem.getChrononInDays(intervalEnd);
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATE.serialize());
+ intervalSerde.serialize(aInterval, out);
+
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
} catch (Exception e2) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
index d821fc3..f16dccb 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
@@ -18,6 +18,8 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableDuration;
@@ -31,6 +33,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -48,6 +51,8 @@
"interval-start-from-datetime", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_DATETIME_TYPE_TAG = ATypeTag.DATETIME.serialize();
+ private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -94,10 +99,14 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
- // start date
+ return;
+ }
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_DATETIME_TYPE_TAG) {
+ intervalStart = ADateTimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
@@ -113,32 +122,44 @@
}
}
- long intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3,
- timeOffset);
+ intervalStart = ADateParserFactory.parseDatePart(argOut0.getByteArray(), 3, timeOffset);
intervalStart += ATimeParserFactory.parseTimePart(argOut0.getByteArray(),
3 + timeOffset + 1, stringLength - timeOffset - 1);
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DATETIME for the first argument but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+ }
+
+ if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1),
+ ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1));
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// duration
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
ADurationParserFactory
.parseDuration(argOut1.getByteArray(), 3, stringLength, aDuration);
- long intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
aDuration.getMonths(), aDuration.getMilliseconds());
-
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
- throw new AlgebricksException("Wrong format for interval constructor from dates.");
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/DURATION for the second argument but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut1.getByteArray()[0]));
}
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.DATETIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
} catch (Exception e2) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
index 8135598..35bfe17 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
@@ -18,6 +18,8 @@
import java.io.IOException;
import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInterval;
import edu.uci.ics.asterix.om.base.AMutableDuration;
@@ -31,6 +33,7 @@
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -48,6 +51,8 @@
"interval-start-from-time", 2);
private final static byte SER_STRING_TYPE_TAG = ATypeTag.STRING.serialize();
private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+ private final static byte SER_TIME_TYPE_TAG = ATypeTag.TIME.serialize();
+ private final static byte SER_DURATION_TYPE_TAG = ATypeTag.DURATION.serialize();
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
public IFunctionDescriptor createFunctionDescriptor() {
@@ -94,51 +99,71 @@
if (argOut0.getByteArray()[0] == SER_NULL_TYPE_TAG
|| argOut1.getByteArray()[0] == SER_NULL_TYPE_TAG) {
nullSerde.serialize(ANull.NULL, out);
- } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG
- && argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
- // start time
+ return;
+ }
+ long intervalStart = 0, intervalEnd = 0;
+
+ if (argOut0.getByteArray()[0] == SER_TIME_TYPE_TAG) {
+ intervalStart = ATimeSerializerDeserializer.getChronon(argOut0.getByteArray(), 1);
+ } else if (argOut0.getByteArray()[0] == SER_STRING_TYPE_TAG) {
int stringLength = (argOut0.getByteArray()[1] & 0xff << 8)
+ (argOut0.getByteArray()[2] & 0xff << 0);
- int intervalStart = ATimeParserFactory.parseTimePart(argOut0.getByteArray(), 3,
+ intervalStart = ATimeParserFactory.parseTimePart(argOut0.getByteArray(), 3,
stringLength);
+ } else {
+ throw new AlgebricksException(FID.getName()
+ + ": expects NULL/STRING/TIME for the first argument, but got "
+ + EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argOut0.getByteArray()[0]));
+ }
- if (intervalStart < 0) {
- intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ if (intervalStart < 0) {
+ intervalStart += GregorianCalendarSystem.CHRONON_OF_DAY;
+ }
+
+ if (argOut1.getByteArray()[0] == SER_DURATION_TYPE_TAG) {
+
+ if (ADurationSerializerDeserializer.getYearMonth(argOut1.getByteArray(), 1) != 0) {
+ throw new AlgebricksException(FID.getName()
+ + ": cannot add a year-month duration to a time value.");
}
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
+ ADurationSerializerDeserializer.getDayTime(argOut1.getByteArray(), 1));
+
+ } else if (argOut1.getByteArray()[0] == SER_STRING_TYPE_TAG) {
// duration
- stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ int stringLength = (argOut1.getByteArray()[1] & 0xff << 8)
+ (argOut1.getByteArray()[2] & 0xff << 0);
ADurationParserFactory
.parseDuration(argOut1.getByteArray(), 3, stringLength, aDuration);
if (aDuration.getMonths() != 0) {
- throw new AlgebricksException("Cannot add a year-month duration to a time value.");
+ throw new AlgebricksException(FID.getName()
+ + ": cannot add a year-month duration to a time value.");
}
- int intervalEnd = DurationArithmeticOperations.addDuration(intervalStart,
+ intervalEnd = DurationArithmeticOperations.addDuration(intervalStart, 0,
aDuration.getMilliseconds());
-
- if (intervalEnd > GregorianCalendarSystem.CHRONON_OF_DAY) {
-
- intervalEnd = intervalEnd - (int) (GregorianCalendarSystem.CHRONON_OF_DAY);
- }
-
- if (intervalEnd < intervalStart) {
- throw new AlgebricksException(
- "Interval end must not be less than the interval start.");
- }
-
- aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
- intervalSerde.serialize(aInterval, out);
} else {
throw new AlgebricksException("Wrong format for interval constructor from dates.");
}
+ if (intervalEnd > GregorianCalendarSystem.CHRONON_OF_DAY) {
+ intervalEnd = intervalEnd % (int) (GregorianCalendarSystem.CHRONON_OF_DAY);
+ }
+
+ if (intervalEnd < intervalStart) {
+ throw new AlgebricksException(FID.getName()
+ + ": interval end must not be less than the interval start.");
+ }
+
+ aInterval.setValue(intervalStart, intervalEnd, ATypeTag.TIME.serialize());
+ intervalSerde.serialize(aInterval, out);
+
} catch (IOException e1) {
throw new AlgebricksException(errorMessage);
} catch (Exception e2) {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index f12ea91..a61208d 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -69,6 +69,8 @@
import edu.uci.ics.asterix.runtime.aggregates.stream.NonEmptyStreamAggregateDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalDayAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalHourAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalIntervalEndAccessor;
+import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalIntervalStartAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMillisecondAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMinuteAccessor;
import edu.uci.ics.asterix.runtime.evaluators.accessors.TemporalMonthAccessor;
@@ -464,6 +466,8 @@
temp.add(TemporalMinuteAccessor.FACTORY);
temp.add(TemporalSecondAccessor.FACTORY);
temp.add(TemporalMillisecondAccessor.FACTORY);
+ temp.add(TemporalIntervalStartAccessor.FACTORY);
+ temp.add(TemporalIntervalEndAccessor.FACTORY);
// Temporal functions
temp.add(DateFromUnixTimeInDaysDescriptor.FACTORY);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
index 718ea3f..a78f2ff 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexInstantSearchOperationCallback.java
@@ -48,7 +48,7 @@
public void reconcile(ITupleReference tuple) throws HyracksDataException {
int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
try {
- lockManager.instantLock(datasetId, pkHash, LockMode.S, txnCtx);
+ lockManager.lock(datasetId, pkHash, LockMode.S, txnCtx);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
@@ -58,4 +58,14 @@
public void cancel(ITupleReference tuple) throws HyracksDataException {
//no op
}
+
+ @Override
+ public void complete(ITupleReference tuple) throws HyracksDataException {
+ int pkHash = computePrimaryKeyHashValue(tuple, primaryKeyFields);
+ try {
+ lockManager.unlock(datasetId, pkHash, txnCtx);
+ } catch (ACIDException e) {
+ throw new HyracksDataException(e);
+ }
+ }
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
index 4760307..62ec3c9 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/PrimaryIndexSearchOperationCallback.java
@@ -28,8 +28,8 @@
*/
public class PrimaryIndexSearchOperationCallback extends AbstractOperationCallback implements ISearchOperationCallback {
- public PrimaryIndexSearchOperationCallback(int datasetId, int[] entityIdFields,
- ILockManager lockManager, TransactionContext txnCtx) {
+ public PrimaryIndexSearchOperationCallback(int datasetId, int[] entityIdFields, ILockManager lockManager,
+ TransactionContext txnCtx) {
super(datasetId, entityIdFields, txnCtx, lockManager);
}
@@ -62,4 +62,9 @@
throw new HyracksDataException(e);
}
}
+
+ @Override
+ public void complete(ITupleReference tuple) throws HyracksDataException {
+ //no op
+ }
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
index 092f99c..5b55e9a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexModificationOperationCallback.java
@@ -18,12 +18,14 @@
import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
import edu.uci.ics.asterix.transaction.management.service.locking.ILockManager;
import edu.uci.ics.asterix.transaction.management.service.logging.IndexLogger;
+import edu.uci.ics.asterix.transaction.management.service.transaction.IResourceManager.ResourceType;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionContext;
import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IModificationOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.tuples.LSMBTreeTupleReference;
/**
* Secondary-index modifications do not require any locking.
@@ -40,8 +42,8 @@
protected final TransactionSubsystem txnSubsystem;
public SecondaryIndexModificationOperationCallback(int datasetId, int[] primaryKeyFields,
- TransactionContext txnCtx, ILockManager lockManager,
- TransactionSubsystem txnSubsystem, long resourceId, byte resourceType, IndexOperation indexOp) {
+ TransactionContext txnCtx, ILockManager lockManager, TransactionSubsystem txnSubsystem, long resourceId,
+ byte resourceType, IndexOperation indexOp) {
super(datasetId, primaryKeyFields, txnCtx, lockManager);
this.resourceId = resourceId;
this.resourceType = resourceType;
@@ -60,8 +62,21 @@
IndexLogger logger = txnSubsystem.getTreeLoggerRepository().getIndexLogger(resourceId, resourceType);
int pkHash = computePrimaryKeyHashValue(after, primaryKeyFields);
try {
+ IndexOperation effectiveOldOp;
+ if (resourceType == ResourceType.LSM_BTREE) {
+ LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) before;
+ if (before == null) {
+ effectiveOldOp = IndexOperation.NOOP;
+ } else if (lsmBTreeTuple != null && lsmBTreeTuple.isAntimatter()) {
+ effectiveOldOp = IndexOperation.DELETE;
+ } else {
+ effectiveOldOp = IndexOperation.INSERT;
+ }
+ } else {
+ effectiveOldOp = oldOp;
+ }
logger.generateLogRecord(txnSubsystem, txnCtx, datasetId.getId(), pkHash, resourceId, indexOp, after,
- oldOp, before);
+ effectiveOldOp, before);
} catch (ACIDException e) {
throw new HyracksDataException(e);
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
index c53b651..4c8a583 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallback.java
@@ -44,4 +44,9 @@
// Do nothing.
}
+ @Override
+ public void complete(ITupleReference tuple) throws HyracksDataException {
+ // Do nothing.
+ }
+
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 838dc6d..0a12ba1 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -87,7 +87,8 @@
//#. load all local resources.
File rootDirFile = new File(this.rootDir);
if (!rootDirFile.exists()) {
- throw new HyracksDataException(rootDirFile.getAbsolutePath() + "doesn't exist.");
+ //rootDir may not exist if this node is not the metadata node and doesn't have any user data.
+ return;
}
FilenameFilter filter = new FilenameFilter() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
index b8820c4..5d81e8a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityInfoManager.java
@@ -164,6 +164,7 @@
allocChild = pArray.size() - 1;
}
}
+
occupiedSlots++;
return pArray.get(allocChild).allocate() + allocChild * ChildEntityInfoArrayManager.NUM_OF_SLOTS;
}
@@ -230,45 +231,35 @@
*/
private void shrink() {
int i;
- boolean bContiguous = true;
- int decreaseCount = 0;
+ int removeCount = 0;
int size = pArray.size();
int maxDecreaseCount = size / 2;
ChildEntityInfoArrayManager child;
- for (i = size - 1; i >= 0; i--) {
- child = pArray.get(i);
- if (child.isEmpty() || child.isDeinitialized()) {
- if (bContiguous) {
- pArray.remove(i);
- if (++decreaseCount == maxDecreaseCount) {
- break;
- }
- } else {
- bContiguous = false;
- if (child.isEmpty()) {
- child.deinitialize();
- if (++decreaseCount == maxDecreaseCount) {
- break;
- }
- }
- }
- } else {
- bContiguous = false;
+
+ //The first buffer is never deinitialized.
+ for (i = 1; i < size; i++) {
+ if (pArray.get(i).isEmpty()) {
+ pArray.get(i).deinitialize();
}
}
- //reset allocChild when the child is removed or deinitialized.
- size = pArray.size();
- if (allocChild >= size || pArray.get(allocChild).isDeinitialized()) {
- //set allocChild to any initialized one.
- //It is guaranteed that there is at least one initialized child.
- for (i = 0; i < size; i++) {
- if (!pArray.get(i).isDeinitialized()) {
- allocChild = i;
+ //remove the empty buffers from the end
+ for (i = size - 1; i >= 1; i--) {
+ child = pArray.get(i);
+ if (child.isDeinitialized()) {
+ pArray.remove(i);
+ if (++removeCount == maxDecreaseCount) {
break;
}
+ } else {
+ break;
}
}
+
+ //reset allocChild to the first buffer
+ allocChild = 0;
+
+ isShrinkTimerOn = false;
}
public String prettyPrint() {
@@ -538,7 +529,7 @@
freeSlotNum = getNextFreeSlot(currentSlot);
occupiedSlots++;
if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName()+" entity allocate: "+currentSlot);
+ System.out.println(Thread.currentThread().getName() + " entity allocate: " + currentSlot);
}
return currentSlot;
}
@@ -548,7 +539,7 @@
freeSlotNum = slotNum;
occupiedSlots--;
if (LockManager.IS_DEBUG_MODE) {
- System.out.println(Thread.currentThread().getName()+" entity deallocate: "+slotNum);
+ System.out.println(Thread.currentThread().getName() + " entity deallocate: " + slotNum);
}
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
index 59c20f2..ca00aa2 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/EntityLockInfoManager.java
@@ -230,45 +230,35 @@
*/
private void shrink() {
int i;
- boolean bContiguous = true;
- int decreaseCount = 0;
+ int removeCount = 0;
int size = pArray.size();
int maxDecreaseCount = size / 2;
ChildEntityLockInfoArrayManager child;
- for (i = size - 1; i >= 0; i--) {
- child = pArray.get(i);
- if (child.isEmpty() || child.isDeinitialized()) {
- if (bContiguous) {
- pArray.remove(i);
- if (++decreaseCount == maxDecreaseCount) {
- break;
- }
- } else {
- bContiguous = false;
- if (child.isEmpty()) {
- child.deinitialize();
- if (++decreaseCount == maxDecreaseCount) {
- break;
- }
- }
- }
- } else {
- bContiguous = false;
+
+ //The first buffer is never deinitialized.
+ for (i = 1; i < size; i++) {
+ if (pArray.get(i).isEmpty()) {
+ pArray.get(i).deinitialize();
}
}
- //reset allocChild when the child is removed or deinitialized.
- size = pArray.size();
- if (allocChild >= size || pArray.get(allocChild).isDeinitialized()) {
- //set allocChild to any initialized one.
- //It is guaranteed that there is at least one initialized child.
- for (i = 0; i < size; i++) {
- if (!pArray.get(i).isDeinitialized()) {
- allocChild = i;
+ //remove the empty buffers from the end
+ for (i = size - 1; i >= 1; i--) {
+ child = pArray.get(i);
+ if (child.isDeinitialized()) {
+ pArray.remove(i);
+ if (++removeCount == maxDecreaseCount) {
break;
}
+ } else {
+ break;
}
}
+
+ //reset allocChild to the first buffer
+ allocChild = 0;
+
+ isShrinkTimerOn = false;
}
public String prettyPrint() {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
index aeb54b5..a354d2a 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockManager.java
@@ -44,8 +44,14 @@
public class LockManager implements ILockManager {
public static final boolean IS_DEBUG_MODE = false;//true
+ //This variable indicates that the dataset granule X lock request is allowed when
+ //there are concurrent lock requests. As of 4/16/2013, we only allow the dataset granule X lock
+ //during DDL operation which is preceded by holding X latch on metadata.
+ //Therefore, we don't allow the concurrent lock requests with the dataset granule X lock.
+ public static final boolean ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS = false;
public static final boolean ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET = true;
+ //Threshold must be greater than 1 and should be reasonably large enough not to escalate too soon.
public static final int ESCALATE_TRHESHOLD_ENTITY_TO_DATASET = 1000;
private static final int DO_ESCALATE = 0;
private static final int ESCALATED = 1;
@@ -103,11 +109,11 @@
@Override
public void lock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
throws ACIDException {
- internalLock(datasetId, entityHashValue, lockMode, txnContext);
+ internalLock(datasetId, entityHashValue, lockMode, txnContext, false);
}
- private void internalLock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
- throws ACIDException {
+ private void internalLock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext,
+ boolean isInstant) throws ACIDException {
JobId jobId = txnContext.getJobId();
int jId = jobId.getId(); //int-type jobId
@@ -117,71 +123,113 @@
DatasetLockInfo dLockInfo = null;
JobInfo jobInfo;
byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
- boolean isEscalated = false;
+ boolean doEscalate = false;
+ boolean caughtLockMgrLatchException = false;
latchLockTable();
- validateJob(txnContext);
+ try {
+ validateJob(txnContext);
- if (IS_DEBUG_MODE) {
- trackLockRequest("Requested", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
- dLockInfo, eLockInfo);
- }
-
- dLockInfo = datasetResourceHT.get(datasetId);
- jobInfo = jobHT.get(jobId);
-
- if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
- int upgradeStatus = needUpgradeFromEntityToDataset(jobInfo, dId, lockMode);
- switch (upgradeStatus) {
- case DO_ESCALATE:
- entityHashValue = -1;
- isEscalated = true;
- break;
-
- case ESCALATED:
- unlatchLockTable();
- return;
-
- default:
- break;
- }
- }
- }
-
- //#. if the datasetLockInfo doesn't exist in datasetResourceHT
- if (dLockInfo == null || dLockInfo.isNoHolder()) {
- if (dLockInfo == null) {
- dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
- datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datsetId obj should be created
- }
- entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
-
- //if dataset-granule lock
- if (entityHashValue == -1) { //-1 stands for dataset-granule
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- dLockInfo.increaseLockCount(datasetLockMode);
- dLockInfo.addHolder(entityInfo);
- } else {
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- dLockInfo.increaseLockCount(datasetLockMode);
- //add entityLockInfo
- eLockInfo = entityLockInfoManager.allocate();
- dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
- entityInfoManager.increaseEntityLockCount(entityInfo);
- entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
- entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Requested", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
}
- if (jobInfo == null) {
- jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
- jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
- }
- jobInfo.addHoldingResource(entityInfo);
+ dLockInfo = datasetResourceHT.get(datasetId);
+ jobInfo = jobHT.get(jobId);
if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (datasetLockMode == LockMode.IS) {
- jobInfo.increaseDatasetISLockCount(dId);
+ if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
+ int escalateStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
+ switch (escalateStatus) {
+ case DO_ESCALATE:
+ entityHashValue = -1;
+ doEscalate = true;
+ break;
+
+ case ESCALATED:
+ return;
+
+ default:
+ break;
+ }
+ }
+ }
+
+ //#. if the datasetLockInfo doesn't exist in datasetResourceHT
+ if (dLockInfo == null || dLockInfo.isNoHolder()) {
+ if (dLockInfo == null) {
+ dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
+ datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datasetId obj should be created
+ }
+ entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
+
+ //if dataset-granule lock
+ if (entityHashValue == -1) { //-1 stands for dataset-granule
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ dLockInfo.increaseLockCount(datasetLockMode);
+ dLockInfo.addHolder(entityInfo);
+ } else {
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ dLockInfo.increaseLockCount(datasetLockMode);
+ //add entityLockInfo
+ eLockInfo = entityLockInfoManager.allocate();
+ dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
+ entityInfoManager.increaseEntityLockCount(entityInfo);
+ entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
+ entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+ }
+
+ if (jobInfo == null) {
+ jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
+ jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
+ }
+ jobInfo.addHoldingResource(entityInfo);
+
+ if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+ if (!isInstant && datasetLockMode == LockMode.IS) {
+ jobInfo.increaseDatasetISLockCount(dId);
+ if (doEscalate) {
+ throw new IllegalStateException(
+ "ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
+ + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+ }
+ }
+ }
+
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
+ }
+
+ return;
+ }
+
+ //#. the datasetLockInfo exists in datasetResourceHT.
+ //1. handle dataset-granule lock
+ entityInfo = lockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
+
+ //2. handle entity-granule lock
+ if (entityHashValue != -1) {
+ lockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
+ }
+
+ if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+ if (!isInstant) {
+ if (doEscalate) {
+ //jobInfo must not be null.
+ assert jobInfo != null;
+ jobInfo.increaseDatasetISLockCount(dId);
+ //release pre-acquired locks
+ releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
+ } else if (datasetLockMode == LockMode.IS) {
+ if (jobInfo == null) {
+ jobInfo = jobHT.get(jobId);
+ //jobInfo must not be null;
+ assert jobInfo != null;
+ }
+ jobInfo.increaseDatasetISLockCount(dId);
+ }
}
}
@@ -189,34 +237,18 @@
trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext,
dLockInfo, eLockInfo);
}
-
- unlatchLockTable();
- return;
- }
-
- //#. the datasetLockInfo exists in datasetResourceHT.
- //1. handle dataset-granule lock
- entityInfo = lockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
-
- //2. handle entity-granule lock
- if (entityHashValue != -1) {
- lockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
- }
-
- if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (isEscalated) {
- releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
+ } catch (Exception e) {
+ if (e instanceof LockMgrLatchHandlerException) {
+ // don't unlatch
+ caughtLockMgrLatchException = true;
+ throw new ACIDException(((LockMgrLatchHandlerException) e).getInternalException());
}
- if (jobInfo != null && datasetLockMode == LockMode.IS) {
- jobInfo.increaseDatasetISLockCount(dId);
+ } finally {
+ if (!caughtLockMgrLatchException) {
+ unlatchLockTable();
}
}
- if (IS_DEBUG_MODE) {
- trackLockRequest("Granted", RequestType.LOCK, datasetId, entityHashValue, lockMode, txnContext, dLockInfo,
- eLockInfo);
- }
- unlatchLockTable();
return;
}
@@ -247,7 +279,7 @@
}
}
- private int needUpgradeFromEntityToDataset(JobInfo jobInfo, int datasetId, byte lockMode) {
+ private int needEscalateFromEntityToDataset(JobInfo jobInfo, int datasetId, byte lockMode) {
//we currently allow upgrade only if the lockMode is S.
if (lockMode != LockMode.S) {
return DONOT_ESCALATE;
@@ -265,14 +297,9 @@
private void validateJob(TransactionContext txnContext) throws ACIDException {
if (txnContext.getTxnState() == TransactionState.ABORTED) {
- unlatchLockTable();
throw new ACIDException("" + txnContext.getJobId() + " is in ABORTED state.");
} else if (txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS) {
- try {
- requestAbort(txnContext);
- } finally {
- unlatchLockTable();
- }
+ requestAbort(txnContext);
}
}
@@ -384,32 +411,26 @@
///////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////
- //[Notice]
- //There has been no same caller as (jId, dId, entityHashValue) triplet.
- //But there could be the same caller as (jId, dId) pair.
- //For example, two requests (J1, D1, E1) and (J1, D1, E2) are considered as duplicated call in dataset-granule perspective.
- //Therefore, the above duplicated call case is covered in the following code.
- //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
- //if (jobInfo.isDatasetLockGranted(dId, datasetLockMode)) {
- if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
- if (dLockInfo.isCompatible(datasetLockMode)) {
- //this is duplicated call
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- if (entityHashValue == -1) {
- dLockInfo.increaseLockCount(datasetLockMode);
- dLockInfo.addHolder(entityInfo);
- } else {
- dLockInfo.increaseLockCount(datasetLockMode);
- //IS and IX holders are implicitly handled.
- }
- //add entityInfo to JobInfo's holding-resource list
- jobInfo.addHoldingResource(entityInfo);
+ if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+ //The following case only may occur when the dataset level X lock is requested
+ //with the other lock
- return entityInfo;
- } else {
- //considered as upgrader
- waiterCount = handleLockWaiter(dLockInfo, -1, entityInfo, true, true, txnContext, jobInfo, -1);
- if (waiterCount > 0) {
+ //[Notice]
+ //There has been no same caller as (jId, dId, entityHashValue) triplet.
+ //But there could be the same caller in terms of (jId, dId) pair.
+ //For example,
+ //1) (J1, D1, E1) acquires IS in Dataset D1
+ //2) (J2, D1, -1) requests X in Dataset D1, but waits
+ //3) (J1, D1, E2) requests IS in Dataset D1, but should wait
+ //The 3) may cause deadlock if 1) and 3) are under the same thread.
+ //Even if (J1, D1, E1) and (J1, D1, E2) are two different threads, instead of
+ //aborting (J1, D1, E1) triggered by the deadlock, we give higher priority to 3) than 2)
+ //as long as the dataset level lock D1 is being held by the same jobId.
+ //The above consideration is covered in the following code.
+ //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
+ if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+ if (dLockInfo.isCompatible(datasetLockMode)) {
+ //this is duplicated call
entityInfoManager.increaseDatasetLockCount(entityInfo);
if (entityHashValue == -1) {
dLockInfo.increaseLockCount(datasetLockMode);
@@ -420,8 +441,26 @@
}
//add entityInfo to JobInfo's holding-resource list
jobInfo.addHoldingResource(entityInfo);
+
+ return entityInfo;
+ } else {
+ //considered as upgrader
+ waiterCount = handleLockWaiter(dLockInfo, -1, entityInfo, true, true, txnContext, jobInfo,
+ -1);
+ if (waiterCount > 0) {
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ if (entityHashValue == -1) {
+ dLockInfo.increaseLockCount(datasetLockMode);
+ dLockInfo.addHolder(entityInfo);
+ } else {
+ dLockInfo.increaseLockCount(datasetLockMode);
+ //IS and IX holders are implicitly handled.
+ }
+ //add entityInfo to JobInfo's holding-resource list
+ jobInfo.addHoldingResource(entityInfo);
+ }
+ return entityInfo;
}
- return entityInfo;
}
}
/////////////////////////////////////////////////////////////////////////////////////////////
@@ -594,17 +633,22 @@
@Override
public void unlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext) throws ACIDException {
- internalUnlock(datasetId, entityHashValue, txnContext, false);
+ internalUnlock(datasetId, entityHashValue, txnContext, false, false);
}
@Override
public void unlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext, boolean commitFlag)
throws ACIDException {
- internalUnlock(datasetId, entityHashValue, txnContext, commitFlag);
+ internalUnlock(datasetId, entityHashValue, txnContext, false, commitFlag);
+ }
+
+ private void instantUnlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext)
+ throws ACIDException {
+ internalUnlock(datasetId, entityHashValue, txnContext, true, false);
}
private void internalUnlock(DatasetId datasetId, int entityHashValue, TransactionContext txnContext,
- boolean commitFlag) throws ACIDException {
+ boolean isInstant, boolean commitFlag) throws ACIDException {
JobId jobId = txnContext.getJobId();
int eLockInfo = -1;
DatasetLockInfo dLockInfo = null;
@@ -620,127 +664,124 @@
}
latchLockTable();
- validateJob(txnContext);
+ try {
+ validateJob(txnContext);
- if (IS_DEBUG_MODE) {
- trackLockRequest("Requested", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
- dLockInfo, eLockInfo);
- }
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Requested", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
+ dLockInfo, eLockInfo);
+ }
- //find the resource to be unlocked
- dLockInfo = datasetResourceHT.get(datasetId);
- jobInfo = jobHT.get(jobId);
- if (dLockInfo == null || jobInfo == null) {
- unlatchLockTable();
- throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
- }
+ //find the resource to be unlocked
+ dLockInfo = datasetResourceHT.get(datasetId);
+ jobInfo = jobHT.get(jobId);
+ if (dLockInfo == null || jobInfo == null) {
+ throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
+ }
- eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+ eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
- if (eLockInfo == -1) {
- unlatchLockTable();
- throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
- }
+ if (eLockInfo == -1) {
+ throw new IllegalStateException("Invalid unlock request: Corresponding lock info doesn't exist.");
+ }
- //find the corresponding entityInfo
- entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jobId.getId(), entityHashValue);
- if (entityInfo == -1) {
- unlatchLockTable();
- throw new IllegalStateException("Invalid unlock request[" + jobId.getId() + "," + datasetId.getId() + ","
- + entityHashValue + "]: Corresponding lock info doesn't exist.");
- }
+ //find the corresponding entityInfo
+ entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jobId.getId(), entityHashValue);
+ if (entityInfo == -1) {
+ throw new IllegalStateException("Invalid unlock request[" + jobId.getId() + "," + datasetId.getId()
+ + "," + entityHashValue + "]: Corresponding lock info doesn't exist.");
+ }
- datasetLockMode = entityInfoManager.getDatasetLockMode(entityInfo) == LockMode.S ? LockMode.IS : LockMode.IX;
+ datasetLockMode = entityInfoManager.getDatasetLockMode(entityInfo) == LockMode.S ? LockMode.IS
+ : LockMode.IX;
- //decrease the corresponding count of dLockInfo/eLockInfo/entityInfo
- dLockInfo.decreaseLockCount(datasetLockMode);
- entityLockInfoManager.decreaseLockCount(eLockInfo, entityInfoManager.getEntityLockMode(entityInfo));
- entityInfoManager.decreaseDatasetLockCount(entityInfo);
- entityInfoManager.decreaseEntityLockCount(entityInfo);
+ //decrease the corresponding count of dLockInfo/eLockInfo/entityInfo
+ dLockInfo.decreaseLockCount(datasetLockMode);
+ entityLockInfoManager.decreaseLockCount(eLockInfo, entityInfoManager.getEntityLockMode(entityInfo));
+ entityInfoManager.decreaseDatasetLockCount(entityInfo);
+ entityInfoManager.decreaseEntityLockCount(entityInfo);
- if (entityInfoManager.getEntityLockCount(entityInfo) == 0
- && entityInfoManager.getDatasetLockCount(entityInfo) == 0) {
- int threadCount = 0; //number of threads(in the same job) waiting on the same resource
- int waiterObjId = jobInfo.getFirstWaitingResource();
- int waitingEntityInfo;
- LockWaiter waiterObj;
+ if (entityInfoManager.getEntityLockCount(entityInfo) == 0
+ && entityInfoManager.getDatasetLockCount(entityInfo) == 0) {
+ int threadCount = 0; //number of threads(in the same job) waiting on the same resource
+ int waiterObjId = jobInfo.getFirstWaitingResource();
+ int waitingEntityInfo;
+ LockWaiter waiterObj;
- //TODO
- //This code should be taken care properly when there is a way to avoid doubling memory space for txnIds.
- //This commit log is written here in order to avoid increasing the memory space for managing transactionIds
- if (commitFlag) {
- if (txnContext.getTransactionType().equals(TransactionContext.TransactionType.READ_WRITE)) {
- try {
- txnSubsystem.getLogManager().log(LogType.ENTITY_COMMIT, txnContext, datasetId.getId(),
- entityHashValue, -1, (byte) 0, 0, null, null, logicalLogLocator);
- } catch (ACIDException e) {
+ //TODO
+ //This code should be taken care properly when there is a way to avoid doubling memory space for txnIds.
+ //This commit log is written here in order to avoid increasing the memory space for managing transactionIds
+ if (commitFlag) {
+ if (txnContext.getTransactionType().equals(TransactionContext.TransactionType.READ_WRITE)) {
try {
+ txnSubsystem.getLogManager().log(LogType.ENTITY_COMMIT, txnContext, datasetId.getId(),
+ entityHashValue, -1, (byte) 0, 0, null, null, logicalLogLocator);
+ } catch (ACIDException e) {
requestAbort(txnContext);
- } finally {
- unlatchLockTable();
}
}
+
+ txnContext.updateLastLSNForIndexes(logicalLogLocator.getLsn());
}
- txnContext.updateLastLSNForIndexes(logicalLogLocator.getLsn());
- }
+ //1) wake up waiters and remove holder
+ //wake up waiters of dataset-granule lock
+ wakeUpDatasetLockWaiters(dLockInfo);
+ //wake up waiters of entity-granule lock
+ wakeUpEntityLockWaiters(eLockInfo);
+ //remove the holder from eLockInfo's holder list and remove the holding resource from jobInfo's holding resource list
+ //this can be done in the following single function call.
+ entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
- //1) wake up waiters and remove holder
- //wake up waiters of dataset-granule lock
- wakeUpDatasetLockWaiters(dLockInfo);
- //wake up waiters of entity-granule lock
- wakeUpEntityLockWaiters(eLockInfo);
- //remove the holder from eLockInfo's holder list and remove the holding resource from jobInfo's holding resource list
- //this can be done in the following single function call.
- entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
-
- //2) if
- // there is no waiting thread on the same resource (this can be checked through jobInfo)
- // then
- // a) delete the corresponding entityInfo
- // b) write commit log for the unlocked resource(which is a committed txn).
- while (waiterObjId != -1) {
- waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
- waitingEntityInfo = waiterObj.getEntityInfoSlot();
- if (entityInfoManager.getDatasetId(waitingEntityInfo) == datasetId.getId()
- && entityInfoManager.getPKHashVal(waitingEntityInfo) == entityHashValue) {
- threadCount++;
- break;
+ //2) if
+ // there is no waiting thread on the same resource (this can be checked through jobInfo)
+ // then
+ // a) delete the corresponding entityInfo
+ // b) write commit log for the unlocked resource(which is a committed txn).
+ while (waiterObjId != -1) {
+ waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+ waitingEntityInfo = waiterObj.getEntityInfoSlot();
+ if (entityInfoManager.getDatasetId(waitingEntityInfo) == datasetId.getId()
+ && entityInfoManager.getPKHashVal(waitingEntityInfo) == entityHashValue) {
+ threadCount++;
+ break;
+ }
+ waiterObjId = waiterObj.getNextWaiterObjId();
}
- waiterObjId = waiterObj.getNextWaiterObjId();
- }
- if (threadCount == 0) {
- if (entityInfoManager.getEntityLockMode(entityInfo) == LockMode.X) {
- //TODO
- //write a commit log for the unlocked resource
- //need to figure out that instantLock() also needs to write a commit log.
+ if (threadCount == 0) {
+ if (entityInfoManager.getEntityLockMode(entityInfo) == LockMode.X) {
+ //TODO
+ //write a commit log for the unlocked resource
+ //need to figure out that instantLock() also needs to write a commit log.
+ }
+ entityInfoManager.deallocate(entityInfo);
}
- entityInfoManager.deallocate(entityInfo);
}
- }
- //deallocate entityLockInfo's slot if there is no txn referring to the entityLockInfo.
- if (entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
- && entityLockInfoManager.getLastHolder(eLockInfo) == -1
- && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
- dLockInfo.getEntityResourceHT().remove(entityHashValue);
- entityLockInfoManager.deallocate(eLockInfo);
- }
-
- //we don't deallocate datasetLockInfo even if there is no txn referring to the datasetLockInfo
- //since the datasetLockInfo is likely to be referred to again.
-
- if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (datasetLockMode == LockMode.IS) {
- jobInfo.decreaseDatasetISLockCount(datasetId.getId());
+ //deallocate entityLockInfo's slot if there is no txn referring to the entityLockInfo.
+ if (entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
+ && entityLockInfoManager.getLastHolder(eLockInfo) == -1
+ && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
+ dLockInfo.getEntityResourceHT().remove(entityHashValue);
+ entityLockInfoManager.deallocate(eLockInfo);
}
- }
- if (IS_DEBUG_MODE) {
- trackLockRequest("Granted", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
- dLockInfo, eLockInfo);
+ //we don't deallocate datasetLockInfo even if there is no txn referring to the datasetLockInfo
+ //since the datasetLockInfo is likely to be referred to again.
+
+ if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+ if (!isInstant && datasetLockMode == LockMode.IS) {
+ jobInfo.decreaseDatasetISLockCount(datasetId.getId());
+ }
+ }
+
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Granted", RequestType.UNLOCK, datasetId, entityHashValue, (byte) 0, txnContext,
+ dLockInfo, eLockInfo);
+ }
+ } finally {
+ unlatchLockTable();
}
- unlatchLockTable();
}
@Override
@@ -760,183 +801,178 @@
JobId jobId = txnContext.getJobId();
latchLockTable();
-
- if (IS_DEBUG_MODE) {
- trackLockRequest("Requested", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
- dLockInfo, eLockInfo);
- }
-
- JobInfo jobInfo = jobHT.get(jobId);
- if (jobInfo == null) {
- unlatchLockTable();
- return;
- }
-
- //remove waiterObj of JobInfo
- //[Notice]
- //waiterObjs may exist if aborted thread is the caller of this function.
- //Even if there are the waiterObjs, there is no waiting thread on the objects.
- //If the caller of this function is an aborted thread, it is guaranteed that there is no waiting threads
- //on the waiterObjs since when the aborted caller thread is waken up, all other waiting threads are
- //also waken up at the same time through 'notifyAll()' call.
- //In contrast, if the caller of this function is not an aborted thread, then there is no waiting object.
- int waiterObjId = jobInfo.getFirstWaitingResource();
- int nextWaiterObjId;
- while (waiterObjId != -1) {
- existWaiter = true;
- waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
- nextWaiterObjId = waiterObj.getNextWaitingResourceObjId();
- entityInfo = waiterObj.getEntityInfoSlot();
+ try {
if (IS_DEBUG_MODE) {
- if (jobId.getId() != entityInfoManager.getJobId(entityInfo)) {
- throw new IllegalStateException("JobInfo(" + jobId + ") has diffrent Job(JID:"
- + entityInfoManager.getJobId(entityInfo) + "'s lock request!!!");
- }
+ trackLockRequest("Requested", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
+ dLockInfo, eLockInfo);
}
- //1. remove from waiter(or upgrader)'s list of dLockInfo or eLockInfo.
- did = entityInfoManager.getDatasetId(entityInfo);
- tempDatasetIdObj.setId(did);
- dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
-
- if (waiterObj.isWaitingOnEntityLock()) {
- entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
- eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
- if (waiterObj.isWaiter()) {
- entityLockInfoManager.removeWaiter(eLockInfo, waiterObjId);
- } else {
- entityLockInfoManager.removeUpgrader(eLockInfo, waiterObjId);
- }
- } else {
- if (waiterObj.isWaiter()) {
- dLockInfo.removeWaiter(waiterObjId);
- } else {
- dLockInfo.removeUpgrader(waiterObjId);
- }
+ JobInfo jobInfo = jobHT.get(jobId);
+ if (jobInfo == null) {
+ return;
}
- //2. wake-up waiters
- latchWaitNotify();
- synchronized (waiterObj) {
- unlatchWaitNotify();
- waiterObj.setWait(false);
+ //remove waiterObj of JobInfo
+ //[Notice]
+ //waiterObjs may exist if aborted thread is the caller of this function.
+ //Even if there are the waiterObjs, there is no waiting thread on the objects.
+ //If the caller of this function is an aborted thread, it is guaranteed that there are no waiting threads
+ //on the waiterObjs since when the aborted caller thread is woken up, all other waiting threads are
+ //also woken up at the same time through 'notifyAll()' call.
+ //In contrast, if the caller of this function is not an aborted thread, then there is no waiting object.
+ int waiterObjId = jobInfo.getFirstWaitingResource();
+ int nextWaiterObjId;
+ while (waiterObjId != -1) {
+ existWaiter = true;
+ waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+ nextWaiterObjId = waiterObj.getNextWaitingResourceObjId();
+ entityInfo = waiterObj.getEntityInfoSlot();
if (IS_DEBUG_MODE) {
- System.out.println("" + Thread.currentThread().getName() + "\twake-up(D): WID(" + waiterObjId
- + "),EID(" + waiterObj.getEntityInfoSlot() + ")");
- }
- waiterObj.notifyAll();
- }
-
- //3. deallocate waiterObj
- lockWaiterManager.deallocate(waiterObjId);
-
- //4. deallocate entityInfo only if this waiter is not an upgrader
- if (entityInfoManager.getDatasetLockCount(entityInfo) == 0
- && entityInfoManager.getEntityLockCount(entityInfo) == 0) {
- entityInfoManager.deallocate(entityInfo);
- }
- waiterObjId = nextWaiterObjId;
- }
-
- //release holding resources
- entityInfo = jobInfo.getLastHoldingResource();
- while (entityInfo != -1) {
- prevEntityInfo = entityInfoManager.getPrevJobResource(entityInfo);
-
- //decrease lock count of datasetLock and entityLock
- did = entityInfoManager.getDatasetId(entityInfo);
- tempDatasetIdObj.setId(did);
- dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
- entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
-
- if (entityHashValue == -1) {
- //decrease datasetLockCount
- lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
- datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
- if (datasetLockCount != 0) {
- dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
-
- //wakeup waiters of datasetLock and remove holder from datasetLockInfo
- wakeUpDatasetLockWaiters(dLockInfo);
-
- //remove the holder from datasetLockInfo only if the lock is dataset-granule lock.
- //--> this also removes the holding resource from jobInfo
- //(Because the IX and IS lock's holders are handled implicitly,
- //those are not in the holder list of datasetLockInfo.)
- dLockInfo.removeHolder(entityInfo, jobInfo);
- }
- } else {
- //decrease datasetLockCount
- lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
- lockMode = lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
- datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
-
- if (datasetLockCount != 0) {
- dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
- }
-
- //decrease entityLockCount
- lockMode = entityInfoManager.getEntityLockMode(entityInfo);
- entityLockCount = entityInfoManager.getEntityLockCount(entityInfo);
- eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
- if (IS_DEBUG_MODE) {
- if (eLockInfo < 0) {
- System.out.println("eLockInfo:" + eLockInfo);
+ if (jobId.getId() != entityInfoManager.getJobId(entityInfo)) {
+ throw new IllegalStateException("JobInfo(" + jobId + ") has diffrent Job(JID:"
+ + entityInfoManager.getJobId(entityInfo) + "'s lock request!!!");
}
}
- if (entityLockCount != 0) {
- entityLockInfoManager.decreaseLockCount(eLockInfo, lockMode, (short) entityLockCount);
+ //1. remove from waiter(or upgrader)'s list of dLockInfo or eLockInfo.
+ did = entityInfoManager.getDatasetId(entityInfo);
+ tempDatasetIdObj.setId(did);
+ dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
+
+ if (waiterObj.isWaitingOnEntityLock()) {
+ entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
+ eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+ if (waiterObj.isWaiter()) {
+ entityLockInfoManager.removeWaiter(eLockInfo, waiterObjId);
+ } else {
+ entityLockInfoManager.removeUpgrader(eLockInfo, waiterObjId);
+ }
+ } else {
+ if (waiterObj.isWaiter()) {
+ dLockInfo.removeWaiter(waiterObjId);
+ } else {
+ dLockInfo.removeUpgrader(waiterObjId);
+ }
}
- if (datasetLockCount != 0) {
- //wakeup waiters of datasetLock and don't remove holder from datasetLockInfo
- wakeUpDatasetLockWaiters(dLockInfo);
+ //2. wake-up waiters
+ latchWaitNotify();
+ synchronized (waiterObj) {
+ unlatchWaitNotify();
+ waiterObj.setWait(false);
+ if (IS_DEBUG_MODE) {
+ System.out.println("" + Thread.currentThread().getName() + "\twake-up(D): WID(" + waiterObjId
+ + "),EID(" + waiterObj.getEntityInfoSlot() + ")");
+ }
+ waiterObj.notifyAll();
}
- if (entityLockCount != 0) {
- //wakeup waiters of entityLock
- wakeUpEntityLockWaiters(eLockInfo);
+ //3. deallocate waiterObj
+ lockWaiterManager.deallocate(waiterObjId);
- //remove the holder from entityLockInfo
- //--> this also removes the holding resource from jobInfo
- entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
+ //4. deallocate entityInfo only if this waiter is not an upgrader
+ if (entityInfoManager.getDatasetLockCount(entityInfo) == 0
+ && entityInfoManager.getEntityLockCount(entityInfo) == 0) {
+ entityInfoManager.deallocate(entityInfo);
}
-
- //deallocate entityLockInfo if there is no holder and waiter.
- if (entityLockInfoManager.getLastHolder(eLockInfo) == -1
- && entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
- && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
- dLockInfo.getEntityResourceHT().remove(entityHashValue);
- entityLockInfoManager.deallocate(eLockInfo);
- // if (IS_DEBUG_MODE) {
- // System.out.println("removed PK["+entityHashValue+"]");
- // }
- }
+ waiterObjId = nextWaiterObjId;
}
- //deallocate entityInfo
- entityInfoManager.deallocate(entityInfo);
- // if (IS_DEBUG_MODE) {
- // System.out.println("dellocate EntityInfo["+entityInfo+"]");
- // }
+ //release holding resources
+ entityInfo = jobInfo.getLastHoldingResource();
+ while (entityInfo != -1) {
+ prevEntityInfo = entityInfoManager.getPrevJobResource(entityInfo);
- entityInfo = prevEntityInfo;
+ //decrease lock count of datasetLock and entityLock
+ did = entityInfoManager.getDatasetId(entityInfo);
+ tempDatasetIdObj.setId(did);
+ dLockInfo = datasetResourceHT.get(tempDatasetIdObj);
+ entityHashValue = entityInfoManager.getPKHashVal(entityInfo);
+
+ if (entityHashValue == -1) {
+ //decrease datasetLockCount
+ lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
+ datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
+ if (datasetLockCount != 0) {
+ dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
+
+ //wakeup waiters of datasetLock and remove holder from datasetLockInfo
+ wakeUpDatasetLockWaiters(dLockInfo);
+
+ //remove the holder from datasetLockInfo only if the lock is dataset-granule lock.
+ //--> this also removes the holding resource from jobInfo
+ //(Because the IX and IS lock's holders are handled implicitly,
+ //those are not in the holder list of datasetLockInfo.)
+ dLockInfo.removeHolder(entityInfo, jobInfo);
+ }
+ } else {
+ //decrease datasetLockCount
+ lockMode = entityInfoManager.getDatasetLockMode(entityInfo);
+ lockMode = lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
+ datasetLockCount = entityInfoManager.getDatasetLockCount(entityInfo);
+
+ if (datasetLockCount != 0) {
+ dLockInfo.decreaseLockCount(lockMode, datasetLockCount);
+ }
+
+ //decrease entityLockCount
+ lockMode = entityInfoManager.getEntityLockMode(entityInfo);
+ entityLockCount = entityInfoManager.getEntityLockCount(entityInfo);
+ eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+ if (IS_DEBUG_MODE) {
+ if (eLockInfo < 0) {
+ System.out.println("eLockInfo:" + eLockInfo);
+ }
+ }
+
+ if (entityLockCount != 0) {
+ entityLockInfoManager.decreaseLockCount(eLockInfo, lockMode, (short) entityLockCount);
+ }
+
+ if (datasetLockCount != 0) {
+ //wakeup waiters of datasetLock and don't remove holder from datasetLockInfo
+ wakeUpDatasetLockWaiters(dLockInfo);
+ }
+
+ if (entityLockCount != 0) {
+ //wakeup waiters of entityLock
+ wakeUpEntityLockWaiters(eLockInfo);
+
+ //remove the holder from entityLockInfo
+ //--> this also removes the holding resource from jobInfo
+ entityLockInfoManager.removeHolder(eLockInfo, entityInfo, jobInfo);
+ }
+
+ //deallocate entityLockInfo if there is no holder and waiter.
+ if (entityLockInfoManager.getLastHolder(eLockInfo) == -1
+ && entityLockInfoManager.getFirstWaiter(eLockInfo) == -1
+ && entityLockInfoManager.getUpgrader(eLockInfo) == -1) {
+ dLockInfo.getEntityResourceHT().remove(entityHashValue);
+ entityLockInfoManager.deallocate(eLockInfo);
+ }
+ }
+
+ //deallocate entityInfo
+ entityInfoManager.deallocate(entityInfo);
+
+ entityInfo = prevEntityInfo;
+ }
+
+ //remove JobInfo
+ jobHT.remove(jobId);
+
+ if (existWaiter) {
+ txnContext.setStatus(TransactionContext.TIMED_OUT_STATUS);
+ txnContext.setTxnState(TransactionState.ABORTED);
+ }
+
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Granted", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
+ dLockInfo, eLockInfo);
+ }
+ } finally {
+ unlatchLockTable();
}
-
- //remove JobInfo
- jobHT.remove(jobId);
-
- if (existWaiter) {
- txnContext.setStatus(TransactionContext.TIMED_OUT_STATUS);
- txnContext.setTxnState(TransactionState.ABORTED);
- }
-
- if (IS_DEBUG_MODE) {
- trackLockRequest("Granted", RequestType.RELEASE_LOCKS, new DatasetId(0), 0, (byte) 0, txnContext,
- dLockInfo, eLockInfo);
- }
- unlatchLockTable();
}
@Override
@@ -949,37 +985,227 @@
// } finally {
// unlock(datasetId, entityHashValue, txnContext);
// }
- internalLock(datasetId, entityHashValue, lockMode, txnContext);
- unlock(datasetId, entityHashValue, txnContext);
+ internalLock(datasetId, entityHashValue, lockMode, txnContext, true);
+ instantUnlock(datasetId, entityHashValue, txnContext);
}
@Override
public boolean tryLock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
throws ACIDException {
- return internalTryLock(datasetId, entityHashValue, lockMode, txnContext);
+ return internalTryLock(datasetId, entityHashValue, lockMode, txnContext, false);
}
@Override
public boolean instantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode, TransactionContext txnContext)
throws ACIDException {
- boolean isGranted = false;
- // try {
- // isGranted = internalTryLock(datasetId, entityHashValue, lockMode, txnContext);
- // return isGranted;
- // } finally {
- // if (isGranted) {
- // unlock(datasetId, entityHashValue, txnContext);
- // }
- // }
- isGranted = internalTryLock(datasetId, entityHashValue, lockMode, txnContext);
- if (isGranted) {
- unlock(datasetId, entityHashValue, txnContext);
+ return internalInstantTryLock(datasetId, entityHashValue, lockMode, txnContext);
+ }
+
+ private boolean internalInstantTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
+ TransactionContext txnContext) throws ACIDException {
+ DatasetLockInfo dLockInfo = null;
+ boolean isSuccess = true;
+
+ latchLockTable();
+ try {
+ validateJob(txnContext);
+
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Requested", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+ txnContext, dLockInfo, -1);
+ }
+
+ dLockInfo = datasetResourceHT.get(datasetId);
+
+ //#. if the datasetLockInfo doesn't exist in datasetResourceHT
+ if (dLockInfo == null || dLockInfo.isNoHolder()) {
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Granted", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+ txnContext, dLockInfo, -1);
+ }
+ return true;
+ }
+
+ //#. the datasetLockInfo exists in datasetResourceHT.
+ //1. handle dataset-granule lock
+ byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS
+ : LockMode.IX;
+ if (datasetLockMode == LockMode.IS) {
+ //[Notice]
+ //Skip checking the dataset level lock compatibility if the requested LockMode is IS lock.
+ //We know that this internalInstantTryLock() call with IS lock mode will always be granted
+ //because we don't allow X lock on dataset-level except DDL operation.
+ //During DDL operation, all other operations will be pending, so there is no conflict.
+ isSuccess = true;
+ } else {
+ isSuccess = instantTryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext, dLockInfo,
+ datasetLockMode);
+ }
+
+ if (isSuccess && entityHashValue != -1) {
+ //2. handle entity-granule lock
+ isSuccess = instantTryLockEntityGranule(datasetId, entityHashValue, lockMode, txnContext, dLockInfo);
+ }
+
+ if (IS_DEBUG_MODE) {
+ if (isSuccess) {
+ trackLockRequest("Granted", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+ txnContext, dLockInfo, -1);
+ } else {
+ trackLockRequest("Failed", RequestType.INSTANT_TRY_LOCK, datasetId, entityHashValue, lockMode,
+ txnContext, dLockInfo, -1);
+ }
+ }
+
+ } finally {
+ unlatchLockTable();
}
- return isGranted;
+
+ return isSuccess;
+ }
+
+ private boolean instantTryLockDatasetGranule(DatasetId datasetId, int entityHashValue, byte lockMode,
+ TransactionContext txnContext, DatasetLockInfo dLockInfo, byte datasetLockMode) throws ACIDException {
+ JobId jobId = txnContext.getJobId();
+ int jId = jobId.getId(); //int-type jobId
+ int dId = datasetId.getId(); //int-type datasetId
+ int waiterObjId;
+ int entityInfo = -1;
+ JobInfo jobInfo;
+ boolean isUpgrade = false;
+
+ jobInfo = jobHT.get(jobId);
+
+ //check duplicated call
+
+ //1. lock request causing duplicated upgrading requests from different threads in a same job
+ waiterObjId = dLockInfo.findUpgraderFromUpgraderList(jId, entityHashValue);
+ if (waiterObjId != -1) {
+ return false;
+ }
+
+ //2. lock request causing duplicated waiting requests from different threads in a same job
+ waiterObjId = dLockInfo.findWaiterFromWaiterList(jId, entityHashValue);
+ if (waiterObjId != -1) {
+ return false;
+ }
+
+ //3. lock request causing duplicated holding requests from different threads or a single thread in a same job
+ entityInfo = dLockInfo.findEntityInfoFromHolderList(jId, entityHashValue);
+ if (entityInfo == -1) { //new call from this job -> doesn't mean that eLockInfo doesn't exist since another thread might have created the eLockInfo already.
+
+ //return fail if any upgrader exists or upgrading lock mode is not compatible
+ if (dLockInfo.getFirstUpgrader() != -1 || dLockInfo.getFirstWaiter() != -1
+ || !dLockInfo.isCompatible(datasetLockMode)) {
+
+ if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+ //The following case may only occur when the dataset level X lock is requested
+ //with the other lock
+
+ //[Notice]
+ //There has been no same caller as (jId, dId, entityHashValue) triplet.
+ //But there could be the same caller in terms of (jId, dId) pair.
+ //For example,
+ //1) (J1, D1, E1) acquires IS in Dataset D1
+ //2) (J2, D1, -1) requests X in Dataset D1, but waits
+ //3) (J1, D1, E2) requests IS in Dataset D1, but should wait
+ //The 3) may cause deadlock if 1) and 3) are under the same thread.
+ //Even if (J1, D1, E1) and (J1, D1, E2) are two different threads, instead of
+ //aborting (J1, D1, E1) triggered by the deadlock, we give higher priority to 3) than 2)
+ //as long as the dataset level lock D1 is being held by the same jobId.
+ //The above consideration is covered in the following code.
+ //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
+ if (jobInfo != null && jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+ if (dLockInfo.isCompatible(datasetLockMode)) {
+ //this is duplicated call
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+ } else {
+ isUpgrade = isLockUpgrade(entityInfoManager.getDatasetLockMode(entityInfo), lockMode);
+ if (isUpgrade) { //upgrade call
+ //return fail if any upgrader exists or upgrading lock mode is not compatible
+ if (dLockInfo.getFirstUpgrader() != -1 || !dLockInfo.isUpgradeCompatible(datasetLockMode, entityInfo)) {
+ return false;
+ }
+ }
+ /************************************
+ * else { //duplicated call
+ * //do nothing
+ * }
+ *************************************/
+ }
+
+ return true;
+ }
+
+ private boolean instantTryLockEntityGranule(DatasetId datasetId, int entityHashValue, byte lockMode,
+ TransactionContext txnContext, DatasetLockInfo dLockInfo) throws ACIDException {
+ JobId jobId = txnContext.getJobId();
+ int jId = jobId.getId(); //int-type jobId
+ int waiterObjId;
+ int eLockInfo = -1;
+ int entityInfo;
+ boolean isUpgrade = false;
+
+ dLockInfo = datasetResourceHT.get(datasetId);
+ eLockInfo = dLockInfo.getEntityResourceHT().get(entityHashValue);
+
+ if (eLockInfo != -1) {
+ //check duplicated call
+
+ //1. lock request causing duplicated upgrading requests from different threads in a same job
+ waiterObjId = entityLockInfoManager.findUpgraderFromUpgraderList(eLockInfo, jId, entityHashValue);
+ if (waiterObjId != -1) {
+ return false;
+ }
+
+ //2. lock request causing duplicated waiting requests from different threads in a same job
+ waiterObjId = entityLockInfoManager.findWaiterFromWaiterList(eLockInfo, jId, entityHashValue);
+ if (waiterObjId != -1) {
+ return false;
+ }
+
+ //3. lock request causing duplicated holding requests from different threads or a single thread in a same job
+ entityInfo = entityLockInfoManager.findEntityInfoFromHolderList(eLockInfo, jId, entityHashValue);
+ if (entityInfo != -1) {//duplicated call or upgrader
+
+ isUpgrade = isLockUpgrade(entityInfoManager.getEntityLockMode(entityInfo), lockMode);
+ if (isUpgrade) {//upgrade call
+ //wait if any upgrader exists or upgrading lock mode is not compatible
+ if (entityLockInfoManager.getUpgrader(eLockInfo) != -1
+ || !entityLockInfoManager.isUpgradeCompatible(eLockInfo, lockMode, entityInfo)) {
+ return false;
+ }
+ }
+ /***************************
+ * else {//duplicated call
+ * //do nothing
+ * }
+ ****************************/
+ } else {//new call from this job, but still eLockInfo exists since other threads hold it or wait on it
+ if (entityLockInfoManager.getUpgrader(eLockInfo) != -1
+ || entityLockInfoManager.getFirstWaiter(eLockInfo) != -1
+ || !entityLockInfoManager.isCompatible(eLockInfo, lockMode)) {
+ return false;
+ }
+ }
+ }
+ /*******************************
+ * else {//eLockInfo doesn't exist, so this lock request is the first request and can be granted without waiting.
+ * //do nothing
+ * }
+ *********************************/
+
+ return true;
}
private boolean internalTryLock(DatasetId datasetId, int entityHashValue, byte lockMode,
- TransactionContext txnContext) throws ACIDException {
+ TransactionContext txnContext, boolean isInstant) throws ACIDException {
JobId jobId = txnContext.getJobId();
int jId = jobId.getId(); //int-type jobId
int dId = datasetId.getId(); //int-type datasetId
@@ -988,121 +1214,140 @@
DatasetLockInfo dLockInfo = null;
JobInfo jobInfo;
byte datasetLockMode = entityHashValue == -1 ? lockMode : lockMode == LockMode.S ? LockMode.IS : LockMode.IX;
- boolean isSuccess = false;
- boolean isEscalated = false;
+ boolean isSuccess = true;
+ boolean doEscalate = false;
latchLockTable();
- validateJob(txnContext);
+ try {
+ validateJob(txnContext);
- if (IS_DEBUG_MODE) {
- trackLockRequest("Requested", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
- dLockInfo, eLockInfo);
- }
-
- dLockInfo = datasetResourceHT.get(datasetId);
- jobInfo = jobHT.get(jobId);
-
- if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
- int upgradeStatus = needUpgradeFromEntityToDataset(jobInfo, dId, lockMode);
- switch (upgradeStatus) {
- case DO_ESCALATE:
- entityHashValue = -1;
- isEscalated = true;
- break;
-
- case ESCALATED:
- unlatchLockTable();
- return true;
-
- default:
- break;
- }
- }
- }
-
- //#. if the datasetLockInfo doesn't exist in datasetResourceHT
- if (dLockInfo == null || dLockInfo.isNoHolder()) {
- if (dLockInfo == null) {
- dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
- datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datsetId obj should be created
- }
- entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
-
- //if dataset-granule lock
- if (entityHashValue == -1) { //-1 stands for dataset-granule
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- dLockInfo.increaseLockCount(datasetLockMode);
- dLockInfo.addHolder(entityInfo);
- } else {
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- dLockInfo.increaseLockCount(datasetLockMode);
- //add entityLockInfo
- eLockInfo = entityLockInfoManager.allocate();
- dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
- entityInfoManager.increaseEntityLockCount(entityInfo);
- entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
- entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Requested", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
}
- if (jobInfo == null) {
- jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
- jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
- }
- jobInfo.addHoldingResource(entityInfo);
+ dLockInfo = datasetResourceHT.get(datasetId);
+ jobInfo = jobHT.get(jobId);
if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (datasetLockMode == LockMode.IS) {
- jobInfo.increaseDatasetISLockCount(dId);
+ if (!isInstant && datasetLockMode == LockMode.IS && jobInfo != null && dLockInfo != null) {
+ int upgradeStatus = needEscalateFromEntityToDataset(jobInfo, dId, lockMode);
+ switch (upgradeStatus) {
+ case DO_ESCALATE:
+ entityHashValue = -1;
+ doEscalate = true;
+ break;
+
+ case ESCALATED:
+ return true;
+
+ default:
+ break;
+ }
+ }
+ }
+
+ //#. if the datasetLockInfo doesn't exist in datasetResourceHT
+ if (dLockInfo == null || dLockInfo.isNoHolder()) {
+ if (dLockInfo == null) {
+ dLockInfo = new DatasetLockInfo(entityLockInfoManager, entityInfoManager, lockWaiterManager);
+ datasetResourceHT.put(new DatasetId(dId), dLockInfo); //datasetId obj should be created
+ }
+ entityInfo = entityInfoManager.allocate(jId, dId, entityHashValue, lockMode);
+
+ //if dataset-granule lock
+ if (entityHashValue == -1) { //-1 stands for dataset-granule
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ dLockInfo.increaseLockCount(datasetLockMode);
+ dLockInfo.addHolder(entityInfo);
+ } else {
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ dLockInfo.increaseLockCount(datasetLockMode);
+ //add entityLockInfo
+ eLockInfo = entityLockInfoManager.allocate();
+ dLockInfo.getEntityResourceHT().put(entityHashValue, eLockInfo);
+ entityInfoManager.increaseEntityLockCount(entityInfo);
+ entityLockInfoManager.increaseLockCount(eLockInfo, lockMode);
+ entityLockInfoManager.addHolder(eLockInfo, entityInfo);
+ }
+
+ if (jobInfo == null) {
+ jobInfo = new JobInfo(entityInfoManager, lockWaiterManager, txnContext);
+ jobHT.put(jobId, jobInfo); //jobId obj doesn't have to be created
+ }
+ jobInfo.addHoldingResource(entityInfo);
+
+ if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+ if (!isInstant && datasetLockMode == LockMode.IS) {
+ jobInfo.increaseDatasetISLockCount(dId);
+ if (doEscalate) {
+ //This exception is thrown when the threshold value is set to 1.
+ //We don't want to allow the lock escalation when there is a first lock request on a dataset.
+ throw new IllegalStateException(
+ "ESCALATE_TRHESHOLD_ENTITY_TO_DATASET should not be set to "
+ + ESCALATE_TRHESHOLD_ENTITY_TO_DATASET);
+ }
+ }
+ }
+
+ if (IS_DEBUG_MODE) {
+ trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
+ }
+
+ return true;
+ }
+
+ //#. the datasetLockInfo exists in datasetResourceHT.
+ //1. handle dataset-granule lock
+ tryLockDatasetGranuleRevertOperation = 0;
+ entityInfo = tryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
+ if (entityInfo == -2) {//-2 represents fail
+ isSuccess = false;
+ } else {
+ //2. handle entity-granule lock
+ if (entityHashValue != -1) {
+ isSuccess = tryLockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
+ if (!isSuccess) {
+ revertTryLockDatasetGranuleOperation(datasetId, entityHashValue, lockMode, entityInfo,
+ txnContext);
+ }
+ }
+ }
+
+ if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
+ if (!isInstant) {
+ if (doEscalate) {
+ //jobInfo must not be null.
+ assert jobInfo != null;
+ jobInfo.increaseDatasetISLockCount(dId);
+ //release pre-acquired locks
+ releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
+ } else if (datasetLockMode == LockMode.IS) {
+ if (jobInfo == null) {
+ jobInfo = jobHT.get(jobId);
+ //jobInfo must not be null;
+ assert jobInfo != null;
+ }
+ jobInfo.increaseDatasetISLockCount(dId);
+ }
}
}
if (IS_DEBUG_MODE) {
- trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
- dLockInfo, eLockInfo);
- }
-
- unlatchLockTable();
- return true;
- }
-
- //#. the datasetLockInfo exists in datasetResourceHT.
- //1. handle dataset-granule lock
- tryLockDatasetGranuleRevertOperation = 0;
- entityInfo = tryLockDatasetGranule(datasetId, entityHashValue, lockMode, txnContext);
- if (entityInfo == -2) {//-2 represents fail
- isSuccess = false;
- } else {
- //2. handle entity-granule lock
- if (entityHashValue != -1) {
- isSuccess = tryLockEntityGranule(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
- if (!isSuccess) {
- revertTryLockDatasetGranuleOperation(datasetId, entityHashValue, lockMode, entityInfo, txnContext);
+ if (isSuccess) {
+ trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
+ } else {
+ trackLockRequest("Failed", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
+ dLockInfo, eLockInfo);
}
}
- }
- if (ALLOW_ESCALATE_FROM_ENTITY_TO_DATASET) {
- if (isEscalated) {
- releaseDatasetISLocks(jobInfo, jobId, datasetId, txnContext);
- }
- if (jobInfo != null && datasetLockMode == LockMode.IS) {
- jobInfo.increaseDatasetISLockCount(dId);
- }
+ } finally {
+ unlatchLockTable();
}
- if (IS_DEBUG_MODE) {
- if (isSuccess) {
- trackLockRequest("Granted", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
- dLockInfo, eLockInfo);
- } else {
- trackLockRequest("Failed", RequestType.TRY_LOCK, datasetId, entityHashValue, lockMode, txnContext,
- dLockInfo, eLockInfo);
- }
- }
-
- unlatchLockTable();
-
return isSuccess;
}
@@ -1284,29 +1529,41 @@
if (dLockInfo.getFirstUpgrader() != -1 || dLockInfo.getFirstWaiter() != -1
|| !dLockInfo.isCompatible(datasetLockMode)) {
- //[Notice]
- //There has been no same caller as (jId, dId, entityHashValue) triplet.
- //But there could be the same caller as (jId, dId) pair.
- //For example, two requests (J1, D1, E1) and (J1, D1, E2) are considered as duplicated call in dataset-granule perspective.
- //Therefore, the above duplicated call case is covered in the following code.
- //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
- if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
- if (dLockInfo.isCompatible(datasetLockMode)) {
- //this is duplicated call
- entityInfoManager.increaseDatasetLockCount(entityInfo);
- if (entityHashValue == -1) {
- dLockInfo.increaseLockCount(datasetLockMode);
- dLockInfo.addHolder(entityInfo);
- } else {
- dLockInfo.increaseLockCount(datasetLockMode);
- //IS and IX holders are implicitly handled.
+ if (ALLOW_DATASET_GRANULE_X_LOCK_WITH_OTHER_CONCURRENT_LOCK_REQUESTS) {
+ //The following case may only occur when the dataset level X lock is requested
+ //with the other lock
+
+ //[Notice]
+ //There has been no same caller as (jId, dId, entityHashValue) triplet.
+ //But there could be the same caller in terms of (jId, dId) pair.
+ //For example,
+ //1) (J1, D1, E1) acquires IS in Dataset D1
+ //2) (J2, D1, -1) requests X in Dataset D1, but waits
+ //3) (J1, D1, E2) requests IS in Dataset D1, but should wait
+ //The 3) may cause deadlock if 1) and 3) are under the same thread.
+ //Even if (J1, D1, E1) and (J1, D1, E2) are two different threads, instead of
+ //aborting (J1, D1, E1) triggered by the deadlock, we give higher priority to 3) than 2)
+ //as long as the dataset level lock D1 is being held by the same jobId.
+ //The above consideration is covered in the following code.
+ //find the same dataset-granule lock request, that is, (J1, D1) pair in the above example.
+ if (jobInfo.isDatasetLockGranted(dId, LockMode.IS)) {
+ if (dLockInfo.isCompatible(datasetLockMode)) {
+ //this is duplicated call
+ entityInfoManager.increaseDatasetLockCount(entityInfo);
+ if (entityHashValue == -1) {
+ dLockInfo.increaseLockCount(datasetLockMode);
+ dLockInfo.addHolder(entityInfo);
+ } else {
+ dLockInfo.increaseLockCount(datasetLockMode);
+ //IS and IX holders are implicitly handled.
+ }
+ //add entityInfo to JobInfo's holding-resource list
+ jobInfo.addHoldingResource(entityInfo);
+
+ tryLockDatasetGranuleRevertOperation = 1;
+
+ return entityInfo;
}
- //add entityInfo to JobInfo's holding-resource list
- jobInfo.addHoldingResource(entityInfo);
-
- tryLockDatasetGranuleRevertOperation = 1;
-
- return entityInfo;
}
}
@@ -1525,38 +1782,38 @@
latchWaitNotify();
unlatchLockTable();
- synchronized (waiter) {
- unlatchWaitNotify();
- while (waiter.needWait()) {
- try {
- if (IS_DEBUG_MODE) {
- System.out.println("" + Thread.currentThread().getName() + "\twaits("
- + waiter.getWaiterCount() + "): WID(" + waiterId + "),EID("
- + waiter.getEntityInfoSlot() + ")");
+ try {
+ synchronized (waiter) {
+ unlatchWaitNotify();
+ while (waiter.needWait()) {
+ try {
+ if (IS_DEBUG_MODE) {
+ System.out.println("" + Thread.currentThread().getName() + "\twaits("
+ + waiter.getWaiterCount() + "): WID(" + waiterId + "),EID("
+ + waiter.getEntityInfoSlot() + ")");
+ }
+ waiter.wait();
+ } catch (InterruptedException e) {
+ //TODO figure out what is the appropriate way to handle this exception
+ e.printStackTrace();
+ isInterruptedExceptionOccurred = true;
+ waiter.setWait(false);
}
- waiter.wait();
- } catch (InterruptedException e) {
- //TODO figure-out what is the appropriate way to handle this exception
- e.printStackTrace();
- isInterruptedExceptionOccurred = true;
- waiter.setWait(false);
}
}
- }
- if (isInterruptedExceptionOccurred) {
- throw new ACIDException("InterruptedException is caught");
+ if (isInterruptedExceptionOccurred) {
+ throw new ACIDException("InterruptedException is caught");
+ }
+ } catch (Exception e) {
+ throw new LockMgrLatchHandlerException(e);
}
//waiter woke up -> remove/deallocate waiter object and abort if timeout
latchLockTable();
if (txnContext.getStatus() == TransactionContext.TIMED_OUT_STATUS || waiter.isVictim()) {
- try {
- requestAbort(txnContext);
- } finally {
- unlatchLockTable();
- }
+ requestAbort(txnContext);
}
if (waiter.isFirstGetUp()) {
@@ -1600,11 +1857,7 @@
//deallocate the entityInfo
entityInfoManager.deallocate(entityInfo);
}
- try {
- requestAbort(txnContext);
- } finally {
- unlatchLockTable();
- }
+ requestAbort(txnContext);
}
return waiterCount;
@@ -1775,20 +2028,22 @@
LockWaiter waiterObj;
latchLockTable();
+ try {
- Iterator<Entry<JobId, JobInfo>> iter = jobHT.entrySet().iterator();
- while (iter.hasNext()) {
- Map.Entry<JobId, JobInfo> pair = (Map.Entry<JobId, JobInfo>) iter.next();
- jobInfo = pair.getValue();
- waiterObjId = jobInfo.getFirstWaitingResource();
- while (waiterObjId != -1) {
- waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
- toutDetector.checkAndSetVictim(waiterObj);
- waiterObjId = waiterObj.getNextWaiterObjId();
+ Iterator<Entry<JobId, JobInfo>> iter = jobHT.entrySet().iterator();
+ while (iter.hasNext()) {
+ Map.Entry<JobId, JobInfo> pair = (Map.Entry<JobId, JobInfo>) iter.next();
+ jobInfo = pair.getValue();
+ waiterObjId = jobInfo.getFirstWaitingResource();
+ while (waiterObjId != -1) {
+ waiterObj = lockWaiterManager.getLockWaiter(waiterObjId);
+ toutDetector.checkAndSetVictim(waiterObj);
+ waiterObjId = waiterObj.getNextWaiterObjId();
+ }
}
+ } finally {
+ unlatchLockTable();
}
-
- unlatchLockTable();
}
}
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java
new file mode 100644
index 0000000..05a582c
--- /dev/null
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/LockMgrLatchHandlerException.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2012-2014 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.transaction.management.service.locking;
+
+import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
+
+public class LockMgrLatchHandlerException extends ACIDException {
+
+ private static final long serialVersionUID = 1203182080428864199L;
+ private final Exception internalException;
+
+ public LockMgrLatchHandlerException(Exception e) {
+ super(e);
+ this.internalException = e;
+ }
+
+ public Exception getInternalException() {
+ return internalException;
+ }
+}
\ No newline at end of file
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
index 4e96d2e..9b8f09c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManager.java
@@ -226,8 +226,7 @@
}
public int getLogPageIndex(long lsnValue) {
- return (int) ((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages;
-
+ return (int) (((lsnValue - startingLSN) / logManagerProperties.getLogPageSize()) % numLogPages);
}
/*
@@ -236,7 +235,6 @@
*/
public int getLogFileId(long lsnValue) {
return (int) ((lsnValue) / logManagerProperties.getLogPartitionSize());
-
}
/*
@@ -244,7 +242,7 @@
* record is (to be) placed.
*/
public int getLogPageOffset(long lsnValue) {
- return (int) (lsnValue - startingLSN) % logManagerProperties.getLogPageSize();
+ return (int) ((lsnValue - startingLSN) % logManagerProperties.getLogPageSize());
}
/*
@@ -524,9 +522,9 @@
* This method resets the log page and is called by the log flusher thread
* after a page has been flushed to disk.
*/
- public void resetLogPage(long nextWritePosition, int pageIndex) throws IOException {
+ public void resetLogPage(long lsn, long nextWritePosition, int pageIndex) throws IOException {
- String filePath = LogUtil.getLogFilePath(logManagerProperties, getLogFileId(nextWritePosition));
+ String filePath = LogUtil.getLogFilePath(logManagerProperties, getLogFileId(lsn));
logPages[pageIndex].reset(filePath, LogUtil.getFileOffset(this, nextWritePosition),
logManagerProperties.getLogPageSize());
@@ -906,19 +904,6 @@
synchronized (logManager.getLogPage(pageToFlush)) {
- // lock the internal state of the log manager and create a
- // log file if necessary.
- int prevLogFileId = logManager.getLogFileId(logManager.getLastFlushedLsn().get());
- int nextLogFileId = logManager.getLogFileId(logManager.getLastFlushedLsn().get()
- + logManager.getLogManagerProperties().getLogPageSize());
- if (prevLogFileId != nextLogFileId) {
- String filePath = LogUtil.getLogFilePath(logManager.getLogManagerProperties(), nextLogFileId);
- FileUtil.createFileIfNotExists(filePath);
- logManager.getLogPage(pageToFlush).reset(
- LogUtil.getLogFilePath(logManager.getLogManagerProperties(), nextLogFileId), 0,
- logManager.getLogManagerProperties().getLogPageSize());
- }
-
// #. sleep during the groupCommitWaitTime
sleep(groupCommitWaitPeriod);
@@ -941,6 +926,13 @@
// the log page)
logManager.getLogPage(pageToFlush).flush();
+ // Map the log page to a new region in the log file.
+ long nextWritePosition = logManager.getLogPages()[pageToFlush].getNextWritePosition()
+ + logManager.getLogManagerProperties().getLogBufferSize();
+
+ logManager.resetLogPage(logManager.getLastFlushedLsn().get() + 1
+ + logManager.getLogManagerProperties().getLogBufferSize(), nextWritePosition, pageToFlush);
+
// increment the last flushed lsn and lastFlushedPage
logManager.incrementLastFlushedLsn(logManager.getLogManagerProperties().getLogPageSize());
@@ -950,12 +942,6 @@
// reset the count to 1
logManager.getLogPageOwnershipCount(pageToFlush).set(PageOwnershipStatus.LOG_WRITER);
- // Map the log page to a new region in the log file.
- long nextWritePosition = logManager.getLogPages()[pageToFlush].getNextWritePosition()
- + logManager.getLogManagerProperties().getLogBufferSize();
-
- logManager.resetLogPage(nextWritePosition, pageToFlush);
-
// mark the page as ACTIVE
logManager.getLogPageStatus(pageToFlush).set(LogManager.PageState.ACTIVE);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
index 0211f69..5040fa9 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogManagerProperties.java
@@ -32,7 +32,7 @@
private static final int DEFAULT_LOG_PAGE_SIZE = 128 * 1024; //128KB
private static final int DEFAULT_NUM_LOG_PAGES = 8;
private static final long DEFAULT_LOG_PARTITION_SIZE = (long) 1024 * 1024 * 1024 * 2; //2GB
- private static final long DEFAULT_GROUP_COMMIT_WAIT_PERIOD = 1; // time in millisec.
+ private static final long DEFAULT_GROUP_COMMIT_WAIT_PERIOD = 200; // time in millisec.
private static final String DEFAULT_LOG_FILE_PREFIX = "asterix_transaction_log";
private static final String DEFAULT_LOG_DIRECTORY = "asterix_logs/";
diff --git a/pom.xml b/pom.xml
index 2d0922f..b424195 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,8 +7,8 @@
<packaging>pom</packaging>
<properties>
- <algebricks.version>0.2.4</algebricks.version>
- <hyracks.version>0.2.4</hyracks.version>
+ <algebricks.version>0.2.5-SNAPSHOT</algebricks.version>
+ <hyracks.version>0.2.5-SNAPSHOT</hyracks.version>
</properties>
<build>