merge from asterix_lsm_stabilization
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index 6e24513..350734b 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-algebra</artifactId>
@@ -77,19 +77,19 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index d62d031..452562e 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-app</artifactId>
@@ -75,7 +75,6 @@
<!-- doesn't work from m2eclipse, currently <additionalClasspathElements>
<additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
</additionalClasspathElements> -->
- <skipTests>true</skipTests>
<forkMode>pertest</forkMode>
<argLine>-enableassertions -Xmx${test.heap.size}m
-Dfile.encoding=UTF-8
@@ -95,7 +94,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-algebra</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -144,38 +143,45 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-tools</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-common</artifactId>
+ <version>0.0.6-SNAPSHOT</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>com.kenai.nbpwr</groupId>
<artifactId>org-apache-commons-io</artifactId>
@@ -217,7 +223,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-test-framework</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
</dependencies>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
index 29feb5e..2c15578 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
@@ -17,6 +17,7 @@
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
+import java.util.logging.Level;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServlet;
@@ -32,6 +33,7 @@
import edu.uci.ics.asterix.aql.parser.AQLParser;
import edu.uci.ics.asterix.aql.parser.ParseException;
import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.result.ResultReader;
import edu.uci.ics.asterix.result.ResultUtils;
@@ -95,6 +97,7 @@
aqlTranslator.compileAndExecute(hcc, hds, asyncResults);
} catch (ParseException pe) {
+ GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.getMessage(), pe);
StringBuilder errorMessage = new StringBuilder();
String message = pe.getMessage();
message = message.replace("<", "&lt;");
@@ -110,6 +113,7 @@
JSONObject errorResp = ResultUtils.getErrorResponse(2, errorMessage.toString());
out.write(errorResp.toString());
} catch (Exception e) {
+ GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, e.getMessage(), e);
StringBuilder errorMessage = new StringBuilder();
errorMessage.append(e.getMessage());
JSONObject errorResp = ResultUtils.getErrorResponse(99, errorMessage.toString());
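
Both catch blocks above now follow the same pattern: record the full exception server-side before handing the client a sanitized JSON error. A condensed sketch of that pattern, using names from the hunks above (the StringBuilder plumbing is simplified here, and `out` is assumed to be the servlet's PrintWriter):

```java
try {
    aqlTranslator.compileAndExecute(hcc, hds, asyncResults);
} catch (ParseException pe) {
    // Keep the stack trace in the server log at INFO...
    GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.getMessage(), pe);
    // ...while the client only sees the HTML-escaped message as JSON.
    String message = pe.getMessage().replace("<", "&lt;");
    out.write(ResultUtils.getErrorResponse(2, message).toString());
} catch (Exception e) {
    GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, e.getMessage(), e);
    out.write(ResultUtils.getErrorResponse(99, e.getMessage()).toString());
}
```
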
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 68c6271..2fc9525 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -133,7 +133,7 @@
private List<FunctionDecl> declaredFunctions;
private static Logger LOGGER = Logger.getLogger(AqlTranslator.class.getName());
-
+
public AqlTranslator(List<Statement> aqlStatements, PrintWriter out, SessionConfig pc, DisplayFormat pdf)
throws MetadataException, AsterixException {
this.aqlStatements = aqlStatements;
@@ -157,12 +157,16 @@
* Compiles and submits for execution a list of AQL statements.
*
* @param hcc
- * A Hyracks client connection that is used to submit a jobspec to Hyracks.
+ * A Hyracks client connection that is used to submit a jobspec
+ * to Hyracks.
* @param hdc
- * A Hyracks dataset client object that is used to read the results.
+ * A Hyracks dataset client object that is used to read the
+ * results.
* @param asyncResults
- * True if the results should be read asynchronously or false if we should wait for results to be read.
- * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
+ * True if the results should be read asynchronously or false if
+ * we should wait for results to be read.
+ * @return A List<QueryResult> containing a QueryResult instance
+ * corresponding to each submitted query.
* @throws Exception
*/
public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, boolean asyncResults)
@@ -175,10 +179,6 @@
Map<String, String> config = new HashMap<String, String>();
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
- String numLogPages = AsterixProperties.INSTANCE.getProperty("log_buffer_num_pages", "4");
- LOGGER.info("Number of log pages (info)" + numLogPages);
- LOGGER.severe("Number of log pages (severe)" + numLogPages);
-
for (Statement stmt : aqlStatements) {
validateOperation(activeDefaultDataverse, stmt);
AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse);
@@ -449,7 +449,12 @@
}
}
- //#. add a new dataset with PendingAddOp
+ // #. initialize DatasetIdFactory if it is not initialized.
+ if (!DatasetIdFactory.isInitialized()) {
+ DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
+ }
+
+ // #. add a new dataset with PendingAddOp
dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
@@ -460,20 +465,21 @@
JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
metadataProvider);
- //#. make metadataTxn commit before calling runJob.
+ // #. make metadataTxn commit before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
- //#. runJob
+ // #. runJob
runJob(hcc, jobSpec, true);
- //#. begin new metadataTxn
+ // #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
- //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
+ // #. add a new dataset with PendingNoOp after deleting the dataset
+ // with PendingAddOp
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
@@ -485,8 +491,8 @@
}
if (dataset != null) {
- //#. execute compensation operations
- // remove the index in NC
+ // #. execute compensation operations
+ // remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -501,10 +507,11 @@
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //do no throw exception since still the metadata needs to be compensated.
+ // do not throw an exception since the metadata still needs
+ // to be compensated.
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -555,6 +562,13 @@
Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
datasetName, indexName);
+ String itemTypeName = ds.getItemTypeName();
+ Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
+ itemTypeName);
+ IAType itemType = dt.getDatatype();
+ ARecordType aRecordType = (ARecordType) itemType;
+ aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
+
if (idx != null) {
if (!stmtCreateIndex.getIfNotExists()) {
throw new AlgebricksException("An index with this name " + indexName + " already exists.");
@@ -565,13 +579,13 @@
}
}
- //#. add a new index with PendingAddOp
+ // #. add a new index with PendingAddOp
Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
- //#. create the index artifact in NC.
+ // #. create the index artifact in NC.
CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
@@ -588,7 +602,7 @@
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- //#. load data into the index in NC.
+ // #. load data into the index in NC.
cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
@@ -597,12 +611,13 @@
runJob(hcc, spec, true);
- //#. begin new metadataTxn
+ // #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
+ // #. add another new index with PendingNoOp after deleting the
+ // index with PendingAddOp
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
indexName);
index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
@@ -617,8 +632,8 @@
}
if (spec != null) {
- //#. execute compensation operations
- // remove the index in NC
+ // #. execute compensation operations
+ // remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -633,10 +648,11 @@
if (bActiveTxn) {
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //do no throw exception since still the metadata needs to be compensated.
+ // do not throw an exception since the metadata still needs
+ // to be compensated.
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -721,7 +737,8 @@
return;
}
- //#. prepare jobs which will drop corresponding datasets with indexes.
+ // #. prepare jobs which will drop corresponding datasets with
+ // indexes.
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
for (int j = 0; j < datasets.size(); j++) {
String datasetName = datasets.get(j).getDatasetName();
@@ -742,9 +759,10 @@
}
}
- //#. mark PendingDropOp on the dataverse record by
- // first, deleting the dataverse record from the DATAVERSE_DATASET
- // second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
+ // #. mark PendingDropOp on the dataverse record by
+ // first, deleting the dataverse record from the DATAVERSE_DATASET
+ // second, inserting the dataverse record with the PendingDropOp
+ // value into the DATAVERSE_DATASET
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dvName, dv.getDataFormat(),
IMetadataEntity.PENDING_DROP_OP));
@@ -760,7 +778,7 @@
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- //#. finally, delete the dataverse.
+ // #. finally, delete the dataverse.
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dvName) {
activeDefaultDataverse = null;
@@ -772,13 +790,13 @@
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //#. execute compensation operations
- // remove the all indexes in NC
+ // #. execute compensation operations
+ // remove all the indexes in NC
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -827,7 +845,7 @@
if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
- //#. prepare jobs to drop the datatset and the indexes in NC
+ // #. prepare jobs to drop the dataset and the indexes in NC
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
for (int j = 0; j < indexes.size(); j++) {
if (indexes.get(j).isSecondaryIndex()) {
@@ -839,7 +857,7 @@
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
- //#. mark the existing dataset as PendingDropOp
+ // #. mark the existing dataset as PendingDropOp
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(
mdTxnCtx,
@@ -849,7 +867,7 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
- //#. run the jobs
+ // #. run the jobs
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
@@ -859,7 +877,7 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
- //#. finally, delete the dataset.
+ // #. finally, delete the dataset.
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -868,13 +886,13 @@
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //#. execute compensation operations
- // remove the all indexes in NC
+ // #. execute compensation operations
+ // remove all the indexes in NC
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -927,19 +945,19 @@
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
} else {
- //#. prepare a job to drop the index in NC.
+ // #. prepare a job to drop the index in NC.
CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
indexName);
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
- //#. mark PendingDropOp on the existing index
+ // #. mark PendingDropOp on the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
MetadataManager.INSTANCE.addIndex(
mdTxnCtx,
new Index(dataverseName, datasetName, indexName, index.getIndexType(), index
.getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
- //#. commit the existing transaction before calling runJob.
+ // #. commit the existing transaction before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
@@ -947,12 +965,12 @@
runJob(hcc, jobSpec, true);
}
- //#. begin a new transaction
+ // #. begin a new transaction
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- //#. finally, delete the existing index
+ // #. finally, delete the existing index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
}
} else {
@@ -966,13 +984,13 @@
MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
}
- //#. execute compensation operations
- // remove the all indexes in NC
+ // #. execute compensation operations
+ // remove all the indexes in NC
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -1131,7 +1149,8 @@
if (!index.isSecondaryIndex()) {
continue;
}
- // Create CompiledCreateIndexStatement from metadata entity 'index'.
+ // Create CompiledCreateIndexStatement from metadata entity
+ // 'index'.
CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(),
dataverseName, index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(),
index.getIndexType());
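
The "#." comments added throughout this file trace one crash-consistency protocol for DDL: every artifact is first recorded with a pending flag, the Hyracks job runs with no metadata transaction open, and only then is the record rewritten as permanent; on failure, the catch blocks compensate in reverse order. A condensed sketch of the happy path for CREATE DATASET, using names from the diff (`pendingDataset`, `finalDataset`, and `jobSpec` are illustrative placeholders; error handling is elided):

```java
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();

// #. add a new dataset with PendingAddOp, so a crash leaves behind a
//    pending record that compensation/recovery can identify and remove.
MetadataManager.INSTANCE.addDataset(mdTxnCtx, pendingDataset); // PENDING_ADD_OP

// #. make the metadata txn commit before calling runJob: the physical
//    dataset is created in the NCs outside any metadata transaction.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
runJob(hcc, jobSpec, true);

// #. in a fresh metadata txn, swap the pending record for the final one.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(mdTxnCtx, finalDataset); // PENDING_NO_OP
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
```
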
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index 61a7fdc..8ad8c67 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -44,6 +44,10 @@
// #. recover if the system is corrupted by checking system state.
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
systemState = recoveryMgr.getSystemState();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("System is in a state: " + systemState);
+ }
+
if (systemState != SystemState.NEW_UNIVERSE) {
PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
.getLocalResourceRepository();
@@ -87,6 +91,13 @@
PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
.getLocalResourceRepository();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("nodeid" + nodeId);
+ LOGGER.info("proxy" + proxy);
+ LOGGER.info("stores" + proxy.getAsterixProperties().getStores());
+ LOGGER.info("store" + proxy.getAsterixProperties().getStores().get(nodeId)[0]);
+ }
+
localResourceRepository.initialize(nodeId, proxy.getAsterixProperties().getStores().get(nodeId)[0], true,
null);
}
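
Both hunks add INFO tracing around node bootstrap. The surrounding logic branches on the recovered system state; a condensed sketch of that decision, assuming (per the context lines) that `SystemState.NEW_UNIVERSE` marks a fresh instance whose local resource repository is initialized from the node's first configured store, while any other state goes through recovery against existing resources:

```java
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
SystemState systemState = recoveryMgr.getSystemState();
if (LOGGER.isLoggable(Level.INFO)) {
    LOGGER.info("System is in state: " + systemState);
}
if (systemState == SystemState.NEW_UNIVERSE) {
    // Fresh instance: register this node's store directory with the
    // persistent local resource repository.
    localResourceRepository.initialize(nodeId,
            proxy.getAsterixProperties().getStores().get(nodeId)[0], true, null);
}
```
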
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 151502b..bf21c7f 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -14,50 +14,29 @@
*/
package edu.uci.ics.asterix.test.metadata;
-import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
-import java.util.logging.Logger;
-import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpStatus;
-import org.apache.commons.httpclient.NameValuePair;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.io.FileUtils;
-import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.test.aql.TestsUtils;
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-import edu.uci.ics.asterix.testframework.context.TestFileContext;
-import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
/**
* Executes the Metadata tests.
*/
-@RunWith(Parameterized.class)
public class MetadataTest {
- private TestCaseContext tcCtx;
-
- private static final Logger LOGGER = Logger.getLogger(MetadataTest.class.getName());
private static final String PATH_ACTUAL = "mdtest/";
private static final String PATH_BASE = "src/test/resources/metadata/";
private static final String TEST_CONFIG_FILE_NAME = "asterix-configuration.xml";
private static final String WEB_SERVER_PORT = "19002";
+ private static List<TestCaseContext> testCaseCollection;
@BeforeClass
public static void setUp() throws Exception {
@@ -72,7 +51,8 @@
}
AsterixHyracksIntegrationUtil.init();
-
+ TestCaseContext.Builder b = new TestCaseContext.Builder();
+ testCaseCollection = b.build(new File(PATH_BASE));
}
@AfterClass
@@ -95,197 +75,10 @@
}
}
- @Parameters
- public static Collection<Object[]> tests() throws Exception {
- Collection<Object[]> testArgs = new ArrayList<Object[]>();
- TestCaseContext.Builder b = new TestCaseContext.Builder();
- for (TestCaseContext ctx : b.build(new File(PATH_BASE))) {
- testArgs.add(new Object[] { ctx });
- }
- return testArgs;
- }
-
- public MetadataTest(TestCaseContext tcCtx) {
- this.tcCtx = tcCtx;
- }
-
- // Method that reads a DDL/Update/Query File
- // and returns the contents as a string
- // This string is later passed to REST API for execution.
- public String readTestFile(File testFile) throws Exception {
- BufferedReader reader = new BufferedReader(new FileReader(testFile));
- String line = null;
- StringBuilder stringBuilder = new StringBuilder();
- String ls = System.getProperty("line.separator");
-
- while ((line = reader.readLine()) != null) {
- stringBuilder.append(line);
- stringBuilder.append(ls);
- }
-
- return stringBuilder.toString();
- }
-
- // To execute DDL and Update statements
- // create type statement
- // create dataset statement
- // create index statement
- // create dataverse statement
- // create function statement
- public void executeDDL(String str) throws Exception {
- final String url = "http://localhost:19101/ddl";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Read the response body as String.
- String responseBody = method.getResponseBodyAsString();
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
- }
-
- // To execute Update statements
- // Insert and Delete statements are executed here
- public void executeUpdate(String str) throws Exception {
- final String url = "http://localhost:19101/update";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Read the response body as String.
- String responseBody = method.getResponseBodyAsString();
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
- }
-
- // Executes Query and returns results as JSONArray
- public JSONObject executeQuery(String str) throws Exception {
-
- final String url = "http://localhost:19101/query";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- JSONObject result = null;
-
- try {
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
-
- // Read the response body as String.
- String responseBody = method.getResponseBodyAsString();
-
- result = new JSONObject(responseBody);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- }
- return result;
- }
-
@Test
public void test() throws Exception {
- List<TestFileContext> testFileCtxs;
- List<TestFileContext> expectedResultFileCtxs;
-
- File testFile;
- File expectedResultFile;
- String statement;
-
- int queryCount = 0;
- JSONObject result;
-
- List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
- for (CompilationUnit cUnit : cUnits) {
- testFileCtxs = tcCtx.getTestFiles(cUnit);
- expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
-
- for (TestFileContext ctx : testFileCtxs) {
- testFile = ctx.getFile();
- statement = readTestFile(testFile);
- try {
- switch (ctx.getType()) {
- case "ddl":
- executeDDL(statement);
- break;
- case "update":
- executeUpdate(statement);
- break;
- case "query":
- result = executeQuery(statement);
- if (!cUnit.getExpectedError().isEmpty()) {
- if (!result.has("error")) {
- throw new Exception("Test \"" + testFile + "\" FAILED!");
- }
- } else {
- expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
-
- File actualFile = new File(PATH_ACTUAL + File.separator
- + tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
- + cUnit.getName() + ".adm");
-
- File actualResultFile = tcCtx.getActualResultFile(cUnit, new File(PATH_ACTUAL));
- actualResultFile.getParentFile().mkdirs();
-
- TestsUtils.writeResultsToFile(actualFile, result);
-
- TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
- expectedResultFile, actualFile);
- }
- queryCount++;
- break;
- default:
- throw new IllegalArgumentException("No statements of type " + ctx.getType());
- }
- } catch (Exception e) {
- LOGGER.severe("Test \"" + testFile + "\" FAILED!");
- e.printStackTrace();
- if (cUnit.getExpectedError().isEmpty()) {
- throw new Exception("Test \"" + testFile + "\" FAILED!", e);
- }
- }
- }
+ for (TestCaseContext testCaseCtx : testCaseCollection) {
+ TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx);
}
}
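
MetadataTest loses its copy-pasted HTTP plumbing; it now hands each TestCaseContext to a shared TestsUtils.executeTest (ExecutionTest below gets the identical treatment). That method's body is not part of this diff; based on the inlined logic being deleted, it presumably looks roughly like the following hypothetical reconstruction, not the actual implementation:

```java
// Hypothetical reconstruction of the shared driver; readTestFile,
// executeDDL, executeUpdate, and executeQuery stand for the helpers this
// diff deletes from the two test classes (HTTP GETs against
// http://localhost:19101/{ddl,update,query}).
public static void executeTest(String actualPath, TestCaseContext tcCtx) throws Exception {
    int queryCount = 0;
    for (CompilationUnit cUnit : tcCtx.getTestCase().getCompilationUnit()) {
        for (TestFileContext ctx : tcCtx.getTestFiles(cUnit)) {
            String statement = readTestFile(ctx.getFile());
            switch (ctx.getType()) {
                case "ddl":
                    executeDDL(statement);
                    break;
                case "update":
                    executeUpdate(statement);
                    break;
                case "query":
                    JSONObject result = executeQuery(statement);
                    if (!cUnit.getExpectedError().isEmpty()) {
                        if (!result.has("error")) {
                            throw new Exception("Test \"" + ctx.getFile() + "\" FAILED!");
                        }
                    } else {
                        // write the result under actualPath and compare it
                        // against the expected-result file for this unit
                    }
                    queryCount++;
                    break;
                default:
                    throw new IllegalArgumentException("No statements of type " + ctx.getType());
            }
        }
    }
}
```
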
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index 33b7ed0..0ed5193 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -1,28 +1,14 @@
package edu.uci.ics.asterix.test.runtime;
-import java.io.BufferedReader;
import java.io.File;
-import java.io.FileReader;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
-import java.util.logging.Logger;
-import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpStatus;
-import org.apache.commons.httpclient.NameValuePair;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.io.FileUtils;
-import org.json.JSONObject;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
import edu.uci.ics.asterix.common.config.GlobalConfig;
@@ -30,256 +16,75 @@
import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
import edu.uci.ics.asterix.test.aql.TestsUtils;
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-import edu.uci.ics.asterix.testframework.context.TestFileContext;
-import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
/**
* Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
*/
-@RunWith(Parameterized.class)
+//@RunWith(Parameterized.class)
public class ExecutionTest {
- private static final String PATH_ACTUAL = "rttest/";
- private static final String PATH_BASE = "src/test/resources/runtimets/";
+ private static final String PATH_ACTUAL = "rttest/";
+ private static final String PATH_BASE = "src/test/resources/runtimets/";
- private static final String TEST_CONFIG_FILE_NAME = "asterix-configuration.xml";
- private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
+ private static final String TEST_CONFIG_FILE_NAME = "asterix-configuration.xml";
+ private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data",
+ "nc2data" };
- private static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
+ private static List<TestCaseContext> testCaseCollection;
- // private static NCBootstrapImpl _bootstrap = new NCBootstrapImpl();
+ @BeforeClass
+ public static void setUp() throws Exception {
+ System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY,
+ TEST_CONFIG_FILE_NAME);
+ System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
+ File outdir = new File(PATH_ACTUAL);
+ outdir.mkdirs();
- @BeforeClass
- public static void setUp() throws Exception {
- System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
- System.setProperty(GlobalConfig.WEB_SERVER_PORT_PROPERTY, "19002");
- File outdir = new File(PATH_ACTUAL);
- outdir.mkdirs();
+ File log = new File("asterix_logs");
+ if (log.exists()) {
+ FileUtils.deleteDirectory(log);
+ }
- File log = new File("asterix_logs");
- if (log.exists()) {
- FileUtils.deleteDirectory(log);
- }
+ AsterixHyracksIntegrationUtil.init();
- AsterixHyracksIntegrationUtil.init();
+ // TODO: Uncomment when hadoop version is upgraded and adapters are
+ // ported.
+ HDFSCluster.getInstance().setup();
- // TODO: Uncomment when hadoop version is upgraded and adapters are
- // ported.
- HDFSCluster.getInstance().setup();
+ // Set the node resolver to be the identity resolver that expects node
+ // names
+ // to be node controller ids; a valid assumption in test environment.
+ System.setProperty(
+ FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
+ IdentitiyResolverFactory.class.getName());
+ TestCaseContext.Builder b = new TestCaseContext.Builder();
+ testCaseCollection = b.build(new File(PATH_BASE));
+ }
- // Set the node resolver to be the identity resolver that expects node
- // names
- // to be node controller ids; a valid assumption in test environment.
- System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
- IdentitiyResolverFactory.class.getName());
- }
+ @AfterClass
+ public static void tearDown() throws Exception {
+ AsterixHyracksIntegrationUtil.deinit();
+ File outdir = new File(PATH_ACTUAL);
+ File[] files = outdir.listFiles();
+ if (files == null || files.length == 0) {
+ outdir.delete();
+ }
+ // clean up the files written by the ASTERIX storage manager
+ for (String d : ASTERIX_DATA_DIRS) {
+ TestsUtils.deleteRec(new File(d));
+ }
- @AfterClass
- public static void tearDown() throws Exception {
- AsterixHyracksIntegrationUtil.deinit();
- File outdir = new File(PATH_ACTUAL);
- File[] files = outdir.listFiles();
- if (files == null || files.length == 0) {
- outdir.delete();
- }
- // clean up the files written by the ASTERIX storage manager
- for (String d : ASTERIX_DATA_DIRS) {
- TestsUtils.deleteRec(new File(d));
- }
+ File log = new File("asterix_logs");
+ if (log.exists()) {
+ FileUtils.deleteDirectory(log);
+ }
+ HDFSCluster.getInstance().cleanup();
+ }
- File log = new File("asterix_logs");
- if (log.exists()) {
- FileUtils.deleteDirectory(log);
- }
- HDFSCluster.getInstance().cleanup();
- }
+ @Test
+ public void test() throws Exception {
+ for (TestCaseContext testCaseCtx : testCaseCollection) {
+ TestsUtils.executeTest(PATH_ACTUAL, testCaseCtx);
+ }
- @Parameters
- public static Collection<Object[]> tests() throws Exception {
- Collection<Object[]> testArgs = new ArrayList<Object[]>();
- TestCaseContext.Builder b = new TestCaseContext.Builder();
- for (TestCaseContext ctx : b.build(new File(PATH_BASE))) {
- testArgs.add(new Object[] { ctx });
- }
- return testArgs;
- }
-
- private TestCaseContext tcCtx;
-
- public ExecutionTest(TestCaseContext tcCtx) {
- this.tcCtx = tcCtx;
- }
-
- // Method that reads a DDL/Update/Query File
- // and returns the contents as a string
- // This string is later passed to REST API for execution.
- public String readTestFile(File testFile) throws Exception {
- BufferedReader reader = new BufferedReader(new FileReader(testFile));
- String line = null;
- StringBuilder stringBuilder = new StringBuilder();
- String ls = System.getProperty("line.separator");
-
- while ((line = reader.readLine()) != null) {
- stringBuilder.append(line);
- stringBuilder.append(ls);
- }
-
- return stringBuilder.toString();
- }
-
- // To execute DDL and Update statements
- // create type statement
- // create dataset statement
- // create index statement
- // create dataverse statement
- // create function statement
- public void executeDDL(String str) throws Exception {
- final String url = "http://localhost:19101/ddl";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("ddl", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
- }
-
- // To execute Update statements
- // Insert and Delete statements are executed here
- public void executeUpdate(String str) throws Exception {
- final String url = "http://localhost:19101/update";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("statements", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
- }
-
- // Executes Query and returns results as JSONArray
- public JSONObject executeQuery(String str) throws Exception {
-
- final String url = "http://localhost:19101/query";
-
- // Create an instance of HttpClient.
- HttpClient client = new HttpClient();
-
- // Create a method instance.
- GetMethod method = new GetMethod(url);
-
- method.setQueryString(new NameValuePair[] { new NameValuePair("query", str) });
-
- // Provide custom retry handler is necessary
- method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
-
- JSONObject result = null;
-
- try {
- // Execute the method.
- int statusCode = client.executeMethod(method);
-
- // Check if the method was executed successfully.
- if (statusCode != HttpStatus.SC_OK) {
- System.err.println("Method failed: " + method.getStatusLine());
- }
-
- // Read the response body as String.
- String responseBody = method.getResponseBodyAsString();
-
- result = new JSONObject(responseBody);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- }
- return result;
- }
-
- @Test
- public void test() throws Exception {
- List<TestFileContext> testFileCtxs;
- List<TestFileContext> expectedResultFileCtxs;
-
- File testFile;
- File expectedResultFile;
- String statement;
-
- int queryCount = 0;
- JSONObject result;
-
- List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
- for (CompilationUnit cUnit : cUnits) {
- LOGGER.info("[TEST]: " + tcCtx.getTestCase().getFilePath() + "/" + cUnit.getName());
-
- testFileCtxs = tcCtx.getTestFiles(cUnit);
- expectedResultFileCtxs = tcCtx.getExpectedResultFiles(cUnit);
-
- for (TestFileContext ctx : testFileCtxs) {
- testFile = ctx.getFile();
- statement = readTestFile(testFile);
- try {
- switch (ctx.getType()) {
- case "ddl":
- executeDDL(statement);
- break;
- case "update":
- executeUpdate(statement);
- break;
- case "query":
- result = executeQuery(statement);
- if (!cUnit.getExpectedError().isEmpty()) {
- if (!result.has("error")) {
- throw new Exception("Test \"" + testFile + "\" FAILED!");
- }
- } else {
- expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
-
- File actualFile = new File(PATH_ACTUAL + File.separator
- + tcCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
- + cUnit.getName() + ".adm");
-
- File actualResultFile = tcCtx.getActualResultFile(cUnit, new File(PATH_ACTUAL));
- actualResultFile.getParentFile().mkdirs();
-
- TestsUtils.writeResultsToFile(actualFile, result);
-
- TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
- expectedResultFile, actualFile);
- }
- queryCount++;
- break;
- default:
- throw new IllegalArgumentException("No statements of type " + ctx.getType());
- }
- } catch (Exception e) {
- if (cUnit.getExpectedError().isEmpty()) {
- throw new Exception("Test \"" + testFile + "\" FAILED!", e);
- }
- }
- }
- }
- }
+ }
}
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
index 32ccf6d..132d63e 100644
--- a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_1/issue_255_create_dataset_error_1.1.ddl.aql
@@ -2,7 +2,7 @@
* Description : create a dataset partitioned on non-existent field
* Expected Res : Failure
* Date : 14 April 2013
- * Issue : 272
+ * Issue : 255
*/
drop dataverse test if exists;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
index add51df..e900e6d 100644
--- a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_dataset_error_2/issue_255_create_dataset_error_2.1.ddl.aql
@@ -2,7 +2,7 @@
* Description : create a dataset partitioned on ARecord type
* Expected Res : Failure
* Date : 14 April 2013
- * Issue : 272
+ * Issue : 255
*/
drop dataverse test if exists;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
index 647b93a..7576da7 100644
--- a/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_255_create_feed_error/issue_255_create_feed_error.1.ddl.aql
@@ -2,7 +2,7 @@
* Description : create a dataset partitioned on non-existent field
* Expected Res : Failure
* Date : 14 April 2013
- * Issue : 272
+ * Issue : 255
*/
drop dataverse feeds if exists;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_1/issue_266_create_dataset_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_1/issue_266_create_dataset_error_1.1.ddl.aql
new file mode 100644
index 0000000..717ba21
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_1/issue_266_create_dataset_error_1.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description : create a dataset partitioned on unallowed key type
+ * Expected Res : Failure
+ * Date : 22 April 2013
+ * Issue : 266
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+loc:point
+}
+
+create dataset testds(opentype) primary key loc;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_2/issue_266_create_dataset_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_2/issue_266_create_dataset_error_2.1.ddl.aql
new file mode 100644
index 0000000..cda7ae9
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_266_create_dataset_error_2/issue_266_create_dataset_error_2.1.ddl.aql
@@ -0,0 +1,16 @@
+/*
+ * Description : create a dataset partitioned on nullable key type
+ * Expected Res : Failure
+ * Date : 22 April 2013
+ * Issue : 266
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type opentype as open {
+id:int32?
+}
+
+create dataset testds(opentype) primary key id;
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_1/issue_384_create_index_error_1.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_1/issue_384_create_index_error_1.1.ddl.aql
new file mode 100644
index 0000000..acc89db
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_1/issue_384_create_index_error_1.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create a btree index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+loc:point
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(loc);
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_2/issue_384_create_index_error_2.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_2/issue_384_create_index_error_2.1.ddl.aql
new file mode 100644
index 0000000..f3ec8b2
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_2/issue_384_create_index_error_2.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create an rtree index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+age:int32
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(age) type rtree;
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_3/issue_384_create_index_error_3.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_3/issue_384_create_index_error_3.1.ddl.aql
new file mode 100644
index 0000000..4d8c488
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_3/issue_384_create_index_error_3.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create a keyword index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+loc:point
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(loc) type keyword;
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_4/issue_384_create_index_error_4.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_4/issue_384_create_index_error_4.1.ddl.aql
new file mode 100644
index 0000000..938f137
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_4/issue_384_create_index_error_4.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create a fuzzy keyword index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+loc:point
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(loc) type fuzzy keyword;
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_5/issue_384_create_index_error_5.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_5/issue_384_create_index_error_5.1.ddl.aql
new file mode 100644
index 0000000..d9baf04
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_5/issue_384_create_index_error_5.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create an ngram index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+loc:point
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(loc) type ngram(2);
+
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_6/issue_384_create_index_error_6.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_6/issue_384_create_index_error_6.1.ddl.aql
new file mode 100644
index 0000000..dee287d
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_384_create_index_error_6/issue_384_create_index_error_6.1.ddl.aql
@@ -0,0 +1,20 @@
+/*
+ * Description : create a fuzzy ngram index on unallowed key type.
+ * Expected Res : Failure
+ * Date : 23 April 2013
+ * Issue : 384
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+
+use dataverse test;
+
+create type opentype as open {
+id:int32,
+loc:point
+}
+
+create dataset testds(opentype) primary key id;
+create index loc_index on testds(loc) type fuzzy ngram(2);
+
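
The two issue_266 files and six issue_384 files above all exercise the `aRecordType.validateKeyFields(...)` call this patch adds to AqlTranslator's index-creation path (the dataset path presumably runs an analogous check). Only the call site appears in this diff; the following is an illustrative guess at the shape of the validation, not the actual implementation (`isValidKeyType` is a hypothetical helper):

```java
// Hypothetical sketch: reject a key field that does not exist in the record
// type, or whose type the given index kind cannot handle (e.g. point for
// BTREE, int32 for RTREE, non-string for keyword/ngram, nullable for a
// primary key); exactly what the new exception tests expect to fail.
public void validateKeyFields(List<String> keyFields, IndexType indexType) throws AlgebricksException {
    for (String field : keyFields) {
        IAType fieldType = getFieldType(field); // assumed lookup by field name
        if (fieldType == null) {
            throw new AlgebricksException("There is no field with this name " + field + ".");
        }
        if (!isValidKeyType(fieldType, indexType)) { // hypothetical helper
            throw new AlgebricksException("The field " + field + " of type "
                    + fieldType.getTypeTag() + " cannot be indexed using the "
                    + indexType + " index.");
        }
    }
}
```
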
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index fb833b7..47bcafa 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -242,6 +242,54 @@
<expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
</compilation-unit>
</test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_266_create_dataset_error_1">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_266_create_dataset_error_2">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_1">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_2">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_3">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_4">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_5">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_384_create_index_error_6">
+ <output-dir compare="Text">none</output-dir>
+ <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+ </compilation-unit>
+ </test-case>
</test-group>
<test-group name="transaction">
<test-case FilePath="transaction">
diff --git a/asterix-aql/pom.xml b/asterix-aql/pom.xml
index bb6f4b5..038ec62 100644
--- a/asterix-aql/pom.xml
+++ b/asterix-aql/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-aql</artifactId>
<build>
@@ -85,19 +85,19 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index 520fed6..1a0e782 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -4,7 +4,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-common</artifactId>
<build>
@@ -101,7 +101,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -112,7 +112,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-test-framework</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixProperties.java
index 0a01eaf..b94436a 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixProperties.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixProperties.java
@@ -19,7 +19,6 @@
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.Serializable;
-import java.io.StringWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -51,43 +50,59 @@
public static class AsterixConfigurationKeys {
- //JVM parameters for each Node Contoller (NC)
- public static final String NC_JAVA_OPTS = "nc_java_opts"; // default "-Xmx1024m"
+ // JVM parameters for each Node Controller (NC)
+ public static final String NC_JAVA_OPTS = "nc_java_opts"; // default
+ // "-Xmx1024m"
- //JVM parameters for the Cluster Contoller (CC)
- public static final String CC_JAVA_OPTS = "cc_java_opts"; // default "-Xmx1024m"
+ // JVM parameters for the Cluster Controller (CC)
+ public static final String CC_JAVA_OPTS = "cc_java_opts"; // default
+ // "-Xmx1024m"
- public static final String SIZE_MEMORY_COMPONENT = "size_memory_component"; // default "512m"
+ public static final String SIZE_MEMORY_COMPONENT = "size_memory_component"; // default
+ // "512m"
- public static final String TOTAL_SIZE_MEMORY_COMPONENT = "total_size_memory_component"; // default "512m"
+ public static final String TOTAL_SIZE_MEMORY_COMPONENT = "total_size_memory_component"; // default
+ // "512m"
- public static final String LOG_BUFFER_NUM_PAGES = "log_buffer_num_pages"; // default "8"
+ public static final String LOG_BUFFER_NUM_PAGES = "log_buffer_num_pages"; // default
+ // "8"
- public static final String LOG_BUFFER_PAGE_SIZE = "log_buffer_page_size"; // default "131072 (128K)"
+ public static final String LOG_BUFFER_PAGE_SIZE = "log_buffer_page_size"; // default
+ // "131072 (128K)"
- public static final String LOG_PARTITION_SIZE = "log_partition_size"; // default "2147483648 (2GB)"
+ public static final String LOG_PARTITION_SIZE = "log_partition_size"; // default
+ // "2147483648 (2GB)"
- public static final String GROUP_COMMIT_INTERVAL = "group_commit_interval"; // default "200ms (128K)"
+ public static final String GROUP_COMMIT_INTERVAL = "group_commit_interval"; // default
+ // "200ms (128K)"
- public static final String SORT_OP_MEMORY = "sort_op_memory"; // default "512m"
+ public static final String SORT_OP_MEMORY = "sort_op_memory"; // default
+ // "512m"
- public static final String JOIN_OP_MEMORY = "join_op_memory"; // default "512m"
+ public static final String JOIN_OP_MEMORY = "join_op_memory"; // default
+ // "512m"
- public static final String WEB_INTERFACE_PORT = "web_interface_port"; // default "19001"
+ public static final String WEB_INTERFACE_PORT = "web_interface_port"; // default
+ // "19001"
public static final String NC_PORT = "nc_port"; // default "14601"
- public static final String NUM_PAGES_BUFFER_CACHE = "num_pages_buffer_cache"; // default "1000"
+ public static final String NUM_PAGES_BUFFER_CACHE = "num_pages_buffer_cache"; // default
+ // "1000"
public static final String LOG_LEVEL = "log_level"; // default "INFO"
- public static final String LSN_THRESHOLD = "lsn_threshold"; // default "64m"
+ public static final String LSN_THRESHOLD = "lsn_threshold"; // default
+ // "64m"
- public static final String CHECKPOINT_TERMS_IN_SECS = "checkpoint_terms_in_secs"; // default "120"
+ public static final String CHECKPOINT_TERMS_IN_SECS = "checkpoint_terms_in_secs"; // default
+ // "120"
- public static final String ESCALATE_THRSHOLD_ENTITY_TO_DATASET = "escalate_threshold_entity_to_dataset"; // default "8"
+ public static final String ESCALATE_THRSHOLD_ENTITY_TO_DATASET = "escalate_threshold_entity_to_dataset"; // default
+ // "8"
- public static final String SHRINK_TIMER_THRESHOLD = "shrink_timer_threshold"; // default "120000"
+ public static final String SHRINK_TIMER_THRESHOLD = "shrink_timer_threshold"; // default
+ // "120000"
}
@@ -172,6 +187,9 @@
case "off":
level = Level.OFF;
break;
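+ // Map "warning" to Level.WARNING explicitly; previously it fell through to the ALL default.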
+ case "warning":
+ level = Level.WARNING;
+ break;
default:
level = Level.ALL;
}
diff --git a/asterix-events/pom.xml b/asterix-events/pom.xml
index 107cc0c..7b187bb 100644
--- a/asterix-events/pom.xml
+++ b/asterix-events/pom.xml
@@ -3,11 +3,11 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-events</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index 67ca3df..2e99b7c 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-external-data</artifactId>
@@ -84,14 +84,14 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/asterix-installer/pom.xml b/asterix-installer/pom.xml
index 36b2efa..4b1add5 100644
--- a/asterix-installer/pom.xml
+++ b/asterix-installer/pom.xml
@@ -1,13 +1,11 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <artifactId>asterix</artifactId>
- <groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
- </parent>
- <artifactId>asterix-installer</artifactId>
-
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>asterix</artifactId>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <version>0.0.6-SNAPSHOT</version>
+ </parent>
+ <artifactId>asterix-installer</artifactId>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
@@ -154,36 +152,37 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-events</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-server</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<type>zip</type>
<classifier>binary-assembly</classifier>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-test-framework</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
</dependencies>
+
</project>
diff --git a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
index 624c8d3..6b444e8 100644
--- a/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
+++ b/asterix-maven-plugins/lexer-generator-maven-plugin/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>asterix-maven-plugins</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<packaging>maven-plugin</packaging>
diff --git a/asterix-maven-plugins/pom.xml b/asterix-maven-plugins/pom.xml
index f460970..d71f2e9 100644
--- a/asterix-maven-plugins/pom.xml
+++ b/asterix-maven-plugins/pom.xml
@@ -6,7 +6,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<dependencies>
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index ca3f342..425e8ff 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-metadata</artifactId>
@@ -26,19 +26,19 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-external-data</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
index ad106fe..994262d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
@@ -544,6 +544,15 @@
throw new MetadataException(e);
}
}
+
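+ // Delegates to the metadata node; a RemoteException from the call is rethrown as a MetadataException.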
+ @Override
+ public int getMostRecentDatasetId() throws MetadataException {
+ try {
+ return metadataNode.getMostRecentDatasetId();
+ } catch (RemoteException e) {
+ throw new MetadataException(e);
+ }
+ }
@Override
public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
@@ -601,5 +610,4 @@
public void releaseReadLatch() {
metadataLatch.readLock().unlock();
}
-
}
\ No newline at end of file
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index d9c6ed4..5bdf086 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -1121,4 +1121,9 @@
throw new MetadataException(e);
}
}
+
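+ // The most recent dataset id is tracked by the node-local DatasetIdFactory.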
+ @Override
+ public int getMostRecentDatasetId() throws MetadataException, RemoteException {
+ return DatasetIdFactory.getMostRecentDatasetId();
+ }
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
index fdf2d60..f11144e 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataManager.java
@@ -441,9 +441,10 @@
public List<Function> getDataverseFunctions(MetadataTransactionContext ctx, String dataverseName)
throws MetadataException;
-
public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException;
+ public int getMostRecentDatasetId() throws MetadataException;
+
public void acquireWriteLatch();
public void releaseWriteLatch();
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
index 836d42f..e0b5e96 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/api/IMetadataNode.java
@@ -473,5 +473,7 @@
public void addAdapter(JobId jobId, DatasourceAdapter adapter) throws MetadataException, RemoteException;
public void initializeDatasetIdFactory(JobId jobId) throws MetadataException, RemoteException;
+
+ public int getMostRecentDatasetId() throws MetadataException, RemoteException;
}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 9d43c5c..d978f8a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -23,6 +23,7 @@
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
+import java.util.logging.Level;
import java.util.logging.Logger;
import edu.uci.ics.asterix.common.config.AsterixProperties;
@@ -159,13 +160,13 @@
fileMapProvider = runtimeContext.getFileMapManager();
ioManager = ncApplicationContext.getRootContext().getIOManager();
- if (isNewUniverse) {
- MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- try {
- // Begin a transaction against the metadata.
- // Lock the metadata in X mode.
- MetadataManager.INSTANCE.lock(mdTxnCtx, LockMode.X);
-
+ MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ try {
+ // Begin a transaction against the metadata.
+ // Lock the metadata in X mode.
+ MetadataManager.INSTANCE.lock(mdTxnCtx, LockMode.X);
+
+ if (isNewUniverse) {
for (int i = 0; i < primaryIndexes.length; i++) {
enlistMetadataDataset(primaryIndexes[i], true);
}
@@ -180,23 +181,35 @@
insertInitialGroups(mdTxnCtx);
insertInitialAdapters(mdTxnCtx);
- MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- } catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw e;
- }
- LOGGER.info("FINISHED CREATING METADATA B-TREES.");
- } else {
- for (int i = 0; i < primaryIndexes.length; i++) {
- enlistMetadataDataset(primaryIndexes[i], false);
- }
- for (int i = 0; i < secondaryIndexes.length; i++) {
- enlistMetadataDataset(secondaryIndexes[i], false);
- }
- LOGGER.info("FINISHED ENLISTMENT OF METADATA B-TREES.");
- }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished creating metadata B-trees.");
+ }
+ } else {
+ for (int i = 0; i < primaryIndexes.length; i++) {
+ enlistMetadataDataset(primaryIndexes[i], false);
+ }
+ for (int i = 0; i < secondaryIndexes.length; i++) {
+ enlistMetadataDataset(secondaryIndexes[i], false);
+ }
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Finished enlistment of metadata B-trees.");
+ }
+ }
+
+ //#. initialize datasetIdFactory
+ MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
+ }
+ //TODO
+ //change the exception type to AbortFailureException
+ throw new MetadataException(e);
+ }
}
public static void stopUniverse() throws HyracksDataException {
@@ -379,6 +392,10 @@
MetadataManager.INSTANCE.acquireWriteLatch();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting DDL recovery ...");
+ }
+
try {
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
@@ -388,6 +405,9 @@
if (dataverse.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
//drop pending dataverse
MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Dropped a pending dataverse: " + dataverseName);
+ }
} else {
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
for (Dataset dataset : datasets) {
@@ -395,6 +415,9 @@
if (dataset.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
//drop pending dataset
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Dropped a pending dataset: " + dataverseName + "." + datasetName);
+ }
} else {
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
datasetName);
@@ -403,14 +426,25 @@
if (index.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
//drop pending index
MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Dropped a pending index: " + dataverseName + "." + datasetName
+ + "." + indexName);
+ }
}
}
}
}
}
}
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Completed DDL recovery.");
+ }
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ try {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
+ }
throw new MetadataException(e);
} finally {
MetadataManager.INSTANCE.releaseWriteLatch();
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index 276d848..f7ef8d3 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-om</artifactId>
@@ -26,7 +26,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-common</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
index 788010b..ca7bc24 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/types/ARecordType.java
@@ -27,6 +27,7 @@
import org.json.JSONObject;
import edu.uci.ics.asterix.common.annotations.IRecordTypeAnnotation;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.visitors.IOMVisitor;
@@ -251,9 +252,120 @@
IAType fieldType = getFieldType(fieldName);
if (fieldType == null) {
throw new AlgebricksException("A field with this name \"" + fieldName + "\" could not be found.");
- } else if (fieldType.getTypeTag() == ATypeTag.RECORD) {
- throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be of type "
- + ATypeTag.RECORD + ".");
+ }
+ switch (fieldType.getTypeTag()) {
+ case INT32:
+ case INT64:
+ case FLOAT:
+ case DOUBLE:
+ case STRING:
+ break;
+ case UNION:
+ throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be nullable");
+ default:
+ throw new AlgebricksException("The partitioning key \"" + fieldName + "\" cannot be of type "
+ + fieldType.getTypeTag() + ".");
+ }
+ }
+ }
+
+ /**
+ * Validates the key fields that will be used as keys of an index.
+ *
+ * @param keyFieldNames
+ * a list of key fields that will be validated
+ * @param indexType
+ * the type of the index whose key fields are being validated
+ * @throws AlgebricksException
+ * if the validation fails; an IOException may also be thrown
+ */
+ public void validateKeyFields(List<String> keyFieldNames, IndexType indexType) throws AlgebricksException,
+ IOException {
+ for (String fieldName : keyFieldNames) {
+ IAType fieldType = getFieldType(fieldName);
+ if (fieldType == null) {
+ throw new AlgebricksException("A field with this name \"" + fieldName + "\" could not be found.");
+ }
+ switch (indexType) {
+ case BTREE:
+ switch (fieldType.getTypeTag()) {
+ case INT8:
+ case INT16:
+ case INT32:
+ case INT64:
+ case FLOAT:
+ case DOUBLE:
+ case STRING:
+ case DATE:
+ case TIME:
+ case DATETIME:
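+ // A UNION type tag denotes an optional (nullable) field, which is accepted as an index key.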
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the BTree index.");
+ }
+ break;
+ case RTREE:
+ switch (fieldType.getTypeTag()) {
+ case POINT:
+ case LINE:
+ case RECTANGLE:
+ case CIRCLE:
+ case POLYGON:
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the RTree index.");
+ }
+ break;
+ case FUZZY_NGRAM_INVIX:
+ switch (fieldType.getTypeTag()) {
+ case STRING:
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the Fuzzy N-Gram index.");
+ }
+ break;
+ case FUZZY_WORD_INVIX:
+ switch (fieldType.getTypeTag()) {
+ case STRING:
+ case UNORDEREDLIST:
+ case ORDEREDLIST:
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the Fuzzy Keyword index.");
+ }
+ break;
+ case NGRAM_INVIX:
+ switch (fieldType.getTypeTag()) {
+ case STRING:
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the N-Gram index.");
+ }
+ break;
+ case WORD_INVIX:
+ switch (fieldType.getTypeTag()) {
+ case STRING:
+ case UNORDEREDLIST:
+ case ORDEREDLIST:
+ case UNION:
+ break;
+ default:
+ throw new AlgebricksException("The field \"" + fieldName + "\" which is of type "
+ + fieldType.getTypeTag() + " cannot be indexed using the Keyword index.");
+ }
+ break;
+ default:
+ throw new AlgebricksException("Invalid index type: " + indexType + ".");
}
}
}
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index 3e03a32..74a1f27 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-runtime</artifactId>
@@ -23,7 +23,7 @@
<plugin>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>lexer-generator-maven-plugin</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<configuration>
<grammarFile>src/main/resources/adm.grammar</grammarFile>
<outputDir>${project.build.directory}/generated-sources/edu/uci/ics/asterix/runtime/operators/file/adm</outputDir>
@@ -119,7 +119,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
@@ -135,7 +135,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-server/pom.xml b/asterix-server/pom.xml
index 3c52104..d1d6acb 100644
--- a/asterix-server/pom.xml
+++ b/asterix-server/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<build>
@@ -80,7 +80,7 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-app</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterix-test-framework/pom.xml b/asterix-test-framework/pom.xml
index a9c3292..791bf26 100755
--- a/asterix-test-framework/pom.xml
+++ b/asterix-test-framework/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-test-framework</artifactId>
<build>
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index bc2a7ca..248cdb5 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-tools</artifactId>
@@ -124,13 +124,13 @@
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-aql</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-algebra</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index 9777d1f..7d951ce 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -3,7 +3,7 @@
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
</parent>
<artifactId>asterix-transactions</artifactId>
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 0a12ba1..01dce6c 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -25,7 +25,10 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
import edu.uci.ics.hyracks.storage.common.file.LocalResource;
@@ -33,6 +36,7 @@
public class PersistentLocalResourceRepository implements ILocalResourceRepository {
+ private static final Logger LOGGER = Logger.getLogger(PersistentLocalResourceRepository.class.getName());
private final String mountPoint;
private static final String ROOT_METADATA_DIRECTORY = "asterix_root_metadata/";
private static final String ROOT_METADATA_FILE_NAME_PREFIX = ".asterix_root_metadata_";
@@ -57,6 +61,9 @@
public void initialize(String nodeId, String rootDir, boolean isNewUniverse, ResourceIdFactory resourceIdFactory)
throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Initializing local resource repository ... ");
+ }
LocalResource rootLocalResource = null;
//#. if the rootMetadataFile doesn't exist, create it and return.
@@ -67,6 +74,9 @@
File rootMetadataDir = new File(mountPoint + ROOT_METADATA_DIRECTORY);
if (!rootMetadataDir.exists()) {
rootMetadataDir.mkdir();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("created the root-metadata-file's directory: " + rootMetadataDir.getAbsolutePath());
+ }
}
rootMetadataFile.delete();
@@ -77,17 +87,31 @@
}
rootLocalResource = new LocalResource(ROOT_LOCAL_RESOURCE_ID, rootMetadataFileName, 0, 0, this.rootDir);
insert(rootLocalResource);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("created the root-metadata-file: " + rootMetadataFileName);
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Completed the initialization of the local resource repository");
+ }
return;
}
//#. if the rootMetadataFile exists, read it and set this.rootDir.
rootLocalResource = readLocalResource(rootMetadataFile);
this.rootDir = (String) rootLocalResource.getResourceObject();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("The root directory of the local resource repository is " + this.rootDir);
+ }
//#. load all local resources.
File rootDirFile = new File(this.rootDir);
if (!rootDirFile.exists()) {
//rootDir may not exist if this node is not the metadata node and doesn't have any user data.
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("The root directory of the local resource repository doesn't exist: there is no local resource.");
+ LOGGER.info("Completed the initialization of the local resource repository");
+ }
return;
}
@@ -119,6 +143,10 @@
id2ResourceMap.put(localResource.getResourceId(), localResource);
name2ResourceMap.put(localResource.getResourceName(), localResource);
maxResourceId = Math.max(localResource.getResourceId(), maxResourceId);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("loaded local resource - [id: " + localResource.getResourceId()
+ + ", name: " + localResource.getResourceName() + "]");
+ }
}
}
}
@@ -127,6 +155,10 @@
}
}
resourceIdFactory.initId(maxResourceId + 1);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("The resource id factory is intialized with the value: " + (maxResourceId + 1));
+ LOGGER.info("Completed the initialization of the local resource repository");
+ }
}
@Override
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
index feaca86..77a99bc 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/logging/LogUtil.java
@@ -47,6 +47,9 @@
List<String> logFiles = getLogFiles(logManagerProperties);
if (logFiles == null || logFiles.size() == 0) {
FileUtil.createFileIfNotExists(getLogFilePath(logManagerProperties, 0));
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("created a log file: " + getLogFilePath(logManagerProperties, 0));
+ }
} else {
File logFile = new File(LogUtil.getLogFilePath(logManagerProperties,
Long.parseLong(logFiles.get(logFiles.size() - 1))));
@@ -55,7 +58,13 @@
}
} else {
FileUtil.createNewDirectory(logManagerProperties.getLogDir());
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("created the log directory: " + logManagerProperties.getLogDir());
+ }
FileUtil.createFileIfNotExists(getLogFilePath(logManagerProperties, 0));
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("created a log file: " + getLogFilePath(logManagerProperties, 0));
+ }
}
} catch (IOException ioe) {
throw new ACIDException("Unable to initialize log anchor", ioe);
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
index c27ae20..47c5dcb 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/recovery/RecoveryManager.java
@@ -116,6 +116,9 @@
//This is initial bootstrap.
//Otherwise, the checkpoint file is deleted unfortunately. What we can do in this case?
state = SystemState.NEW_UNIVERSE;
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("The checkpoint file doesn't exist: systemState = NEW_UNIVERSE");
+ }
return state;
}
@@ -392,7 +395,7 @@
}
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("[RecoveryMgr] recovery is over");
+ LOGGER.info("[RecoveryMgr] recovery is completed.");
}
if (IS_DEBUG_MODE) {
System.out.println("[RecoveryMgr] Count: Update/Commit/Redo = " + updateLogCount + "/" + commitLogCount
@@ -422,6 +425,10 @@
@Override
public synchronized void checkpoint(boolean isSharpCheckpoint) throws ACIDException {
+ if (isSharpCheckpoint && LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Starting sharp checkpoint ... ");
+ }
+
LogManager logMgr = (LogManager) txnSubsystem.getLogManager();
TransactionManager txnMgr = (TransactionManager) txnSubsystem.getTransactionManager();
String logDir = logMgr.getLogManagerProperties().getLogDir();
@@ -513,6 +520,10 @@
if (isSharpCheckpoint) {
logMgr.renewLogFiles();
}
+
+ if (isSharpCheckpoint && LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Completed sharp checkpoint.");
+ }
}
private CheckpointObject readCheckpoint() throws ACIDException, FileNotFoundException {
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
index 65512ec..17eaf38 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/transaction/DatasetIdFactory.java
@@ -4,12 +4,22 @@
public class DatasetIdFactory {
private static AtomicInteger id = new AtomicInteger();
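+ // Set once initialize() has seeded the counter; exposed through isInitialized().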
+ private static boolean isInitialized = false;
+
+ public static boolean isInitialized() {
+ return isInitialized;
+ }
public static void initialize(int initialId) {
id.set(initialId);
+ isInitialized = true;
}
public static int generateDatasetId() {
return id.incrementAndGet();
}
+
+ public static int getMostRecentDatasetId() {
+ return id.get();
+ }
}
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index b424195..20efdb3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,12 +3,12 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix</artifactId>
- <version>0.0.5-SNAPSHOT</version>
+ <version>0.0.6-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
- <algebricks.version>0.2.5-SNAPSHOT</algebricks.version>
- <hyracks.version>0.2.5-SNAPSHOT</hyracks.version>
+ <algebricks.version>0.2.6-SNAPSHOT</algebricks.version>
+ <hyracks.version>0.2.6-SNAPSHOT</hyracks.version>
</properties>
<build>
@@ -25,8 +25,8 @@
</build>
<scm>
- <connection>scm:svn:https://asterixdb.googlecode.com/svn/trunk/asterix</connection>
- <developerConnection>scm:svn:https://asterixdb.googlecode.com/svn/trunk/asterix</developerConnection>
+ <connection>scm:git:https://code.google.com/p/asterixdb/</connection>
+ <developerConnection>scm:git:https://code.google.com/p/asterixdb/</developerConnection>
</scm>
<distributionManagement>