Merge changes from asterix_stabilization_yfix

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_stabilization@1114 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index f950800..a6f0ac4 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -5,10 +5,7 @@
 		<groupId>edu.uci.ics.asterix</groupId>
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
-	<groupId>edu.uci.ics.asterix</groupId>
 	<artifactId>asterix-algebra</artifactId>
-	<version>0.0.4-SNAPSHOT</version>
-
 	<build>
 		<plugins>
 			<plugin>
@@ -60,8 +57,7 @@
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-algebricks-compiler</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
+			<artifactId>algebricks-compiler</artifactId>
 		</dependency>
 		<dependency>
 			<groupId>org.json</groupId>
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
index a908363..9163e35 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
@@ -1,78 +1,103 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.translator;
 
 import java.util.Map;
 
 import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
 import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
 import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap.ARTIFACT_KIND;
 import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
 import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 
+/**
+ * Base class for AQL translators. 
+ * Contains the common validation logic for AQL statements.
+ */
 public abstract class AbstractAqlTranslator {
 
     protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
 
-    public void validateOperation(Dataverse dataverse, Statement stmt) throws AsterixException {
-        String dataverseName = dataverse != null ? dataverse.getDataverseName() : null;
-        if (dataverseName != null && dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
+    public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+        boolean invalidOperation = false;
+        String message = null;
+        String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
+        switch (stmt.getKind()) {
+            case INSERT:
+                InsertStatement insertStmt = (InsertStatement) stmt;
+                if (insertStmt.getDataverseName() != null) {
+                    dataverse = insertStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Insert operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
 
-            boolean invalidOperation = false;
-            String message = null;
-            switch (stmt.getKind()) {
-                case INSERT:
-                case UPDATE:
-                case DELETE:
-                    invalidOperation = true;
-                    message = " Operation  " + stmt.getKind() + " not permitted in system dataverse " + "'"
-                            + MetadataConstants.METADATA_DATAVERSE_NAME + "'";
-                    break;
-                case FUNCTION_DROP:
-                    FunctionSignature signature = ((FunctionDropStatement) stmt).getFunctionSignature();
-                    FunctionIdentifier fId = new FunctionIdentifier(signature.getNamespace(), signature.getName(),
-                            signature.getArity());
-                    if (dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
-                            && AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.FUNCTION, fId)) {
-                        invalidOperation = true;
-                        message = "Cannot drop function " + signature + " (protected by system)";
-                    }
-                    break;
-                case NODEGROUP_DROP:
-                    NodeGroupDropStatement nodeGroupDropStmt = (NodeGroupDropStatement) stmt;
-                    String nodegroupName = nodeGroupDropStmt.getNodeGroupName().getValue();
-                    if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.NODEGROUP, nodegroupName)) {
-                        message = "Cannot drop nodegroup " + nodegroupName + " (protected by system)";
-                        invalidOperation = true;
-                    }
-                    break;
-                case DATAVERSE_DROP:
-                    DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
-                    String dvName = dvDropStmt.getDataverseName().getValue();
-                    if (dvName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
-                        message = "Cannot drop dataverse " + dvName + " (protected by system)";
-                        invalidOperation = true;
-                    }
-                    break;
-                case DATASET_DROP:
-                    DropStatement dropStmt = (DropStatement) stmt;
-                    String datasetName = dropStmt.getDatasetName().getValue();
-                    if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.DATASET, datasetName)) {
-                        invalidOperation = true;
-                        message = "Cannot drop dataset " + datasetName + " (protected by system)";
-                    }
-                    break;
-            }
-            if (invalidOperation) {
-                throw new AsterixException(message);
-            }
+            case DELETE:
+                DeleteStatement deleteStmt = (DeleteStatement) stmt;
+                if (deleteStmt.getDataverseName() != null) {
+                    dataverse = deleteStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Delete operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+            case NODEGROUP_DROP:
+                String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
+                invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
+                if (invalidOperation) {
+                    message = "Cannot drop nodegroup:" + nodegroupName;
+                }
+                break;
+
+            case DATAVERSE_DROP:
+                DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
+                        .getValue());
+                if (invalidOperation) {
+                    message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+                }
+                break;
+
+            case DATASET_DROP:
+                DropStatement dropStmt = (DropStatement) stmt;
+                if (dropStmt.getDataverseName() != null) {
+                    dataverse = dropStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Cannot drop a dataset belonging to the dataverse:"
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+        }
+
+        if (invalidOperation) {
+            throw new AsterixException("Invalid operation - " + message);
         }
     }
-}
\ No newline at end of file
+}
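For context, a minimal sketch of how a concrete translator might drive the
validation added above. The subclass and its translate() method are
hypothetical, for illustration only; validateOperation(Dataverse, Statement)
and the "Invalid operation - ..." exception come from the change itself.

    import edu.uci.ics.asterix.aql.base.Statement;
    import edu.uci.ics.asterix.common.exceptions.AsterixException;
    import edu.uci.ics.asterix.metadata.entities.Dataverse;
    import edu.uci.ics.asterix.translator.AbstractAqlTranslator;

    // Hypothetical concrete translator; only the validateOperation call is real.
    public class ExampleAqlTranslator extends AbstractAqlTranslator {
        public void translate(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
            // Rejects, e.g., an INSERT or DELETE targeting the system Metadata
            // dataverse by throwing AsterixException("Invalid operation - ...").
            validateOperation(defaultDataverse, stmt);
            // ... proceed with the actual translation of stmt ...
        }
    }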
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index 6d52b8f..625fed1 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -43,501 +43,533 @@
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 
+/**
+ * Each AQL statement instance is translated into an instance of a corresponding
+ * CompiledX statement class that carries additional fields for use by the AqlTranslator.
+ */
 public class CompiledStatements {
 
-    public static interface ICompiledStatement {
+	public static interface ICompiledStatement {
 
-        public Kind getKind();
-    }
+		public Kind getKind();
+	}
 
-    public static class CompiledWriteFromQueryResultStatement implements ICompiledDmlStatement {
+	public static class CompiledWriteFromQueryResultStatement implements
+			ICompiledDmlStatement {
 
-        private String dataverseName;
-        private String datasetName;
-        private Query query;
-        private int varCounter;
+		private String dataverseName;
+		private String datasetName;
+		private Query query;
+		private int varCounter;
 
-        public CompiledWriteFromQueryResultStatement(String dataverseName, String datasetName, Query query,
-                int varCounter) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
+		public CompiledWriteFromQueryResultStatement(String dataverseName,
+				String datasetName, Query query, int varCounter) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.query = query;
+			this.varCounter = varCounter;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public int getVarCounter() {
-            return varCounter;
-        }
+		public int getVarCounter() {
+			return varCounter;
+		}
 
-        public Query getQuery() {
-            return query;
-        }
+		public Query getQuery() {
+			return query;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.WRITE_FROM_QUERY_RESULT;
-        }
+		@Override
+		public Kind getKind() {
+			return Kind.WRITE_FROM_QUERY_RESULT;
+		}
 
-    }
+	}
 
-    public static class CompiledDatasetDropStatement implements ICompiledStatement {
-        private final String dataverseName;
-        private final String datasetName;
+	public static class CompiledDatasetDropStatement implements
+			ICompiledStatement {
+		private final String dataverseName;
+		private final String datasetName;
 
-        public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-        }
+		public CompiledDatasetDropStatement(String dataverseName,
+				String datasetName) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.DATASET_DROP;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.DATASET_DROP;
+		}
+	}
 
-    // added by yasser
-    public static class CompiledCreateDataverseStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String format;
+	// added by yasser
+	public static class CompiledCreateDataverseStatement implements
+			ICompiledStatement {
+		private String dataverseName;
+		private String format;
 
-        public CompiledCreateDataverseStatement(String dataverseName, String format) {
-            this.dataverseName = dataverseName;
-            this.format = format;
-        }
+		public CompiledCreateDataverseStatement(String dataverseName,
+				String format) {
+			this.dataverseName = dataverseName;
+			this.format = format;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getFormat() {
-            return format;
-        }
+		public String getFormat() {
+			return format;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.CREATE_DATAVERSE;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.CREATE_DATAVERSE;
+		}
+	}
 
-    public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
-        private String nodeGroupName;
+	public static class CompiledNodeGroupDropStatement implements
+			ICompiledStatement {
+		private String nodeGroupName;
 
-        public CompiledNodeGroupDropStatement(String nodeGroupName) {
-            this.nodeGroupName = nodeGroupName;
-        }
+		public CompiledNodeGroupDropStatement(String nodeGroupName) {
+			this.nodeGroupName = nodeGroupName;
+		}
 
-        public String getNodeGroupName() {
-            return nodeGroupName;
-        }
+		public String getNodeGroupName() {
+			return nodeGroupName;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.NODEGROUP_DROP;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.NODEGROUP_DROP;
+		}
+	}
 
-    public static class CompiledIndexDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String datasetName;
-        private String indexName;
+	public static class CompiledIndexDropStatement implements
+			ICompiledStatement {
+		private String dataverseName;
+		private String datasetName;
+		private String indexName;
 
-        public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.indexName = indexName;
-        }
+		public CompiledIndexDropStatement(String dataverseName,
+				String datasetName, String indexName) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.indexName = indexName;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public String getIndexName() {
-            return indexName;
-        }
+		public String getIndexName() {
+			return indexName;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.INDEX_DROP;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.INDEX_DROP;
+		}
+	}
 
-    public static class CompiledDataverseDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private boolean ifExists;
+	public static class CompiledDataverseDropStatement implements
+			ICompiledStatement {
+		private String dataverseName;
+		private boolean ifExists;
 
-        public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
-            this.dataverseName = dataverseName;
-            this.ifExists = ifExists;
-        }
+		public CompiledDataverseDropStatement(String dataverseName,
+				boolean ifExists) {
+			this.dataverseName = dataverseName;
+			this.ifExists = ifExists;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public boolean getIfExists() {
-            return ifExists;
-        }
+		public boolean getIfExists() {
+			return ifExists;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.DATAVERSE_DROP;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.DATAVERSE_DROP;
+		}
+	}
 
-    public static class CompiledTypeDropStatement implements ICompiledStatement {
-        private String typeName;
+	public static class CompiledTypeDropStatement implements ICompiledStatement {
+		private String typeName;
 
-        public CompiledTypeDropStatement(String nodeGroupName) {
-            this.typeName = nodeGroupName;
-        }
+		public CompiledTypeDropStatement(String nodeGroupName) {
+			this.typeName = nodeGroupName;
+		}
 
-        public String getTypeName() {
-            return typeName;
-        }
+		public String getTypeName() {
+			return typeName;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.TYPE_DROP;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.TYPE_DROP;
+		}
+	}
 
-    public static interface ICompiledDmlStatement extends ICompiledStatement {
+	public static interface ICompiledDmlStatement extends ICompiledStatement {
 
-        public String getDataverseName();
+		public String getDataverseName();
 
-        public String getDatasetName();
-    }
+		public String getDatasetName();
+	}
 
-    public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
-        private final String indexName;
-        private final String dataverseName;
-        private final String datasetName;
-        private final List<String> keyFields;
-        private final IndexType indexType;
+	public static class CompiledCreateIndexStatement implements
+			ICompiledDmlStatement {
+		private final String indexName;
+		private final String dataverseName;
+		private final String datasetName;
+		private final List<String> keyFields;
+		private final IndexType indexType;
 
-        // Specific to NGram index.
-        private final int gramLength;
+		// Specific to NGram index.
+		private final int gramLength;
 
-        public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
-                List<String> keyFields, int gramLength, IndexType indexType) {
-            this.indexName = indexName;
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.keyFields = keyFields;
-            this.gramLength = gramLength;
-            this.indexType = indexType;
-        }
+		public CompiledCreateIndexStatement(String indexName,
+				String dataverseName, String datasetName,
+				List<String> keyFields, int gramLength, IndexType indexType) {
+			this.indexName = indexName;
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.keyFields = keyFields;
+			this.gramLength = gramLength;
+			this.indexType = indexType;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getIndexName() {
-            return indexName;
-        }
+		public String getIndexName() {
+			return indexName;
+		}
 
-        public List<String> getKeyFields() {
-            return keyFields;
-        }
+		public List<String> getKeyFields() {
+			return keyFields;
+		}
 
-        public IndexType getIndexType() {
-            return indexType;
-        }
+		public IndexType getIndexType() {
+			return indexType;
+		}
 
-        public int getGramLength() {
-            return gramLength;
-        }
+		public int getGramLength() {
+			return gramLength;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.CREATE_INDEX;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.CREATE_INDEX;
+		}
+	}
 
-    public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private boolean alreadySorted;
-        private String adapter;
-        private Map<String, String> properties;
+	public static class CompiledLoadFromFileStatement implements
+			ICompiledDmlStatement {
+		private String dataverseName;
+		private String datasetName;
+		private boolean alreadySorted;
+		private String adapter;
+		private Map<String, String> properties;
 
-        public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
-                Map<String, String> properties, boolean alreadySorted) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.alreadySorted = alreadySorted;
-            this.adapter = adapter;
-            this.properties = properties;
-        }
+		public CompiledLoadFromFileStatement(String dataverseName,
+				String datasetName, String adapter,
+				Map<String, String> properties, boolean alreadySorted) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.alreadySorted = alreadySorted;
+			this.adapter = adapter;
+			this.properties = properties;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public boolean alreadySorted() {
-            return alreadySorted;
-        }
+		public boolean alreadySorted() {
+			return alreadySorted;
+		}
 
-        public String getAdapter() {
-            return adapter;
-        }
+		public String getAdapter() {
+			return adapter;
+		}
 
-        public Map<String, String> getProperties() {
-            return properties;
-        }
+		public Map<String, String> getProperties() {
+			return properties;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.LOAD_FROM_FILE;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.LOAD_FROM_FILE;
+		}
+	}
 
-    public static class CompiledInsertStatement implements ICompiledDmlStatement {
-        private final String dataverseName;
-        private final String datasetName;
-        private final Query query;
-        private final int varCounter;
+	public static class CompiledInsertStatement implements
+			ICompiledDmlStatement {
+		private final String dataverseName;
+		private final String datasetName;
+		private final Query query;
+		private final int varCounter;
 
-        public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
+		public CompiledInsertStatement(String dataverseName,
+				String datasetName, Query query, int varCounter) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.query = query;
+			this.varCounter = varCounter;
+		}
 
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public String getDatasetName() {
-            return datasetName;
-        }
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public int getVarCounter() {
-            return varCounter;
-        }
+		public int getVarCounter() {
+			return varCounter;
+		}
 
-        public Query getQuery() {
-            return query;
-        }
+		public Query getQuery() {
+			return query;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.INSERT;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.INSERT;
+		}
+	}
 
-    public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private Query query;
-        private int varCounter;
+	public static class CompiledBeginFeedStatement implements
+			ICompiledDmlStatement {
+		private String dataverseName;
+		private String datasetName;
+		private Query query;
+		private int varCounter;
 
-        public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
+		public CompiledBeginFeedStatement(String dataverseName,
+				String datasetName, Query query, int varCounter) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.query = query;
+			this.varCounter = varCounter;
+		}
 
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		@Override
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
+		@Override
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public int getVarCounter() {
-            return varCounter;
-        }
+		public int getVarCounter() {
+			return varCounter;
+		}
 
-        public Query getQuery() {
-            return query;
-        }
+		public Query getQuery() {
+			return query;
+		}
 
-        public void setQuery(Query query) {
-            this.query = query;
-        }
+		public void setQuery(Query query) {
+			this.query = query;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.BEGIN_FEED;
-        }
-    }
+		@Override
+		public Kind getKind() {
+			return Kind.BEGIN_FEED;
+		}
+	}
 
-    public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private OperationType operationType;
-        private Query query;
-        private int varCounter;
-        private Map<String, String> alteredParams;
+	public static class CompiledControlFeedStatement implements
+			ICompiledDmlStatement {
+		private String dataverseName;
+		private String datasetName;
+		private OperationType operationType;
+		private Query query;
+		private int varCounter;
+		private Map<String, String> alteredParams;
 
-        public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
-                Map<String, String> alteredParams) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.operationType = operationType;
-            this.alteredParams = alteredParams;
-        }
+		public CompiledControlFeedStatement(OperationType operationType,
+				String dataverseName, String datasetName,
+				Map<String, String> alteredParams) {
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.operationType = operationType;
+			this.alteredParams = alteredParams;
+		}
 
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		@Override
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
+		@Override
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        public OperationType getOperationType() {
-            return operationType;
-        }
+		public OperationType getOperationType() {
+			return operationType;
+		}
 
-        public int getVarCounter() {
-            return varCounter;
-        }
+		public int getVarCounter() {
+			return varCounter;
+		}
 
-        public Query getQuery() {
-            return query;
-        }
+		public Query getQuery() {
+			return query;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.CONTROL_FEED;
-        }
+		@Override
+		public Kind getKind() {
+			return Kind.CONTROL_FEED;
+		}
 
-        public Map<String, String> getProperties() {
-            return alteredParams;
-        }
+		public Map<String, String> getProperties() {
+			return alteredParams;
+		}
 
-        public void setProperties(Map<String, String> properties) {
-            this.alteredParams = properties;
-        }
-    }
+		public void setProperties(Map<String, String> properties) {
+			this.alteredParams = properties;
+		}
+	}
 
-    public static class CompiledDeleteStatement implements ICompiledDmlStatement {
-        private VariableExpr var;
-        private String dataverseName;
-        private String datasetName;
-        private Expression condition;
-        private Clause dieClause;
-        private int varCounter;
-        private AqlMetadataProvider metadataProvider;
+	public static class CompiledDeleteStatement implements
+			ICompiledDmlStatement {
+		private VariableExpr var;
+		private String dataverseName;
+		private String datasetName;
+		private Expression condition;
+		private Clause dieClause;
+		private int varCounter;
+		private AqlMetadataProvider metadataProvider;
 
-        public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
-                Expression condition, Clause dieClause, int varCounter, AqlMetadataProvider metadataProvider) {
-            this.var = var;
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.condition = condition;
-            this.dieClause = dieClause;
-            this.varCounter = varCounter;
-            this.metadataProvider = metadataProvider;
-        }
+		public CompiledDeleteStatement(VariableExpr var, String dataverseName,
+				String datasetName, Expression condition, Clause dieClause,
+				int varCounter, AqlMetadataProvider metadataProvider) {
+			this.var = var;
+			this.dataverseName = dataverseName;
+			this.datasetName = datasetName;
+			this.condition = condition;
+			this.dieClause = dieClause;
+			this.varCounter = varCounter;
+			this.metadataProvider = metadataProvider;
+		}
 
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
+		@Override
+		public String getDatasetName() {
+			return datasetName;
+		}
 
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
+		@Override
+		public String getDataverseName() {
+			return dataverseName;
+		}
 
-        public int getVarCounter() {
-            return varCounter;
-        }
+		public int getVarCounter() {
+			return varCounter;
+		}
 
-        public Expression getCondition() {
-            return condition;
-        }
+		public Expression getCondition() {
+			return condition;
+		}
 
-        public Clause getDieClause() {
-            return dieClause;
-        }
+		public Clause getDieClause() {
+			return dieClause;
+		}
 
-        public Query getQuery() throws AlgebricksException {
+		public Query getQuery() throws AlgebricksException {
 
-            List<Expression> arguments = new ArrayList<Expression>();
-            String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
-            LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
-            arguments.add(argumentLiteral);
+			List<Expression> arguments = new ArrayList<Expression>();
+			String arg = dataverseName == null ? datasetName : dataverseName
+					+ "." + datasetName;
+			LiteralExpr argumentLiteral = new LiteralExpr(
+					new StringLiteral(arg));
+			arguments.add(argumentLiteral);
 
-            CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
-                    arguments);
-            List<Clause> clauseList = new ArrayList<Clause>();
-            Clause forClause = new ForClause(var, callExpression);
-            clauseList.add(forClause);
-            Clause whereClause = null;
-            if (condition != null) {
-                whereClause = new WhereClause(condition);
-                clauseList.add(whereClause);
-            }
-            if (dieClause != null) {
-                clauseList.add(dieClause);
-            }
+			CallExpr callExpression = new CallExpr(new FunctionSignature(
+					FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
+			List<Clause> clauseList = new ArrayList<Clause>();
+			Clause forClause = new ForClause(var, callExpression);
+			clauseList.add(forClause);
+			Clause whereClause = null;
+			if (condition != null) {
+				whereClause = new WhereClause(condition);
+				clauseList.add(whereClause);
+			}
+			if (dieClause != null) {
+				clauseList.add(dieClause);
+			}
 
-            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-            if (dataset == null) {
-                throw new AlgebricksException("Unknown dataset " + datasetName);
-            }
-            String itemTypeName = dataset.getItemTypeName();
-            IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
-            ARecordType recType = (ARecordType) itemType;
-            String[] fieldNames = recType.getFieldNames();
-            List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
-            for (int i = 0; i < fieldNames.length; i++) {
-                FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
-                FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
-                fieldBindings.add(fb);
-            }
-            RecordConstructor rc = new RecordConstructor(fieldBindings);
+			Dataset dataset = metadataProvider.findDataset(dataverseName,
+					datasetName);
+			if (dataset == null) {
+				throw new AlgebricksException("Unknown dataset " + datasetName);
+			}
+			String itemTypeName = dataset.getItemTypeName();
+			IAType itemType = metadataProvider.findType(
+					dataset.getDataverseName(), itemTypeName);
+			ARecordType recType = (ARecordType) itemType;
+			String[] fieldNames = recType.getFieldNames();
+			List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+			for (int i = 0; i < fieldNames.length; i++) {
+				FieldAccessor fa = new FieldAccessor(var, new Identifier(
+						fieldNames[i]));
+				FieldBinding fb = new FieldBinding(new LiteralExpr(
+						new StringLiteral(fieldNames[i])), fa);
+				fieldBindings.add(fb);
+			}
+			RecordConstructor rc = new RecordConstructor(fieldBindings);
 
-            FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
-            Query query = new Query();
-            query.setBody(flowgr);
-            return query;
-        }
+			FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+			Query query = new Query();
+			query.setBody(flowgr);
+			return query;
+		}
 
-        @Override
-        public Kind getKind() {
-            return Kind.DELETE;
-        }
+		@Override
+		public Kind getKind() {
+			return Kind.DELETE;
+		}
 
-    }
+	}
 
 }
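A short usage sketch of the compiled-statement pattern above (hypothetical
caller code, not part of this change): the translator wraps a parsed statement
in the matching CompiledX class and later dispatches on getKind(). The
dataverse/dataset names, the counter value, and the import location of the
Kind enum (assumed to be nested in Statement) are illustrative assumptions.

    import edu.uci.ics.asterix.aql.base.Statement.Kind; // assumed location of Kind
    import edu.uci.ics.asterix.aql.expression.Query;
    import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement;
    import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;

    public class ExampleCompiledStatementUse {
        public static void main(String[] args) {
            Query query = new Query(); // body would come from the parser/rewriter
            ICompiledDmlStatement stmt = new CompiledInsertStatement(
                    "MyDataverse", "MyDataset", query, 0); // names/counter are made up
            if (stmt.getKind() == Kind.INSERT) {
                // Hand the fully qualified target and the query to job generation.
                System.out.println(stmt.getDataverseName() + "." + stmt.getDatasetName());
            }
        }
    }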
diff --git a/asterix-app/data/hdfs/asterix_info.txt b/asterix-app/data/hdfs/asterix_info.txt
new file mode 100644
index 0000000..a9a5596
--- /dev/null
+++ b/asterix-app/data/hdfs/asterix_info.txt
@@ -0,0 +1,4 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas (semi-structured data, parallel databases, and data-intensive computing) to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with today's data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/large_text b/asterix-app/data/hdfs/large_text
new file mode 100644
index 0000000..31a394d
--- /dev/null
+++ b/asterix-app/data/hdfs/large_text
@@ -0,0 +1,136 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/obamatweets.adm b/asterix-app/data/hdfs/obamatweets.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/data/hdfs/obamatweets.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/data/hdfs/textFileS b/asterix-app/data/hdfs/textFileS
new file mode 100644
index 0000000..4bd0604
--- /dev/null
+++ b/asterix-app/data/hdfs/textFileS
Binary files differ
diff --git a/asterix-app/data/twitter/obamatweets.adm b/asterix-app/data/twitter/obamatweets.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/data/twitter/obamatweets.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index b47aadc..2f80c30 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -129,17 +129,14 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-control-cc</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-control-nc</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-algebricks-compiler</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
+			<artifactId>algebricks-compiler</artifactId>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index c207153..1eb37cd 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.api.common;
 
 import java.io.PrintWriter;
@@ -57,6 +71,10 @@
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 
+/**
+ * Provides helper methods for compilation of a query into a JobSpec and submission
+ * to Hyracks through the Hyracks client interface.
+ */
 public class APIFramework {
 
     private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
@@ -318,7 +336,7 @@
         builder.setTypeTraitProvider(format.getTypeTraitProvider());
         builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
 
-        JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.INSTANCE);
+        JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.getInstance());
         // set the job event listener
         spec.setJobletEventListenerFactory(new JobEventListenerFactory(queryMetadataProvider.getJobTxnId(),
                 isWriteTransaction));
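
For orientation, a minimal sketch of the submission step this class wraps, assuming the 0.2.x-era Hyracks client API in which startJob() takes an application name (later versions drop that parameter); the host, port, and application name below are placeholders, not values from this change:

    import edu.uci.ics.hyracks.api.client.HyracksConnection;
    import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
    import edu.uci.ics.hyracks.api.job.JobId;
    import edu.uci.ics.hyracks.api.job.JobSpecification;

    public class SubmitSketch {
        // Hands a compiled JobSpecification to the Cluster Controller and waits for it.
        public static void run(JobSpecification spec) throws Exception {
            IHyracksClientConnection hcc = new HyracksConnection("127.0.0.1", 1098); // CC client port (placeholder)
            JobId jobId = hcc.startJob("asterix", spec); // returns once the job is accepted
            hcc.waitForCompletion(jobId);                // blocks until the job finishes
        }
    }
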
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 80293b0..508126f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -106,6 +106,10 @@
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 
+/**
+ * Provides functionality for executing a batch of AQL statements (queries included)
+ * sequentially.
+ */
 public class AqlTranslator extends AbstractAqlTranslator {
 
     private final List<Statement> aqlStatements;
@@ -134,6 +138,12 @@
         return functionDecls;
     }
 
+    /**
+     * Compiles and submits a list of AQL statements for execution.
+     * @param hcc A Hyracks client connection that is used to submit a JobSpec to Hyracks.
+     * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
+     * @throws Exception
+     */
     public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc) throws Exception {
         List<QueryResult> executionResult = new ArrayList<QueryResult>();
         FileSplit outputFile = null;
@@ -709,14 +719,16 @@
             IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         BeginFeedStatement bfs = (BeginFeedStatement) stmt;
         String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                : activeDefaultDataverse.getDataverseName() : bfs.getDatasetName().getValue();
+                : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
 
         CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName,
                 bfs.getDatasetName().getValue(), bfs.getQuery(), bfs.getVarCounter());
 
-        Dataset dataset;
-        dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
+        Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
                 .getDatasetName().getValue());
+        if (dataset == null) {
+            throw new AsterixException("Unknown dataset: " + bfs.getDatasetName().getValue());
+        }
         IDatasetDetails datasetDetails = dataset.getDatasetDetails();
         if (datasetDetails.getDatasetType() != DatasetType.FEED) {
             throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue() + " is not a feed dataset");
@@ -733,7 +745,7 @@
             IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
         ControlFeedStatement cfs = (ControlFeedStatement) stmt;
         String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
-                : activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
+                : activeDefaultDataverse.getDataverseName() : cfs.getDataverseName().getValue();
         CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(), dataverseName,
                 cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
         jobsToExecute.add(FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider));
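
The two one-line fixes above correct the same copy-paste bug: the dataverse name was being read from getDatasetName() instead of getDataverseName(). A hypothetical helper, named here only for illustration, showing the resolution order the fix restores:

    // Hypothetical helper (not in this change): prefer the dataverse named in the
    // statement, fall back to the active default dataverse, else null (an error
    // the caller must handle).
    private static String resolveDataverseName(Identifier stmtDataverse, Dataverse activeDefaultDataverse) {
        if (stmtDataverse != null) {
            return stmtDataverse.getValue();
        }
        return activeDefaultDataverse == null ? null : activeDefaultDataverse.getDataverseName();
    }
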
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
index 546973c..f9bd2d5 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
@@ -21,10 +21,10 @@
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.FeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage.MessageType;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage.MessageType;
 import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
@@ -38,16 +38,29 @@
 import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 
+/**
+ * Provides helper methods for creating JobSpecs for operations on a feed.
+ */
 public class FeedOperations {
 
     private static final Logger LOGGER = Logger.getLogger(IndexOperations.class.getName());
 
+    /**
+     * @param controlFeedStatement
+     *            The statement describing the action to be taken on the
+     *            feed, e.g., stop feed or alter feed.
+     * @param metadataProvider
+     *            An instance of AqlMetadataProvider.
+     * @return An instance of JobSpec for the job that would send an appropriate
+     *         control message to the running feed.
+     * @throws AsterixException
+     * @throws AlgebricksException
+     */
     public static JobSpecification buildControlFeedJobSpec(CompiledControlFeedStatement controlFeedStatement,
             AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
         switch (controlFeedStatement.getOperationType()) {
             case ALTER:
-            case SUSPEND:
-            case RESUME:
             case END: {
                 return createSendMessageToFeedJobSpec(controlFeedStatement, metadataProvider);
             }
@@ -86,15 +99,9 @@
 
         List<IFeedMessage> feedMessages = new ArrayList<IFeedMessage>();
         switch (controlFeedStatement.getOperationType()) {
-            case SUSPEND:
-                feedMessages.add(new FeedMessage(MessageType.SUSPEND));
-                break;
             case END:
                 feedMessages.add(new FeedMessage(MessageType.STOP));
                 break;
-            case RESUME:
-                feedMessages.add(new FeedMessage(MessageType.RESUME));
-                break;
             case ALTER:
                 feedMessages.add(new AlterFeedMessage(controlFeedStatement.getProperties()));
                 break;
@@ -102,8 +109,8 @@
 
         try {
             Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider.buildFeedMessengerRuntime(
-                    metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(),
-                    metadataProvider.getDefaultDataverseName(), datasetName, feedMessages);
+                    metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(), dataverseName,
+                    datasetName, feedMessages);
             feedMessenger = p.first;
             messengerPc = p.second;
         } catch (AlgebricksException e) {
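
With SUSPEND/RESUME removed, the only control messages left are STOP (for END) and ALTER. A rough usage sketch under the signatures visible in this diff; OperationType is the enum used by CompiledControlFeedStatement, and the surrounding wiring (dataverseName, datasetName, metadataProvider, jobsToExecute) is assumed from context:

    // Build and queue a control job that ends a feed; null stands in for the
    // alter-adapter configuration params, which END does not use.
    CompiledControlFeedStatement end = new CompiledControlFeedStatement(
            OperationType.END, dataverseName, datasetName, null);
    jobsToExecute.add(FeedOperations.buildControlFeedJobSpec(end, metadataProvider));
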
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
index ccba498..434f44c 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
@@ -29,6 +29,7 @@
 import edu.uci.ics.asterix.api.aqlj.server.APIClientThreadFactory;
 import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
 import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
@@ -37,6 +38,10 @@
 import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
 import edu.uci.ics.hyracks.api.application.ICCBootstrap;
 
+/**
+ * The bootstrap class of the application; it manages the application's
+ * life cycle at the Cluster Controller.
+ */
 public class CCBootstrapImpl implements ICCBootstrap {
     private static final Logger LOGGER = Logger.getLogger(CCBootstrapImpl.class.getName());
 
@@ -71,6 +76,9 @@
         // Setup and start the API server
         setupAPIServer();
         apiServer.start();
+
+        //Initialize AsterixAppContext
+        AsterixAppContextInfoImpl.initialize(appCtx);
     }
 
     @Override
@@ -79,7 +87,7 @@
             LOGGER.info("Stopping Asterix cluster controller");
         }
         AsterixStateProxy.unregisterRemoteObject();
-        
+
         webServer.stop();
         apiServer.shutdown();
     }
@@ -107,11 +115,7 @@
         // set the APINodeDataServer ports
         int startPort = DEFAULT_API_NODEDATA_SERVER_PORT;
         Map<String, Set<String>> nodeNameMap = new HashMap<String, Set<String>>();
-        try {
-            appCtx.getCCContext().getIPAddressNodeMap(nodeNameMap);
-        } catch (Exception e) {
-            throw new IOException("Unable to obtain IP address node map", e);
-        }
+        getIPAddressNodeMap(nodeNameMap);
 
         for (Map.Entry<String, Set<String>> entry : nodeNameMap.entrySet()) {
             Set<String> nodeNames = entry.getValue();
@@ -122,7 +126,15 @@
                 proxy.setAsterixNodeState(it.next(), ns);
             }
         }
-
         apiServer = new ThreadedServer(DEFAULT_API_SERVER_PORT, new APIClientThreadFactory(appCtx));
     }
+
+    private void getIPAddressNodeMap(Map<String, Set<String>> nodeNameMap) throws IOException {
+        nodeNameMap.clear();
+        try {
+            appCtx.getCCContext().getIPAddressNodeMap(nodeNameMap);
+        } catch (Exception e) {
+            throw new IOException("Unable to obtain IP address node map", e);
+        }
+    }
 }
\ No newline at end of file
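
The new AsterixAppContextInfoImpl.initialize(appCtx) call pairs with the INSTANCE-to-getInstance() switch in APIFramework above: the context info can no longer be built eagerly, because it needs the ICCApplicationContext, which exists only once the CC has started. A simplified sketch of that initialize-once pattern (illustrative only, not the actual class body):

    // Illustrative initialize-once holder mirroring the usage in this diff.
    public class ContextHolder {
        private static ContextHolder instance;
        private final ICCApplicationContext appCtx;

        private ContextHolder(ICCApplicationContext appCtx) {
            this.appCtx = appCtx;
        }

        public static synchronized void initialize(ICCApplicationContext appCtx) {
            if (instance == null) {
                instance = new ContextHolder(appCtx);
            }
        }

        public static synchronized ContextHolder getInstance() {
            return instance; // null until initialize() runs at CC startup
        }

        public ICCApplicationContext getAppContext() {
            return appCtx;
        }
    }
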
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 9b7c356..3505b23 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.test.metadata;
 
 import java.io.File;
@@ -21,6 +35,9 @@
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
+/**
+ * Executes the Metadata tests.
+ */
 @RunWith(Parameterized.class)
 public class MetadataTest {
 
@@ -30,8 +47,7 @@
     private static final String PATH_ACTUAL = "mdtest/";
     private static final String PATH_BASE = "src/test/resources/metadata/";
     private static final String TEST_CONFIG_FILE_NAME = "test.properties";
-    private static final String WEB_SERVER_PORT="19002";
-    private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
+    private static final String WEB_SERVER_PORT = "19002";
 
     public MetadataTest(TestCaseContext tcCtx) {
         this.tcCtx = tcCtx;
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index d35001e..8eafd02 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -21,6 +21,9 @@
 import edu.uci.ics.asterix.testframework.context.TestCaseContext;
 import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
 
+/**
+ * Runs the runtime test cases under 'src/test/resources/runtimets'.
+ */
 @RunWith(Parameterized.class)
 public class ExecutionTest {
     private static final String PATH_ACTUAL = "rttest/";
@@ -49,7 +52,7 @@
         AsterixHyracksIntegrationUtil.init();
 
         // TODO: Uncomment when hadoop version is upgraded and adapters are ported
-        //HDFSCluster.getInstance().setup();
+        HDFSCluster.getInstance().setup();
     }
 
     @AfterClass
@@ -70,6 +73,7 @@
             FileUtils.deleteDirectory(log);
         File lsn = new File("last_checkpoint_lsn");
         lsn.deleteOnExit();
+        HDFSCluster.getInstance().cleanup();
     }
 
     @Parameters
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
index c07cff2..20df118 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
@@ -1,5 +1,20 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.test.runtime;
 
+import java.io.File;
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -7,20 +22,29 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
+import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 
+import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
+
+/**
+ * Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
+ * 
+ * @author ramangrover29
+ */
 @SuppressWarnings("deprecation")
 public class HDFSCluster {
 
     private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
-    private static final int nameNodePort = 10009;
-    private static final String DATA_PATH = "data/tpch0.001";
-    private static final String HDFS_PATH = "/tpch";
+    private static final int nameNodePort = 31888;
+    private static final String DATA_PATH = "data/hdfs";
+    private static final String HDFS_PATH = "/asterix";
     private static final HDFSCluster INSTANCE = new HDFSCluster();
 
     private MiniDFSCluster dfsCluster;
     private int numDataNodes = 2;
     private JobConf conf = new JobConf();
+    private FileSystem dfs;
 
     public static HDFSCluster getInstance() {
         return INSTANCE;
@@ -30,16 +54,30 @@
 
     }
 
+    /**
+     * Instantiates the (Mini) DFS Cluster with the configured number of datanodes.
+     * Post instantiation, data is loaded to HDFS.
+     * Called prior to running the Runtime test suite.
+     */
     public void setup() throws Exception {
         conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
         conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
         conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
         cleanupLocal();
         dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
-        FileSystem dfs = FileSystem.get(conf);
-        Path src = new Path(DATA_PATH);
-        Path dest = new Path(HDFS_PATH);
-        dfs.copyFromLocalFile(src, dest);
+        dfs = FileSystem.get(conf);
+        loadData();
+    }
+
+    private void loadData() throws IOException {
+        Path destDir = new Path(HDFS_PATH);
+        dfs.mkdirs(destDir);
+        File srcDir = new File(DATA_PATH);
+        File[] listOfFiles = srcDir.listFiles();
+        for (File srcFile : listOfFiles) {
+            Path path = new Path(srcFile.getAbsolutePath());
+            dfs.copyFromLocalFile(path, destDir);
+        }
     }
 
     private void cleanupLocal() throws IOException {
@@ -57,7 +95,25 @@
     public static void main(String[] args) throws Exception {
         HDFSCluster cluster = new HDFSCluster();
         cluster.setup();
-        cluster.cleanup();
+        JobConf conf = configureJobConf();
+        FileSystem fs = FileSystem.get(conf);
+        InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
+        for (InputSplit split : inputSplits) {
+            System.out.println("split :" + split);
+        }
+        //   cluster.cleanup();
+    }
+
+    private static JobConf configureJobConf() throws Exception {
+        JobConf conf = new JobConf();
+        String hdfsUrl = "hdfs://127.0.0.1:31888";
+        String hdfsPath = "/asterix/extrasmalltweets.txt";
+        conf.set("fs.default.name", hdfsUrl);
+        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+        conf.setClassLoader(HDFSAdapter.class.getClassLoader());
+        conf.set("mapred.input.dir", hdfsPath);
+        conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
+        return conf;
     }
 
 }
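
After setup(), every file under data/hdfs should be visible under /asterix on the mini-cluster. A small hedged check using stock Hadoop FileSystem calls, with the namenode port configured above:

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.JobConf;

    public class VerifyLoad {
        public static void main(String[] args) throws Exception {
            JobConf conf = new JobConf();
            conf.set("fs.default.name", "hdfs://127.0.0.1:31888"); // matches nameNodePort above
            FileSystem fs = FileSystem.get(conf);
            // List what loadData() copied into the mini-cluster.
            for (FileStatus stat : fs.listStatus(new Path("/asterix"))) {
                System.out.println(stat.getPath() + " : " + stat.getLen() + " bytes");
            }
        }
    }
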
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
new file mode 100644
index 0000000..f4547d6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Dataset
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Dataset;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
new file mode 100644
index 0000000..d62bb49
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Dataverse
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Dataverse;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
new file mode 100644
index 0000000..96dd8cc
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Nodegroup
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Nodegroup;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
new file mode 100644
index 0000000..7685427
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Index
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Index;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
new file mode 100644
index 0000000..5e6e468
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.DatasourceAdapter
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.DatasourceAdapter;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
new file mode 100644
index 0000000..0eb863f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Function
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Function;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
new file mode 100644
index 0000000..6794d04
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Datatype
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Datatype;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
new file mode 100644
index 0000000..d75e27a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
@@ -0,0 +1,8 @@
+/*
+ * Description  : Drop a system dataset - Metadata.Node
+ * Expected Res : Failure
+ * Date         : 13 Jan 2013
+ * Issue        : 239
+ */
+
+drop dataset Metadata.Node;
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index b351cfb..8a56248 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -1,56 +1,57 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "string" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index 78dc3b5..b1b303e 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -108,43 +108,49 @@
   </test-group>
   <test-group name="exception">
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_dataset"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_1">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_dataverse"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_2">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_index"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_3">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_nodegroup"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_4">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_type1"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_5">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_type2"> 
-        <expected-error>MetadataException</expected-error>
+      <compilation-unit name="issue_239_drop_system_dataset_6">
         <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
       </compilation-unit>
     </test-case>
     <test-case FilePath="exception">
-      <compilation-unit name="exception_drop_type3"> 
+      <compilation-unit name="issue_239_drop_system_dataset_7">
+        <output-file compare="Text">none.adm</output-file>
+        <expected-error>MetadataException</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="exception">
+      <compilation-unit name="issue_239_drop_system_dataset_8">
         <output-file compare="Text">none.adm</output-file>
         <expected-error>MetadataException</expected-error>
       </compilation-unit>
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
new file mode 100644
index 0000000..d4dcd38
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
@@ -0,0 +1,27 @@
+/*
+ * Description  : Create a feed dataset and verify contents in Metadata
+ * Expected Res : Success
+ * Date         : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
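+/*
+ * The adapter factory named below reads a local ADM file and replays its
+ * records as a feed; "tuple-interval" presumably paces how fast records
+ * are emitted.
+ */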
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+write output to nc1:"rttest/feeds_feeds_01.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
new file mode 100644
index 0000000..3129d63
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
@@ -0,0 +1,31 @@
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter. 
+                  Begin ingestion and verify the contents of the dataset after completion.
+ * Expected Res : Success
+ * Date         : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
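+/* "begin feed" starts ingestion; the query below then scans the ingested records. */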
+begin feed TweetFeed; 
+
+write output to nc1:"rttest/feeds_feeds_02.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
new file mode 100644
index 0000000..a4b22d0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
@@ -0,0 +1,34 @@
+/*
+ * Description  : Create a feed dataset with an associated function and verify contents in Metadata
+ * Expected Res : Success
+ * Date         : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
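+/*
+ * An identity function; it is attached to the feed below via
+ * "apply function feed_processor@1", where @1 gives the function's arity.
+ */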
+create function feed_processor($x) {
+  $x
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+apply function feed_processor@1
+partitioned by key id;
+
+write output to nc1:"rttest/feeds_feeds_03.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
new file mode 100644
index 0000000..c38cfd2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
@@ -0,0 +1,32 @@
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter. 
+                  The feed simulator simulates a feed from a file in HDFS.
+                  Begin ingestion and verify the contents of the dataset after completion.
+ * Expected Res : Success
+ * Date         : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
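+/*
+ * Unlike feeds_02, this variant sets "fs"="hdfs" so the adapter reads its
+ * input file from the HDFS instance the test expects at 127.0.0.1:31888.
+ */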
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+begin feed TweetFeed; 
+
+write output to nc1:"rttest/feeds_feeds_04.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
new file mode 100644
index 0000000..a7dc4fa
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
@@ -0,0 +1,31 @@
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter. 
+                  Begin ingestion using a fully qualified name and verify the contents of the dataset after completion.
+ * Expected Res : Success
+ * Date         : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}      
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+begin feed feeds.TweetFeed; 
+
+write output to nc1:"rttest/feeds_issue_230_feeds.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
new file mode 100644
index 0000000..7a0494f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
@@ -0,0 +1,26 @@
+/*
+* Description  : Create an external dataset that contains tuples, the lines from a (*sequence*) file in HDFS.
+                 Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date         : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_02.adm";
+
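+/* Word count: tokenize each line, group by token, and count each group. */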
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
new file mode 100644
index 0000000..fc5b3ab
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
@@ -0,0 +1,28 @@
+/*
+* Description  : Create an external dataset that contains tuples, the lines from a large (35 KB) text file in HDFS.
+                 The input file is large enough to guarantee that the number of bytes exceeds the internal buffer size of 8192.
+                 This causes a record to span across the buffer size boundaries. 
+                 Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date         : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/large_text"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_03.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
new file mode 100644
index 0000000..c2a0963
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
@@ -0,0 +1,23 @@
+/*
+* Description  : Create an external dataset that contains tuples, the lines from a file in HDFS.
+                 Iterate over the contained tuples.
+* Expected Res : Success
+* Issue        : 245
+* Date         : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ line: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/asterix_info.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_issue_245_hdfs.adm";
+
+for $x in dataset('TextDataset')
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
new file mode 100644
index 0000000..9b25210
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
@@ -0,0 +1,26 @@
+/*
+ * Description  : Create a dataset and attempt to load it without specifying a data format.
+ * Expected Res : Failure
+ * Date         : 16 Jan 2013
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Schema as closed {
+  id: int32,
+  age: int32,
+  name: string
+}
+
+create dataset onektup(Schema) 
+partitioned by key id;
+
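+/* The load statement below intentionally omits a ("format"=...) property,
+   so the load is expected to fail. */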
+load dataset onektup 
+using "localfs"(("path"="nc1:///tmp/one.adm"));
+
+write output to nc1:"/tmp/foo.adm";
+
+for $l in dataset('onektup')
+return $l
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
new file mode 100644
index 0000000..ff04a36
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
@@ -0,0 +1,35 @@
+/*
+* Description  : Create a dataset and load it from two file splits.
+                 Include whitespace between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue        : 238
+* Date         : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+  id: int32, 
+  dblpid: string,
+  title: string,
+  authors: string,
+  misc: string
+}
+
+create dataset DBLPadm(DBLPType) 
+  partitioned by key id;
+
+// drop dataset DBLPadm;
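+/* The "path" value lists two file splits separated by a comma plus
+   whitespace; issue 238 is about tolerating that whitespace. */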
+load dataset DBLPadm 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
+(("path"="nc1://data/dblp-small/part-00000.adm, nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_1.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
new file mode 100644
index 0000000..297e2f2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
@@ -0,0 +1,36 @@
+/*
+* Description  : Create a dataset and load it from two file splits.
+                 Include a newline between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue        : 238
+* Date         : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+  id: int32, 
+  dblpid: string,
+  title: string,
+  authors: string,
+  misc: string
+}
+
+create dataset DBLPadm(DBLPType) 
+  partitioned by key id;
+
+// drop dataset DBLPadm;
+load dataset DBLPadm 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter" 
+(("path"="nc1://data/dblp-small/part-00000.adm, 
+ nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_2.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
new file mode 100644
index 0000000..17d8d1d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:51:31 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
new file mode 100644
index 0000000..2fd80d983
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:49:20 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
new file mode 100644
index 0000000..d7ae022
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
@@ -0,0 +1,5 @@
+{ "word": "am", "count": 1 }
+{ "word": "grover", "count": 1 }
+{ "word": "hi", "count": 1 }
+{ "word": "i", "count": 1 }
+{ "word": "raman", "count": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
new file mode 100644
index 0000000..1033913
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
@@ -0,0 +1,93 @@
+{ "word": "a", "count": 68 }
+{ "word": "addressing", "count": 34 }
+{ "word": "an", "count": 34 }
+{ "word": "analyzing", "count": 68 }
+{ "word": "and", "count": 238 }
+{ "word": "areas", "count": 34 }
+{ "word": "asterix", "count": 102 }
+{ "word": "by", "count": 34 }
+{ "word": "cases", "count": 68 }
+{ "word": "clusters", "count": 68 }
+{ "word": "combining", "count": 34 }
+{ "word": "commodity", "count": 34 }
+{ "word": "computing", "count": 102 }
+{ "word": "content", "count": 34 }
+{ "word": "create", "count": 34 }
+{ "word": "data", "count": 238 }
+{ "word": "database", "count": 34 }
+{ "word": "databases", "count": 34 }
+{ "word": "datum", "count": 34 }
+{ "word": "declarative", "count": 34 }
+{ "word": "developing", "count": 34 }
+{ "word": "distinct", "count": 34 }
+{ "word": "each", "count": 34 }
+{ "word": "for", "count": 34 }
+{ "word": "formats", "count": 34 }
+{ "word": "from", "count": 68 }
+{ "word": "generation", "count": 34 }
+{ "word": "highly", "count": 68 }
+{ "word": "ideas", "count": 34 }
+{ "word": "including", "count": 34 }
+{ "word": "indexing", "count": 68 }
+{ "word": "information", "count": 136 }
+{ "word": "ingesting", "count": 34 }
+{ "word": "intensive", "count": 68 }
+{ "word": "irregular", "count": 34 }
+{ "word": "is", "count": 204 }
+{ "word": "issues", "count": 34 }
+{ "word": "large", "count": 68 }
+{ "word": "managing", "count": 34 }
+{ "word": "merging", "count": 34 }
+{ "word": "much", "count": 34 }
+{ "word": "new", "count": 34 }
+{ "word": "next", "count": 34 }
+{ "word": "nothing", "count": 34 }
+{ "word": "of", "count": 136 }
+{ "word": "on", "count": 102 }
+{ "word": "open", "count": 68 }
+{ "word": "parallel", "count": 68 }
+{ "word": "performant", "count": 34 }
+{ "word": "platform", "count": 34 }
+{ "word": "problem", "count": 34 }
+{ "word": "processing", "count": 34 }
+{ "word": "project", "count": 68 }
+{ "word": "quantities", "count": 34 }
+{ "word": "query", "count": 34 }
+{ "word": "querying", "count": 34 }
+{ "word": "range", "count": 34 }
+{ "word": "ranging", "count": 34 }
+{ "word": "regular", "count": 34 }
+{ "word": "research", "count": 34 }
+{ "word": "running", "count": 34 }
+{ "word": "scalable", "count": 34 }
+{ "word": "scales", "count": 34 }
+{ "word": "semi", "count": 170 }
+{ "word": "shared", "count": 34 }
+{ "word": "software", "count": 34 }
+{ "word": "solutions", "count": 34 }
+{ "word": "source", "count": 34 }
+{ "word": "stance", "count": 34 }
+{ "word": "storage", "count": 34 }
+{ "word": "storing", "count": 34 }
+{ "word": "structured", "count": 170 }
+{ "word": "subscribing", "count": 34 }
+{ "word": "support", "count": 34 }
+{ "word": "tagged", "count": 34 }
+{ "word": "taking", "count": 34 }
+{ "word": "targets", "count": 34 }
+{ "word": "techniques", "count": 68 }
+{ "word": "technologies", "count": 34 }
+{ "word": "textual", "count": 34 }
+{ "word": "that", "count": 34 }
+{ "word": "the", "count": 102 }
+{ "word": "three", "count": 34 }
+{ "word": "to", "count": 170 }
+{ "word": "todays", "count": 34 }
+{ "word": "use", "count": 68 }
+{ "word": "vast", "count": 34 }
+{ "word": "very", "count": 34 }
+{ "word": "well", "count": 34 }
+{ "word": "where", "count": 68 }
+{ "word": "wide", "count": 34 }
+{ "word": "with", "count": 34 }
+{ "word": "yet", "count": 34 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
new file mode 100644
index 0000000..8af2f5f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
@@ -0,0 +1,4 @@
+{ "line": "The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information" }
+{ "line": "The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing  to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters" }
+{ "line": "ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual" }
+{ "line": "ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information" }
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++]  Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems  Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better  A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story  Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1  Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems  Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2  Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus  A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases  A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems  The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities  Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects  Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus  A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing  The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo  A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+  An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX  The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... 
journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... 
conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... 
conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX  User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing  An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers  Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form  Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form  Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++]  Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems  Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better  A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story  Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1  Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems  Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2  Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus  A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases  A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems  The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities  Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects  Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus  A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing  The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo  A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+  An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX  The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... 
journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... 
conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... 
conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX  User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing  An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers  Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX  Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form  Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form  Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 7756317..9ae7c60 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -2629,6 +2629,16 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="scan">
+      <compilation-unit name="issue238_query_1">
+        <output-file compare="Text">issue238_query_1.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="scan">
+      <compilation-unit name="issue238_query_2">
+        <output-file compare="Text">issue238_query_2.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="scan">
       <compilation-unit name="30">
         <output-file compare="Text">30.adm</output-file>
       </compilation-unit>
@@ -3827,4 +3837,56 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="load">
+    <test-case FilePath="load">
+      <compilation-unit name="issue14_query">
+        <output-file compare="Text">none.adm</output-file>
+        <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error> 
+      </compilation-unit>
+    </test-case>
+  </test-group>
+  <test-group name="feeds">
+    <test-case FilePath="feeds">
+      <compilation-unit name="feeds_01">
+        <output-file compare="Text">feeds_01.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="feeds">
+      <compilation-unit name="feeds_02">
+        <output-file compare="Text">feeds_02.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="feeds">
+      <compilation-unit name="feeds_03">
+        <output-file compare="Text">feeds_03.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="feeds">
+      <compilation-unit name="feeds_04">
+        <output-file compare="Text">feeds_04.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="feeds">
+      <compilation-unit name="issue_230_feeds">
+        <output-file compare="Text">issue_230_feeds.adm</output-file>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+  <test-group name="hdfs">
+    <test-case FilePath="hdfs">
+      <compilation-unit name="issue_245_hdfs">
+        <output-file compare="Text">issue_245_hdfs.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="hdfs">
+      <compilation-unit name="hdfs_02">
+        <output-file compare="Text">hdfs_02.adm</output-file>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="hdfs">
+      <compilation-unit name="hdfs_03">
+        <output-file compare="Text">hdfs_03.adm</output-file>
+      </compilation-unit>
+    </test-case>
+  </test-group>
 </test-suite>
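
The new "load" group above is a negative test: there is no meaningful output to compare, so the runner must instead observe a failure whose class name matches the <expected-error> element. A minimal sketch of that check, assuming a hypothetical executeUnit helper (illustrative only, not the actual test-framework code):

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.fail;

    public class ExpectedErrorSketch {

        // Hypothetical helper: compiles and runs one compilation unit,
        // propagating whatever exception the run produces.
        private static void executeUnit(String unitName) throws Exception {
            throw new UnsupportedOperationException("sketch only: " + unitName);
        }

        public static void checkExpectedError(String unitName, String expectedErrorClass) {
            try {
                executeUnit(unitName);
                fail("expected " + expectedErrorClass + " but the unit succeeded");
            } catch (Exception e) {
                assertEquals(expectedErrorClass, e.getClass().getName());
            }
        }
    }

For issue14_query the expected class is edu.uci.ics.asterix.common.exceptions.AsterixException, as declared in the test case above.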
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index eaa4260..8672fd1 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -805,7 +805,7 @@
   dataverse = nameComponents.first != null ? nameComponents.first.getValue() : defaultDataverse;
   functionName = nameComponents.second.getValue();
   }
-  ("@" <IDENTIFIER> 
+  ("@" <INTEGER_LITERAL> 
      {
         arity = Integer.parseInt(token.image);
      }
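
With <INTEGER_LITERAL> in place of <IDENTIFIER>, the arity that follows "@" in a function signature (e.g. a signature written as fn@2) is lexed as digits only, so the Integer.parseInt call above cannot throw NumberFormatException for any input the parser accepts. A standalone illustration of the step (a sketch, not the generated parser code):

    public class AritySketch {
        public static void main(String[] args) {
            // Stand-in for token.image of the <INTEGER_LITERAL> after "@".
            String tokenImage = "2";
            int arity = Integer.parseInt(tokenImage); // safe: digits only
            System.out.println("arity = " + arity);   // prints: arity = 2
        }
    }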
@@ -866,7 +866,7 @@
    { 
    configuration = getConfiguration();
    }
-  ";"
+  
   {
     return new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
   }
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index 048c037..ee81e75 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -25,13 +25,11 @@
 	<dependencies>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-algebricks-compiler</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
+			<artifactId>algebricks-compiler</artifactId>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-dataflow-std</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
index 144a8824..00a2651 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
@@ -1,22 +1,50 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.common.api;
 
-import java.util.Map;
-import java.util.Set;
-
 import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
 import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
 import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
+/**
+ * Acts as a holder class for the IndexRegistryProvider and AsterixStorageManager
+ * instances that are accessed from the NCs. In addition, an instance of
+ * ICCApplicationContext is stored for access by the CC.
+ */
 public class AsterixAppContextInfoImpl implements IAsterixApplicationContextInfo {
 
-    public static final AsterixAppContextInfoImpl INSTANCE = new AsterixAppContextInfoImpl();
+    private static AsterixAppContextInfoImpl INSTANCE;
 
-    private static Map<String, Set<String>> nodeControllerMap;
+    private final ICCApplicationContext appCtx;
 
-    private AsterixAppContextInfoImpl() {
+    public static void initialize(ICCApplicationContext ccAppCtx) {
+        if (INSTANCE == null) {
+            INSTANCE = new AsterixAppContextInfoImpl(ccAppCtx);
+        }
+    }
+
+    private AsterixAppContextInfoImpl(ICCApplicationContext ccAppCtx) {
+        this.appCtx = ccAppCtx;
+    }
+
+    public static IAsterixApplicationContextInfo getInstance() {
+        return INSTANCE;
     }
 
     @Override
@@ -29,12 +57,9 @@
         return AsterixStorageManagerInterface.INSTANCE;
     }
 
-    public static void setNodeControllerInfo(Map<String, Set<String>> nodeControllerInfo) {
-        nodeControllerMap = nodeControllerInfo;
-    }
-
-    public static Map<String, Set<String>> getNodeControllerMap() {
-        return nodeControllerMap;
+    @Override
+    public ICCApplicationContext getCCApplicationContext() {
+        return appCtx;
     }
 
 }
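
AsterixAppContextInfoImpl is now an initialize-once singleton seeded with the CC's ICCApplicationContext: the first initialize call wins and later calls are no-ops. As written, initialize is unsynchronized and therefore assumes a single startup thread. For comparison, a minimal thread-safe sketch of the same idiom (a standalone illustration, not the patched class):

    public final class InitOnceHolder {
        private static InitOnceHolder INSTANCE;
        private final Object context;

        private InitOnceHolder(Object context) {
            this.context = context;
        }

        // First caller wins; later calls are no-ops.
        public static synchronized void initialize(Object context) {
            if (INSTANCE == null) {
                INSTANCE = new InitOnceHolder(context);
            }
        }

        public static synchronized InitOnceHolder getInstance() {
            return INSTANCE; // null until initialize() has run
        }

        public Object getContext() {
            return context;
        }
    }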
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
index d676cb5..f84f294 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
@@ -12,9 +12,11 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
 import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
 import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
 import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -22,7 +24,7 @@
 public class AsterixAppRuntimeContext {
     private static final int DEFAULT_BUFFER_CACHE_PAGE_SIZE = 32768;
     private final INCApplicationContext ncApplicationContext;
-    
+
     private IndexRegistry<IIndex> indexRegistry;
     private IFileMapManager fileMapManager;
     private IBufferCache bufferCache;
@@ -43,7 +45,8 @@
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
         IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
         IIOManager ioMgr = ncApplicationContext.getRootContext().getIOManager();
-        bufferCache = new BufferCache(ioMgr, allocator, prs, fileMapManager, pageSize, numPages, Integer.MAX_VALUE);
+        IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000); // 600000 ms = 10 min delay before cleaning pages
+        bufferCache = new BufferCache(ioMgr, allocator, prs, pcp, fileMapManager, pageSize, numPages, Integer.MAX_VALUE);
 
         // Initialize the index registry
         indexRegistry = new IndexRegistry<IIndex>();
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
index 7bb0fd6..032e832 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
@@ -1,11 +1,48 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.common.dataflow;
 
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
+/**
+ * Provides methods for obtaining the IIndexRegistryProvider, IStorageManagerInterface,
+ * and ICCApplicationContext implementations.
+ */
 public interface IAsterixApplicationContextInfo {
+
+    /**
+     * Returns an instance of the implementation for IIndexRegistryProvider.
+     * 
+     * @return IIndexRegistryProvider implementation instance
+     */
     public IIndexRegistryProvider<IIndex> getIndexRegistryProvider();
 
+    /**
+     * Returns an instance of the implementation for IStorageManagerInterface.
+     * 
+     * @return IStorageManagerInterface implementation instance
+     */
     public IStorageManagerInterface getStorageManagerInterface();
+
+    /**
+     * Returns an instance of the implementation for ICCApplicationContext.
+     * 
+     * @return ICCApplicationContext implementation instance
+     */
+    public ICCApplicationContext getCCApplicationContext();
 }
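
Together with the refactored AsterixAppContextInfoImpl above, a CC-side component reaches all three handles through this interface. A hedged usage sketch (the wrapper class and method are hypothetical; the calls are exactly those declared above):

    import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
    import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
    import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
    import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
    import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
    import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;

    public class ContextInfoUsageSketch {
        // Assumes AsterixAppContextInfoImpl.initialize(...) already ran at CC startup.
        public static void wireJob() {
            IAsterixApplicationContextInfo info = AsterixAppContextInfoImpl.getInstance();
            IIndexRegistryProvider<IIndex> indexRegistry = info.getIndexRegistryProvider();
            IStorageManagerInterface storageManager = info.getStorageManagerInterface();
            ICCApplicationContext ccCtx = info.getCCApplicationContext();
            // ... hand these to operator descriptors when building a Hyracks job ...
        }
    }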
diff --git a/asterix-dist/pom.xml b/asterix-dist/pom.xml
index 53168b4..654a11e 100644
--- a/asterix-dist/pom.xml
+++ b/asterix-dist/pom.xml
@@ -1,54 +1,55 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>asterix-dist</artifactId>
-  <parent>
-    <groupId>edu.uci.ics.asterix</groupId>
-    <artifactId>asterix</artifactId>
-    <version>0.0.4-SNAPSHOT</version>
-  </parent>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>asterix-dist</artifactId>
+	<parent>
+		<groupId>edu.uci.ics.asterix</groupId>
+		<artifactId>asterix</artifactId>
+		<version>0.0.4-SNAPSHOT</version>
+	</parent>
 
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-    <dependency>
-	  <groupId>edu.uci.ics.hyracks</groupId>
-  	  <artifactId>hyracks-server</artifactId>
-  	  <version>0.2.2-SNAPSHOT</version>
-  	  <type>zip</type>
-      <classifier>binary-assembly</classifier>
-    </dependency>
-    <dependency>
-	  <groupId>edu.uci.ics.hyracks</groupId>
-  	  <artifactId>hyracks-cli</artifactId>
-  	  <version>0.2.2-SNAPSHOT</version>
-  	  <type>zip</type>
-      <classifier>binary-assembly</classifier>
-    </dependency>
-    <dependency>
-	  <groupId>edu.uci.ics.asterix</groupId>
-  	  <artifactId>asterix-app</artifactId>
-  	  <version>0.0.4-SNAPSHOT</version>
-  	  <type>zip</type>
-      <classifier>binary-assembly</classifier>
-    </dependency>
-  </dependencies>
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<version>2.2-beta-5</version>
+				<executions>
+					<execution>
+						<configuration>
+							<descriptors>
+								<descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+							</descriptors>
+						</configuration>
+						<phase>package</phase>
+						<goals>
+							<goal>attached</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+	<dependencies>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-server</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>zip</type>
+			<classifier>binary-assembly</classifier>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-cli</artifactId>
+			<version>0.2.3-SNAPSHOT</version>
+			<type>zip</type>
+			<classifier>binary-assembly</classifier>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-app</artifactId>
+			<version>0.0.4-SNAPSHOT</version>
+			<type>zip</type>
+			<classifier>binary-assembly</classifier>
+		</dependency>
+	</dependencies>
 </project>
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index 7da6bd9..36e7a71 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -5,10 +5,7 @@
 		<groupId>edu.uci.ics.asterix</groupId>
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
-	<groupId>edu.uci.ics.asterix</groupId>
 	<artifactId>asterix-external-data</artifactId>
-	<version>0.0.4-SNAPSHOT</version>
-
 	<build>
 		<plugins>
 			<plugin>
@@ -131,7 +128,6 @@
 	        <dependency>
 		         <groupId>edu.uci.ics.hyracks</groupId>
 		         <artifactId>hyracks-dataflow-hadoop</artifactId>
-		         <version>0.2.2-SNAPSHOT</version>
 	        </dependency>
 <dependency>
             <groupId>jdom</groupId>
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
index 7d5984a..f1f5884 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -5,7 +19,10 @@
 import edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter;
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 
-public class CNNFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of {@link CNNFeedAdapter}.
+ */
+public class CNNFeedAdapterFactory implements ITypedDatasetAdapterFactory {
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -19,14 +36,4 @@
         return "cnn_feed";
     }
 
-    @Override
-    public FeedAdapterType getFeedAdapterType() {
-        return FeedAdapterType.TYPED;
-    }
-
-    @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.FEED;
-    }
-
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
index f0e30b69..6fcb710 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -6,7 +20,12 @@
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 import edu.uci.ics.asterix.om.types.IAType;
 
-public class HDFSAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HDFSAdapter
+ */
+public class HDFSAdapterFactory implements IGenericDatasetAdapterFactory {
+
+    public static final String HDFS_ADAPTER_NAME = "hdfs";
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,13 +35,8 @@
     }
 
     @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.EXTERNAL_DATASET;
-    }
-
-    @Override
     public String getName() {
-        return "hdfs";
+        return HDFS_ADAPTER_NAME;
     }
 
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
index b21abe6..5e28eed 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -6,7 +20,10 @@
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 import edu.uci.ics.asterix.om.types.IAType;
 
-public class HiveAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HiveAdapter
+ */
+public class HiveAdapterFactory implements IGenericDatasetAdapterFactory {
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
@@ -16,11 +33,6 @@
     }
 
     @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.EXTERNAL_DATASET;
-    }
-
-    @Override
     public String getName() {
         return "hive";
     }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
index ca59da7..45fd6cf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
@@ -1,13 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
+/**
+ * Base interface for IGenericDatasetAdapterFactory and ITypedDatasetAdapterFactory.
+ * Acts as a marker interface indicating that the implementation provides functionality
+ * for creating an adapter.
+ */
 public interface IAdapterFactory {
 
-    public enum AdapterType {
-        EXTERNAL_DATASET,
-        FEED
-    }
-
-    public AdapterType getAdapterType();
-
+    /**
+     * Returns the display name of the adapter created by the factory.
+     * 
+     * @return the display name
+     */
     public String getName();
 }
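The old enum-based dispatch (AdapterType, FeedAdapterType) is replaced by instanceof checks against the two sub-interfaces. A minimal sketch of how a caller resolves a factory by class name and dispatches on its kind, mirroring the logic this change adds to FeedIntakeOperatorDescriptor (the factoryClassName, configuration, and outputType variables are assumed to be in scope):

    IAdapterFactory factory = (IAdapterFactory) Class.forName(factoryClassName).newInstance();
    IDatasourceAdapter adapter;
    if (factory instanceof IGenericDatasetAdapterFactory) {
        // generic kind: the caller supplies the output record type
        adapter = ((IGenericDatasetAdapterFactory) factory).createAdapter(configuration, outputType);
    } else if (factory instanceof ITypedDatasetAdapterFactory) {
        // typed kind: the adapter's output type is pre-defined
        adapter = ((ITypedDatasetAdapterFactory) factory).createAdapter(configuration);
    } else {
        throw new IllegalStateException("Unknown adapter factory kind: " + factoryClassName);
    }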
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
deleted file mode 100644
index 22768a3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IExternalDatasetAdapterFactory extends IAdapterFactory {
-
-    public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType sourceType) throws Exception;
-    
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
deleted file mode 100644
index a7d5998..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-
-public interface IFeedDatasetAdapterFactory extends IAdapterFactory {
-
-    public enum FeedAdapterType {
-        GENERIC,
-        TYPED
-    }
-
-    public FeedAdapterType getFeedAdapterType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
new file mode 100644
index 0000000..093a3dd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * A base interface for an adapter factory that creates an instance of an adapter kind that
+ * is 'generic' in nature. A 'generic' adapter returns records with a configurable datatype.
+ */
+public interface IGenericDatasetAdapterFactory extends IAdapterFactory {
+
+    public static final String KEY_TYPE_NAME = "output-type-name";
+
+    /**
+     * Creates an instance of IDatasourceAdapter.
+     * 
+     * @param configuration
+     *            The configuration parameters for the adapter that is instantiated.
+     *            The passed-in configuration is used to configure the created instance of the adapter.
+     * @param atype
+     *            The type for the ADM records that are returned by the adapter.
+     * @return An instance of IDatasourceAdapter.
+     * @throws Exception
+     */
+    public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
+
+}
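A minimal implementation sketch following the pattern of the factories in this change; the MyDelimitedFileAdapterFactory and MyDelimitedFileAdapter names are hypothetical, and the configure(Map) call assumes the adapter exposes the same configuration step as the adapters above:

    package edu.uci.ics.asterix.external.adapter.factory;

    import java.util.Map;

    import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
    import edu.uci.ics.asterix.om.types.IAType;

    public class MyDelimitedFileAdapterFactory implements IGenericDatasetAdapterFactory {

        public static final String MY_ADAPTER_NAME = "my_delimited_file";

        @Override
        public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
            // hypothetical adapter class; takes the caller-supplied output type
            MyDelimitedFileAdapter adapter = new MyDelimitedFileAdapter(atype);
            adapter.configure(configuration);
            return adapter;
        }

        @Override
        public String getName() {
            return MY_ADAPTER_NAME;
        }
    }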
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
deleted file mode 100644
index 34eeff2..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IGenericFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
-    public static final String KEY_TYPE_NAME="output-type-name";
-    
-    public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
new file mode 100644
index 0000000..0f9978e
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+
+/**
+ * A base interface for an adapter factory that creates an instance of an adapter kind that
+ * is 'typed' in nature. A 'typed' adapter returns records with a pre-defined datatype.
+ */
+public interface ITypedDatasetAdapterFactory extends IAdapterFactory {
+
+    /**
+     * Creates an instance of IDatasourceAdapter.
+     * 
+     * @param configuration
+     *            The configuration parameters for the adapter that is instantiated.
+     *            The passed-in configuration is used to configure the created instance of the adapter.
+     * @return An instance of IDatasourceAdapter.
+     * @throws Exception
+     */
+    public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
+
+}
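Because the output type is pre-defined, a typed factory is handed only the configuration map. A usage sketch with PullBasedTwitterAdapterFactory from this change; the configuration keys and values here are assumptions for illustration:

    ITypedDatasetAdapterFactory factory = new PullBasedTwitterAdapterFactory();
    Map<String, String> configuration = new HashMap<String, String>();
    configuration.put("query", "asterixdb");  // hypothetical adapter parameter
    configuration.put("interval", "10");      // hypothetical adapter parameter
    IDatasourceAdapter adapter = factory.createAdapter(configuration);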
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
deleted file mode 100644
index 84aa88d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-
-public interface ITypedFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
-    public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
index ed43371..2040949 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -6,7 +20,14 @@
 import edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter;
 import edu.uci.ics.asterix.om.types.IAType;
 
-public class NCFileSystemAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of NCFileSystemAdapter. An
+ * NCFileSystemAdapter reads external data residing on the local file system of
+ * an NC.
+ */
+public class NCFileSystemAdapterFactory implements IGenericDatasetAdapterFactory {
+
+    public static final String NC_FILE_SYSTEM_ADAPTER_NAME = "localfs";
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,12 +37,7 @@
     }
 
     @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.EXTERNAL_DATASET;
-    }
-
-    @Override
     public String getName() {
-        return "localfs";
+        return NC_FILE_SYSTEM_ADAPTER_NAME;
     }
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
index 46a8004..bc00469 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -5,7 +19,14 @@
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
 
-public class PullBasedTwitterAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of PullBasedTwitterAdapter.
+ * This adapter provides the functionality of fetching tweets from the Twitter service
+ * via the pull-based Twitter API.
+ */
+public class PullBasedTwitterAdapterFactory implements ITypedDatasetAdapterFactory {
+
+    public static final String PULL_BASED_TWITTER_ADAPTER_NAME = "pull_twitter";
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +36,8 @@
     }
 
     @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.EXTERNAL_DATASET;
-    }
-
-    @Override
     public String getName() {
-        return "pull_twitter";
-    }
-
-    @Override
-    public FeedAdapterType getFeedAdapterType() {
-        return FeedAdapterType.TYPED;
+        return PULL_BASED_TWITTER_ADAPTER_NAME;
     }
 
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
index 1154d8a..bbbea38 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.adapter.factory;
 
 import java.util.Map;
@@ -5,7 +19,13 @@
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
 
-public class RSSFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of {@link RSSFeedAdapter}.
+ * RSSFeedAdapter provides the functionality of fetching an RSS-based feed.
+ */
+public class RSSFeedAdapterFactory implements ITypedDatasetAdapterFactory {
+
+    public static final String RSS_FEED_ADAPTER_NAME = "rss_feed";
 
     @Override
     public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +35,8 @@
     }
 
     @Override
-    public AdapterType getAdapterType() {
-        return AdapterType.FEED;
-    }
-
-    @Override
     public String getName() {
         return "rss_feed";
     }
 
-    @Override
-    public FeedAdapterType getFeedAdapterType() {
-        return FeedAdapterType.TYPED;
-    }
-
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
index 6dbec48..fb4cc99 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -16,7 +16,7 @@
 
 import java.util.Map;
 
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
@@ -30,13 +30,18 @@
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
 
+/**
+ * A single activity operator that provides the functionality of scanning data using an
+ * instance of the configured adapter.
+ */
 public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
     private static final long serialVersionUID = 1L;
 
     private final String adapterFactory;
     private final Map<String, String> adapterConfiguration;
     private final IAType atype;
-    private IExternalDatasetAdapterFactory datasourceAdapterFactory;
+    private IGenericDatasetAdapterFactory datasourceAdapterFactory;
 
     public ExternalDataScanOperatorDescriptor(JobSpecification spec, String adapter, Map<String, String> arguments,
             IAType atype, RecordDescriptor rDesc) {
@@ -79,11 +84,12 @@
 
     }
 
+    @Override
     public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
             throws HyracksDataException {
         try {
-            datasourceAdapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
+            datasourceAdapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
         } catch (Exception e) {
             throw new HyracksDataException("initialization of adapter failed", e);
         }
@@ -93,7 +99,7 @@
                 writer.open();
                 IDatasourceAdapter adapter = null;
                 try {
-                    adapter = ((IExternalDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
+                    adapter = ((IGenericDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
                             adapterConfiguration, atype);
                     adapter.initialize(ctx);
                     adapter.start(partition, writer);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
new file mode 100644
index 0000000..2da4e76
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Operator responsible for ingesting data from an external source. This
+ * operator uses a (configurable) adapter associated with the feed dataset.
+ */
+public class FeedIntakeOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor {
+
+	private static final long serialVersionUID = 1L;
+
+	private final String adapterFactoryClassName;
+	private final Map<String, String> adapterConfiguration;
+	private final IAType atype;
+	private final FeedId feedId;
+
+	private transient IAdapterFactory datasourceAdapterFactory;
+
+	public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId,
+			String adapter, Map<String, String> arguments, ARecordType atype,
+			RecordDescriptor rDesc) {
+		super(spec, 1, 1);
+		recordDescriptors[0] = rDesc;
+		this.adapterFactoryClassName = adapter;
+		this.adapterConfiguration = arguments;
+		this.atype = atype;
+		this.feedId = feedId;
+	}
+
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IRecordDescriptorProvider recordDescProvider, final int partition,
+			int nPartitions) throws HyracksDataException {
+		ITypedDatasourceAdapter adapter;
+		try {
+			datasourceAdapterFactory = (IAdapterFactory) Class.forName(
+					adapterFactoryClassName).newInstance();
+			if (datasourceAdapterFactory instanceof IGenericDatasetAdapterFactory) {
+				adapter = (ITypedDatasourceAdapter) ((IGenericDatasetAdapterFactory) datasourceAdapterFactory)
+						.createAdapter(adapterConfiguration, atype);
+			} else if (datasourceAdapterFactory instanceof ITypedDatasetAdapterFactory) {
+				adapter = (ITypedDatasourceAdapter) ((ITypedDatasetAdapterFactory) datasourceAdapterFactory)
+						.createAdapter(adapterConfiguration);
+			} else {
+				throw new IllegalStateException(
+						" Unknown adapter factory type for "
+								+ adapterFactoryClassName);
+			}
+			adapter.initialize(ctx);
+		} catch (Exception e) {
+			throw new HyracksDataException("initialization of adapter failed",
+					e);
+		}
+		return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
+	}
+}
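A construction sketch for the new descriptor; the dataverse and dataset names, the configuration map, and the recordType/recordDescriptor wiring are assumptions, as a real job would derive them from metadata:

    JobSpecification spec = new JobSpecification();
    FeedId feedId = new FeedId("MyDataverse", "MyFeedDataset"); // hypothetical feed
    FeedIntakeOperatorDescriptor feedIntake = new FeedIntakeOperatorDescriptor(spec, feedId,
            "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory",
            configuration, recordType, recordDescriptor);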
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
index ff6216a..29fe72a 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
@@ -1,19 +1,35 @@
-package edu.uci.ics.asterix.feed.operator;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.LinkedBlockingQueue;
 
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
 import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
 
+/**
+ * The runtime for {@link FeedIntakeOperatorDescriptor}.
+ */
 public class FeedIntakeOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
 
     private final IDatasourceAdapter adapter;
@@ -26,7 +42,7 @@
     public FeedIntakeOperatorNodePushable(FeedId feedId, IDatasourceAdapter adapter, int partition) {
         this.adapter = adapter;
         this.partition = partition;
-        this.feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
+        this.feedManager = (IFeedManager) FeedManager.INSTANCE;
         this.feedId = feedId;
         inbox = new LinkedBlockingQueue<IFeedMessage>();
     }
@@ -36,21 +52,25 @@
         if (adapter instanceof IManagedFeedAdapter) {
             feedInboxMonitor = new FeedInboxMonitor((IManagedFeedAdapter) adapter, inbox, partition);
             feedInboxMonitor.start();
-            feedManager.registerFeedOperatorMsgQueue(feedId, inbox);
+            feedManager.registerFeedMsgQueue(feedId, inbox);
         }
         writer.open();
         try {
             adapter.start(partition, writer);
         } catch (Exception e) {
             e.printStackTrace();
-            // we do not throw an exception, but allow the operator to close
-            // gracefully
-            // Throwing an exception here would result in a job abort and a
-            // transaction roll back
-            // that undoes all the work done so far.
+            throw new HyracksDataException(e);
+            /*
+             * Propagating the exception results in a job abort and a
+             * transaction rollback that undoes all the work done so far,
+             * rather than allowing the operator to close silently.
+             */
 
         } finally {
             writer.close();
+            if (adapter instanceof IManagedFeedAdapter) {
+                feedManager.unregisterFeedMsgQueue(feedId, inbox);
+            }
         }
     }
 
@@ -66,7 +86,7 @@
 
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        // TODO Auto-generated method stub
+        // do nothing
     }
 }
 
@@ -86,17 +106,11 @@
             try {
                 IFeedMessage feedMessage = inbox.take();
                 switch (feedMessage.getMessageType()) {
-                    case SUSPEND:
-                        adapter.suspend();
-                        break;
-                    case RESUME:
-                        adapter.resume();
-                        break;
                     case STOP:
                         adapter.stop();
                         break;
                     case ALTER:
-                        ((IMutableFeedAdapter) adapter).alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
+                        adapter.alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
                         break;
                 }
             } catch (InterruptedException ie) {
@@ -107,4 +121,4 @@
         }
     }
 
-}
\ No newline at end of file
+}
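The reworked open() boils down to a register/start/unregister handshake with the feed manager. A sketch of that flow, assuming the adapter implements IManagedFeedAdapter so that the message queue is registered:

    LinkedBlockingQueue<IFeedMessage> inbox = new LinkedBlockingQueue<IFeedMessage>();
    feedManager.registerFeedMsgQueue(feedId, inbox);
    writer.open();
    try {
        adapter.start(partition, writer);
    } finally {
        writer.close();
        feedManager.unregisterFeedMsgQueue(feedId, inbox);
    }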
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
new file mode 100644
index 0000000..d0dc5ca
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Sends a control message to the registered message queue of the feed specified by its feedId.
+ */
+public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedId feedId;
+    private final List<IFeedMessage> feedMessages;
+    private final boolean sendToAll = true;
+
+    public FeedMessageOperatorDescriptor(JobSpecification spec, String dataverse, String dataset,
+            List<IFeedMessage> feedMessages) {
+        super(spec, 0, 1);
+        this.feedId = new FeedId(dataverse, dataset);
+        this.feedMessages = feedMessages;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages, sendToAll, partition, nPartitions);
+    }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
new file mode 100644
index 0000000..d03eeaa
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Runtime for the {@link FeedMessageOperatorDescriptor}.
+ */
+public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+    private final FeedId feedId;
+    private final List<IFeedMessage> feedMessages;
+    private IFeedManager feedManager;
+
+    public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, List<IFeedMessage> feedMessages,
+            boolean applyToAll, int partition, int nPartitions) {
+        this.feedId = feedId;
+        if (applyToAll) {
+            this.feedMessages = feedMessages;
+        } else {
+            this.feedMessages = new ArrayList<IFeedMessage>();
+            this.feedMessages.add(feedMessages.get(partition));
+        }
+        feedManager = (IFeedManager) FeedManager.INSTANCE;
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            writer.open();
+            for (IFeedMessage feedMessage : feedMessages) {
+                feedManager.deliverMessage(feedId, feedMessage);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            writer.close();
+        }
+    }
+
+}
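Delivery itself is one call per message against the singleton feed manager. A sketch; constructing a concrete IFeedMessage is elided because its implementations (e.g. AlterFeedMessage) are not shown in this change:

    IFeedManager feedManager = (IFeedManager) FeedManager.INSTANCE;
    FeedId feedId = new FeedId("MyDataverse", "MyFeedDataset"); // hypothetical feed
    feedManager.deliverMessage(feedId, feedMessage);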
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
index 64c8853..440ee8c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -34,66 +34,60 @@
  */
 public abstract class AbstractDatasourceAdapter implements IDatasourceAdapter {
 
-	private static final long serialVersionUID = -3510610289692452466L;
+    private static final long serialVersionUID = 1L;
 
-	protected Map<String, String> configuration;
-	protected transient AlgebricksPartitionConstraint partitionConstraint;
-	protected IAType atype;
-	protected IHyracksTaskContext ctx;
-	protected AdapterType adapterType;
-	protected boolean typeInfoRequired = false;
-	
-	
-	protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
-	static {
-		typeToValueParserFactMap.put(ATypeTag.INT32,
-				IntegerParserFactory.INSTANCE);
-		typeToValueParserFactMap.put(ATypeTag.FLOAT,
-				FloatParserFactory.INSTANCE);
-		typeToValueParserFactMap.put(ATypeTag.DOUBLE,
-				DoubleParserFactory.INSTANCE);
-		typeToValueParserFactMap
-				.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
-		typeToValueParserFactMap.put(ATypeTag.STRING,
-				UTF8StringParserFactory.INSTANCE);
-	}
+    protected Map<String, String> configuration;
+    protected transient AlgebricksPartitionConstraint partitionConstraint;
+    protected IAType atype;
+    protected IHyracksTaskContext ctx;
+    protected AdapterType adapterType;
 
-	protected static final HashMap<String, String> formatToParserFactoryMap = new HashMap<String, String>();
+    protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+    static {
+        typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
+        typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
+        typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
+        typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+        typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
+    }
 
-	public static final String KEY_FORMAT = "format";
-	public static final String KEY_PARSER_FACTORY = "parser";
+    protected static final Map<String, String> formatToParserFactoryMap = initializeFormatParserFactoryMap();
 
-	public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
-	public static final String FORMAT_ADM = "adm";
+    public static final String KEY_FORMAT = "format";
+    public static final String KEY_PARSER_FACTORY = "parser";
+    public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
+    public static final String FORMAT_ADM = "adm";
 
-	static {
-		formatToParserFactoryMap
-				.put(FORMAT_DELIMITED_TEXT,
-						"edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
-		formatToParserFactoryMap
-				.put(FORMAT_ADM,
-						"edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+    private static Map<String, String> initializeFormatParserFactoryMap() {
+        Map<String, String> map = new HashMap<String, String>();
+        map.put(FORMAT_DELIMITED_TEXT, "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
+        map.put(FORMAT_ADM, "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+        return map;
+    }
 
-	}
+    /**
+     * Get the partition constraint chosen by the adapter.
+     * An adapter may have preferences as to where it needs to be instantiated and used.
+     */
+    public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
 
-	public AlgebricksPartitionConstraint getPartitionConstraint() {
-		return partitionConstraint;
-	}
+    /**
+     * Get the configured value from the adapter configuration parameters, corresponding to an attribute.
+     * 
+     * @param attribute
+     *            The attribute whose value needs to be obtained.
+     */
+    public String getAdapterProperty(String attribute) {
+        return configuration.get(attribute);
+    }
 
-	public String getAdapterProperty(String attribute) {
-		return configuration.get(attribute);
-	}
-
-	public Map<String, String> getConfiguration() {
-		return configuration;
-	}
-
-	public void setAdapterProperty(String property, String value) {
-		configuration.put(property, value);
-	}
-
-	public boolean isTypeInfoRequired() {
-        return typeInfoRequired;
+    /**
+     * Get the adapter configuration parameters.
+     * 
+     * @return A Map<String,String> instance representing the adapter configuration.
+     */
+    public Map<String, String> getConfiguration() {
+        return configuration;
     }
 
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
deleted file mode 100644
index 8f2a896..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public abstract class AbstractFeedDatasourceAdapter extends AbstractDatasourceAdapter implements IFeedDatasourceAdapter {
-
-    protected AdapterDataFlowType adapterDataFlowType;
-    protected ARecordType adapterOutputType;
-
-    public AdapterDataFlowType getAdapterDataFlowType() {
-        return adapterDataFlowType;
-    }
-
-    public ARecordType getAdapterOutputType() {
-        return adapterOutputType;
-    }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
index 1a6be25..ac36733 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -16,8 +16,13 @@
 
 import java.io.Serializable;
 
+/**
+ * A unique identifier for a datasource adapter.
+ */
 public class AdapterIdentifier implements Serializable {
 
+    private static final long serialVersionUID = 1L;
+
     private final String namespace;
     private final String adapterName;
 
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
index 3760d56..3898f7e 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.external.dataset.adapter;
 
 import java.util.ArrayList;
@@ -5,19 +19,20 @@
 import java.util.List;
 import java.util.Map;
 
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
 
-public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IMutableFeedAdapter {
+/**
+ * An adapter that provides the functionality of fetching the news feed from the CNN service.
+ * The adapter provides the news feed as ADM records.
+ */
+public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IManagedFeedAdapter {
 
     private static final long serialVersionUID = 2523303758114582251L;
     private List<String> feedURLs = new ArrayList<String>();
-    private String id_prefix = "";
+    private static Map<String, String> topicFeeds = new HashMap<String, String>();
 
     public static final String KEY_RSS_URL = "topic";
     public static final String KEY_INTERVAL = "interval";
-
-    private static Map<String, String> topicFeeds = new HashMap<String, String>();
-
     public static final String TOP_STORIES = "topstories";
     public static final String WORLD = "world";
     public static final String US = "us";
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
index dde273e..e46705d 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
@@ -16,14 +16,15 @@
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.lang.reflect.Constructor;
 import java.util.Map;
 
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
 import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -33,10 +34,13 @@
 
 public abstract class FileSystemBasedAdapter extends AbstractDatasourceAdapter {
 
-    protected boolean userDefinedParser = false;
-    protected String parserFactoryClassname;
+    private static final long serialVersionUID = 1L;
+
+    protected ITupleParserFactory parserFactory;
+    protected ITupleParser parser;
 
     public static final String KEY_DELIMITER = "delimiter";
+    public static final String KEY_PATH = "path";
 
     public abstract InputStream getInputStream(int partition) throws IOException;
 
@@ -44,13 +48,10 @@
         this.atype = atype;
     }
 
-    public FileSystemBasedAdapter() {
-    }
-
     @Override
     public void start(int partition, IFrameWriter writer) throws Exception {
         InputStream in = getInputStream(partition);
-        ITupleParser parser = getTupleParser();
+        parser = getTupleParser();
         parser.parse(in, writer);
     }
 
@@ -63,44 +64,33 @@
     @Override
     public abstract AdapterType getAdapterType();
 
+    @Override
+    public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
+
     protected ITupleParser getTupleParser() throws Exception {
-        ITupleParser parser = null;
-        if (userDefinedParser) {
-            Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
-            ITupleParserFactory parserFactory = (ITupleParserFactory) tupleParserFactoryClass.newInstance();
-            parser = parserFactory.createTupleParser(ctx);
-        } else {
-            if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
-                parser = getDelimitedDataTupleParser((ARecordType) atype);
-
-            } else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
-                parser = getADMDataTupleParser((ARecordType) atype);
-            } else {
-                throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
-            }
-        }
-        return parser;
-
+        return parserFactory.createTupleParser(ctx);
     }
 
     protected void configureFormat() throws Exception {
-        parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
-        userDefinedParser = (parserFactoryClassname != null);
-
+        String parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
         if (parserFactoryClassname == null) {
-            if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
-                parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_DELIMITED_TEXT);
+            String specifiedFormat = configuration.get(KEY_FORMAT);
+            if (specifiedFormat == null) {
+                throw new IllegalArgumentException(" Unspecified data format");
+            } else if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(specifiedFormat)) {
+                parserFactory = getDelimitedDataTupleParserFactory((ARecordType) atype);
             } else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
-                parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_ADM);
+                parserFactory = getADMDataTupleParserFactory((ARecordType) atype);
             } else {
                 throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
             }
+        } else {
+            parserFactory = (ITupleParserFactory) Class.forName(parserFactoryClassname).newInstance();
         }
 
     }
 
-    protected ITupleParser getDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
-        ITupleParser parser;
+    protected ITupleParserFactory getDelimitedDataTupleParserFactory(ARecordType recordType) throws AsterixException {
         int n = recordType.getFieldTypes().length;
         IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
         for (int i = 0; i < n; i++) {
@@ -117,17 +107,12 @@
         }
 
         Character delimiter = delimiterValue.charAt(0);
-        parser = new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter)
-                .createTupleParser(ctx);
-        return parser;
+        return new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter);
     }
 
-    protected ITupleParser getADMDataTupleParser(ARecordType recordType) throws AsterixException {
+    protected ITupleParserFactory getADMDataTupleParserFactory(ARecordType recordType) throws AsterixException {
         try {
-            Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
-            Constructor ctor = tupleParserFactoryClass.getConstructor(ARecordType.class);
-            ITupleParserFactory parserFactory = (ITupleParserFactory) ctor.newInstance(atype);
-            return parserFactory.createTupleParser(ctx);
+            return new AdmSchemafullRecordParserFactory(recordType);
         } catch (Exception e) {
             throw new AsterixException(e);
         }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
index 744683c..1e05b2f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -18,7 +18,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -28,8 +27,6 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.InputSplit;
@@ -43,29 +40,36 @@
 import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.hadoop.util.InputSplitsProxy;
 
+/**
+ * Provides functionality for fetching external data stored in an HDFS instance.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
 public class HDFSAdapter extends FileSystemBasedAdapter {
 
+    private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(HDFSAdapter.class.getName());
 
+    public static final String KEY_HDFS_URL = "hdfs";
+    public static final String KEY_INPUT_FORMAT = "input-format";
+    public static final String INPUT_FORMAT_TEXT = "text-input-format";
+    public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
+
     private Object[] inputSplits;
     private transient JobConf conf;
     private InputSplitsProxy inputSplitsProxy;
-    private static final Map<String, String> formatClassNames = new HashMap<String, String>();
+    private static final Map<String, String> formatClassNames = initInputFormatMap();
 
-    public static final String KEY_HDFS_URL = "hdfs";
-    public static final String KEY_HDFS_PATH = "path";
-    public static final String KEY_INPUT_FORMAT = "input-format";
-
-    public static final String INPUT_FORMAT_TEXT = "text-input-format";
-    public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
-
-    static {
+    private static Map<String, String> initInputFormatMap() {
+        Map<String, String> formatClassNames = new HashMap<String, String>();
         formatClassNames.put(INPUT_FORMAT_TEXT, "org.apache.hadoop.mapred.TextInputFormat");
         formatClassNames.put(INPUT_FORMAT_SEQUENCE, "org.apache.hadoop.mapred.SequenceFileInputFormat");
+        return formatClassNames;
     }
 
     public HDFSAdapter(IAType atype) {
@@ -77,77 +81,89 @@
         configuration = arguments;
         configureFormat();
         configureJobConf();
-        configurePartitionConstraint();
+        configureSplits();
+    }
+
+    private void configureSplits() throws IOException {
+        if (inputSplitsProxy == null) {
+            inputSplits = conf.getInputFormat().getSplits(conf, 0);
+        }
+        inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
     }
 
     private void configurePartitionConstraint() throws Exception {
-        AlgebricksAbsolutePartitionConstraint absPartitionConstraint;
         List<String> locations = new ArrayList<String>();
         Random random = new Random();
-        boolean couldConfigureLocationConstraints = true;
-        if (inputSplitsProxy == null) {
-            InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
-            try {
-                for (InputSplit inputSplit : inputSplits) {
-                    String[] dataNodeLocations = inputSplit.getLocations();
-                    // loop over all replicas until a split location coincides
-                    // with an asterix datanode location
-                    for (String datanodeLocation : dataNodeLocations) {
-                        Set<String> nodeControllersAtLocation = AsterixRuntimeUtil
-                                .getNodeControllersOnHostName(datanodeLocation);
-                        if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.log(Level.INFO, "No node controller found at " + datanodeLocation
-                                        + " will look at replica location");
-                            }
-                            couldConfigureLocationConstraints = false;
-                        } else {
-                            int locationIndex = random.nextInt(nodeControllersAtLocation.size());
-                            String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
-                            locations.add(chosenLocation);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :"
-                                        + chosenLocation);
-                            }
-                            couldConfigureLocationConstraints = true;
-                            break;
+        boolean couldConfigureLocationConstraints = false;
+        try {
+            Map<String, Set<String>> nodeControllers = AsterixRuntimeUtil.getNodeControllerMap();
+            for (Object inputSplit : inputSplits) {
+                String[] dataNodeLocations = ((InputSplit) inputSplit).getLocations();
+                if (dataNodeLocations == null || dataNodeLocations.length == 0) {
+                    throw new IllegalArgumentException("No datanode locations found: check hdfs path");
+                }
+
+                // loop over all replicas until a split location coincides
+                // with an asterix datanode location
+                for (String datanodeLocation : dataNodeLocations) {
+                    Set<String> nodeControllersAtLocation = null;
+                    try {
+                        nodeControllersAtLocation = nodeControllers.get(AsterixRuntimeUtil
+                                .getIPAddress(datanodeLocation));
+                    } catch (UnknownHostException uhe) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.log(Level.WARNING, "Unknown host :" + datanodeLocation);
                         }
+                        continue;
                     }
-
-                    // none of the replica locations coincides with an Asterix
-                    // node controller location.
-                    if (!couldConfigureLocationConstraints) {
-                        List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
-                        int locationIndex = random.nextInt(allNodeControllers.size());
-                        String chosenLocation = allNodeControllers.get(locationIndex);
-                        locations.add(chosenLocation);
-
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.log(Level.INFO, "No local node controller found to process split : " + inputSplit
-                                    + " will be processed by a remote node controller:" + chosenLocation);
+                    if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.log(Level.WARNING, "No node controller found at " + datanodeLocation
+                                    + " will look at replica location");
                         }
+                        couldConfigureLocationConstraints = false;
+                    } else {
+                        int locationIndex = random.nextInt(nodeControllersAtLocation.size());
+                        String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
+                        locations.add(chosenLocation);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :" + chosenLocation);
+                        }
+                        couldConfigureLocationConstraints = true;
                         break;
                     }
                 }
-                if (couldConfigureLocationConstraints) {
-                    partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
-                } else {
-                    partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
+
+                /*
+                 * None of the replica locations coincides with an Asterix
+                 * node controller location.
+                 */
+                if (!couldConfigureLocationConstraints) {
+                    List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
+                    int locationIndex = random.nextInt(allNodeControllers.size());
+                    String chosenLocation = allNodeControllers.get(locationIndex);
+                    locations.add(chosenLocation);
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.log(Level.SEVERE, "No local node controller found to process split : " + inputSplit
+                                + " will be processed by a remote node controller:" + chosenLocation);
+                    }
                 }
-            } catch (UnknownHostException e) {
-                partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
             }
-            inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
+            partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.SEVERE)) {
+                LOGGER.log(Level.SEVERE, "Encountered exception :" + e + " using count constraints");
+            }
+            partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
         }
     }
 
     private JobConf configureJobConf() throws Exception {
         conf = new JobConf();
-        conf.set("fs.default.name", configuration.get(KEY_HDFS_URL));
+        conf.set("fs.default.name", configuration.get(KEY_HDFS_URL).trim());
         conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
         conf.setClassLoader(HDFSAdapter.class.getClassLoader());
-        conf.set("mapred.input.dir", configuration.get(KEY_HDFS_PATH));
-        conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT)));
+        conf.set("mapred.input.dir", configuration.get(KEY_PATH).trim());
+        conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT).trim()));
         return conf;
     }
 
@@ -199,11 +215,10 @@
         return reporter;
     }
 
+    @SuppressWarnings("unchecked")
     @Override
     public InputStream getInputStream(int partition) throws IOException {
-        Path path = new Path(inputSplits[partition].toString());
         try {
-            FileSystem fs = FileSystem.get(conf);
             InputStream inputStream;
             if (conf.getInputFormat() instanceof SequenceFileInputFormat) {
                 SequenceFileInputFormat format = (SequenceFileInputFormat) conf.getInputFormat();
@@ -227,61 +242,71 @@
 
     }
 
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        if (partitionConstraint == null) {
+            configurePartitionConstraint();
+        }
+        return partitionConstraint;
+    }
+
 }
 
 class HDFSStream extends InputStream {
 
-    private ByteBuffer buffer;
-    private int capacity;
-    private RecordReader reader;
-    private boolean readNext = true;
+    private RecordReader<Object, Text> reader;
     private final Object key;
     private final Text value;
+    private boolean hasMore = false;
+    private static final int EOL = "\n".getBytes()[0];
+    private Text pendingValue = null;
 
-    public HDFSStream(RecordReader reader, IHyracksTaskContext ctx) throws Exception {
-        capacity = ctx.getFrameSize();
-        buffer = ByteBuffer.allocate(capacity);
+    public HDFSStream(RecordReader<Object, Text> reader, IHyracksTaskContext ctx) throws Exception {
         this.reader = reader;
         key = reader.createKey();
         try {
             value = (Text) reader.createValue();
         } catch (ClassCastException cce) {
-            throw new Exception("context is not of type org.apache.hadoop.io.Text"
+            throw new Exception("value is not of type org.apache.hadoop.io.Text"
                     + " type not supported in sequence file format", cce);
         }
-        initialize();
-    }
-
-    private void initialize() throws Exception {
-        boolean hasMore = reader.next(key, value);
-        if (!hasMore) {
-            buffer.limit(0);
-        } else {
-            buffer.position(0);
-            buffer.limit(capacity);
-            buffer.put(value.getBytes());
-            buffer.put("\n".getBytes());
-            buffer.flip();
-        }
     }
 
     @Override
-    public int read() throws IOException {
-        if (!buffer.hasRemaining()) {
-            boolean hasMore = reader.next(key, value);
-            if (!hasMore) {
-                return -1;
-            }
-            buffer.position(0);
-            buffer.limit(capacity);
-            buffer.put(value.getBytes());
-            buffer.put("\n".getBytes());
-            buffer.flip();
-            return buffer.get();
-        } else {
-            return buffer.get();
+    public int read(byte[] buffer, int offset, int len) throws IOException {
+        int numBytes = 0;
+        if (pendingValue != null) {
+            System.arraycopy(pendingValue.getBytes(), 0, buffer, offset + numBytes, pendingValue.getLength());
+            buffer[offset + numBytes + pendingValue.getLength()] = (byte) EOL;
+            numBytes += pendingValue.getLength() + 1;
+            pendingValue = null;
         }
 
+        while (numBytes < len) {
+            hasMore = reader.next(key, value);
+            if (!hasMore) {
+                return (numBytes == 0) ? -1 : numBytes;
+            }
+            int sizeOfNextTuple = value.getLength() + 1;
+            if (numBytes + sizeOfNextTuple > len) {
+                // The tuple does not fit in the current buffer, but the reader
+                // has already moved past it, so hold on to the fetched value
+                // and return it on a subsequent read call.
+                pendingValue = value;
+                break;
+            } else {
+                System.arraycopy(value.getBytes(), 0, buffer, offset + numBytes, value.getLength());
+                buffer[offset + numBytes + value.getLength()] = (byte) EOL;
+                numBytes += sizeOfNextTuple;
+            }
+        }
+        return numBytes;
+    }
+
+    @Override
+    public int read() throws IOException {
+        throw new NotImplementedException("Use read(byte[], int, int)");
     }
 
 }
\ No newline at end of file
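The rewritten HDFSStream turns record-at-a-time input (one Text value per reader.next() call) into a byte stream of newline-terminated tuples, holding back a record that does not fit in the caller's buffer and emitting it first on the next call. Below is a minimal, self-contained sketch of that buffering contract using a plain iterator in place of the Hadoop RecordReader; SimpleRecordStream and its names are illustrative only, and records are assumed smaller than the caller's buffer (just as the patch assumes a tuple fits in a frame).

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Iterator;

// Hypothetical stand-in for HDFSStream: emits newline-terminated records and
// holds back a record that does not fit until the next read() call.
class SimpleRecordStream extends InputStream {

    private static final byte EOL = (byte) '\n';
    private final Iterator<byte[]> records;
    private byte[] pending = null; // record fetched but not yet returned

    SimpleRecordStream(Iterator<byte[]> records) {
        this.records = records;
    }

    @Override
    public int read(byte[] buffer, int offset, int len) throws IOException {
        int numBytes = 0;
        if (pending != null) {
            // flush the record held over from the previous call
            System.arraycopy(pending, 0, buffer, offset, pending.length);
            buffer[offset + pending.length] = EOL;
            numBytes = pending.length + 1;
            pending = null;
        }
        while (numBytes < len) {
            if (!records.hasNext()) {
                return (numBytes == 0) ? -1 : numBytes; // end of input
            }
            byte[] value = records.next();
            if (numBytes + value.length + 1 > len) {
                pending = value; // iterator already advanced; keep for the next call
                break;
            }
            System.arraycopy(value, 0, buffer, offset + numBytes, value.length);
            buffer[offset + numBytes + value.length] = EOL;
            numBytes += value.length + 1;
        }
        return numBytes;
    }

    @Override
    public int read() {
        throw new UnsupportedOperationException("Use read(byte[], int, int)");
    }

    public static void main(String[] args) throws IOException {
        InputStream in = new SimpleRecordStream(
                Arrays.asList("a".getBytes(), "bb".getBytes(), "ccc".getBytes()).iterator());
        byte[] buf = new byte[4];
        int n;
        while ((n = in.read(buf, 0, buf.length)) != -1) {
            System.out.print(new String(buf, 0, n)); // prints a, bb, ccc on separate lines
        }
    }
}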
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
index ffcc658..3731eba 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -17,11 +17,17 @@
 import java.util.Map;
 
 import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
+/**
+ * Provides the functionality of fetching data in the form of ADM records from a Hive dataset.
+ */
 public class HiveAdapter extends AbstractDatasourceAdapter {
 
+    private static final long serialVersionUID = 1L;
+
     public static final String HIVE_DATABASE = "database";
     public static final String HIVE_TABLE = "table";
     public static final String HIVE_HOME = "hive-home";
@@ -56,7 +62,7 @@
             tablePath = configuration.get(HIVE_WAREHOUSE_DIR) + "/" + tablePath + ".db" + "/"
                     + configuration.get(HIVE_TABLE);
         }
-        configuration.put(HDFSAdapter.KEY_HDFS_PATH, tablePath);
+        configuration.put(HDFSAdapter.KEY_PATH, tablePath);
         if (!configuration.get(KEY_FORMAT).equals(FORMAT_DELIMITED_TEXT)) {
             throw new IllegalArgumentException("format" + configuration.get(KEY_FORMAT) + " is not supported");
         }
@@ -81,4 +87,9 @@
         hdfsAdapter.start(partition, writer);
     }
 
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        return hdfsAdapter.getPartitionConstraint();
+    }
+
 }
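For reference, the table path handed to HDFSAdapter through KEY_PATH follows the Hive warehouse layout: the branch visible in the hunk places a non-default database's table under "<warehouse>/<database>.db/<table>". A small stand-alone sketch of that rule; the default-database branch is an assumption here, since it is not visible in the hunk.

// Illustrative only: mirrors the path construction visible in the hunk above.
class HiveTablePath {

    static String tablePath(String warehouseDir, String database, String table) {
        if ("default".equals(database)) {
            return warehouseDir + "/" + table; // assumed layout for the default database
        }
        return warehouseDir + "/" + database + ".db" + "/" + table;
    }

    public static void main(String[] args) {
        // prints /user/hive/warehouse/sales.db/orders
        System.out.println(tablePath("/user/hive/warehouse", "sales", "orders"));
    }
}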
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
index ccfb9bd..b0dc32f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -29,7 +29,6 @@
  */
 public interface IDatasourceAdapter extends Serializable {
 
-  
     /**
      * An adapter can be used to read from an external data source and may also
      * allow writing to the external data source. This enum type indicates the
@@ -46,7 +45,6 @@
         READ_WRITE
     }
 
-
     /**
      * Returns the type of adapter indicating if the adapter can be used for
      * reading from an external data source or writing to an external data
@@ -75,19 +73,6 @@
     public String getAdapterProperty(String propertyKey);
 
     /**
-     * Allows setting a configuration property of the adapter with a specified
-     * value.
-     * 
-     * @caller Used by the wrapper operator to modify the behavior of the
-     *         adapter, if required.
-     * @param property
-     *            the property to be set
-     * @param value
-     *            the value for the property
-     */
-    public void setAdapterProperty(String property, String value);
-
-    /**
      * Configures the IDatasourceAdapter instance.
      * 
      * @caller Scenario 1) Called during compilation of DDL statement that
@@ -131,9 +116,8 @@
      * @Caller The wrapper operator configures its partition constraints from
      *         the constraints obtained from the adapter.
      */
-    public AlgebricksPartitionConstraint getPartitionConstraint();
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
 
-   
     /**
      * Allows the adapter to establish connection with the external data source
      * expressing intent for data and providing any configuration parameters
@@ -148,5 +132,18 @@
      */
     public void initialize(IHyracksTaskContext ctx) throws Exception;
 
+    /**
+     * Triggers the adapter to begin ingestion of data from the external source.
+     * 
+     * @param partition
+     *            The adapter may run with a degree of parallelism;
+     *            partition identifies the i'th parallel instance.
+     * @param writer
+     *            The frame writer used by the adapter. The adapter packs the
+     *            bytes fetched from the external source into frames and
+     *            forwards the frames to an upstream receiving operator via
+     *            this instance of IFrameWriter.
+     * @throws Exception
+     */
     public void start(int partition, IFrameWriter writer) throws Exception;
 }
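The interface prescribes a two-phase lifecycle: configure() runs once when the DDL statement is compiled, getPartitionConstraint() tells the compiler where runtime instances may be placed, and initialize()/start() run on the node controllers. A simplified sketch of that call order; SimpleAdapter and AdapterDriver are hypothetical stand-ins, and the real runtime passes a Hyracks task context and an IFrameWriter rather than these placeholders.

import java.util.Map;

// Hypothetical, simplified mirror of the IDatasourceAdapter lifecycle.
interface SimpleAdapter {
    void configure(Map<String, String> arguments) throws Exception; // compile time (DDL)
    void initialize(Object taskContext) throws Exception;           // runtime, per instance
    void start(int partition, Appendable writer) throws Exception;  // runtime, per partition
}

class AdapterDriver {
    // Sketch of the wrapper-operator call order; in reality each parallel
    // instance is a separate deserialized copy of the configured adapter.
    static void run(SimpleAdapter adapter, Map<String, String> args, int parallelism) throws Exception {
        adapter.configure(args); // once, at compile time
        for (int partition = 0; partition < parallelism; partition++) {
            adapter.initialize(null /* task context supplied by the runtime */);
            adapter.start(partition, System.out);
        }
    }
}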
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
deleted file mode 100644
index 282be78..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public interface IFeedDatasourceAdapter extends IDatasourceAdapter {
-
-    /**
-     * Represents the kind of data exchange that happens between the adapter and
-     * the external data source. The data exchange can be either pull based or
-     * push based. In the former case (pull), the request for data transfer is
-     * initiated by the adapter. In the latter case (push) the adapter is
-     * required to submit an initial request to convey intent for data.
-     * Subsequently all data transfer requests are initiated by the external
-     * data source.
-     */
-    public enum AdapterDataFlowType {
-        PULL,
-        PUSH
-    }
-
-    /**
-     * An adapter can be a pull or a push based adapter. This method returns the
-     * kind of adapter, that is whether it is a pull based adapter or a push
-     * based adapter.
-     * 
-     * @caller Compiler or wrapper operator: Compiler uses this API to choose
-     *         the right wrapper (push-based) operator that wraps around the
-     *         adapter and provides an iterator interface. If we decide to form
-     *         a single operator that handles both pull and push based adapter
-     *         kinds, then this method will be used by the wrapper operator for
-     *         switching between the logic for interacting with a pull based
-     *         adapter versus a push based adapter.
-     * @return AdapterDataFlowType
-     */
-    public AdapterDataFlowType getAdapterDataFlowType();
-
-    public ARecordType getAdapterOutputType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
new file mode 100644
index 0000000..a1eb075
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public interface IPullBasedFeedClient {
+
+    /**
+     * Writes the next fetched tuple into the provided instance of DataOutput.
+     * 
+     * @param dataOutput
+     *            The receiving channel for the feed client to write ADM records to.
+     * @return true if a record was written to the DataOutput instance;
+     *         false if no record was written, indicating that no new data is available.
+     * @throws AsterixException
+     */
+    public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
+
+    /**
+     * Provides logic for any corrective action that the feed client needs to execute upon
+     * encountering an exception.
+     * 
+     * @param e
+     *            The exception encountered during fetching of data from external source
+     * @throws AsterixException
+     */
+    public void resetOnFailure(Exception e) throws AsterixException;
+
+    /**
+     * Terminates the feed; that is, data ingestion activity ceases.
+     * 
+     * @throws Exception
+     */
+    public void stop() throws Exception;
+
+}
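A toy implementation of the new interface against an in-memory queue, sketching the contract documented above; it assumes the classes from this patch are on the classpath, and it writes raw UTF-8 bytes where a real client (Twitter, RSS) writes serialized ADM records.

import java.io.DataOutput;
import java.io.IOException;
import java.util.LinkedList;
import java.util.Queue;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.external.dataset.adapter.IPullBasedFeedClient;

// Illustrative only: drains an in-memory queue of lines.
class QueueFeedClient implements IPullBasedFeedClient {

    private final Queue<String> lines = new LinkedList<String>();
    private volatile boolean stopped = false;

    void offer(String line) {
        lines.add(line);
    }

    @Override
    public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
        String next = stopped ? null : lines.poll();
        if (next == null) {
            return false; // no new data available
        }
        try {
            dataOutput.write(next.getBytes("UTF-8"));
            return true; // exactly one record written
        } catch (IOException e) {
            throw new AsterixException(e);
        }
    }

    @Override
    public void resetOnFailure(Exception e) throws AsterixException {
        lines.clear(); // corrective action: drop buffered input and start over
    }

    @Override
    public void stop() {
        stopped = true;
    }
}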
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
new file mode 100644
index 0000000..3a4b97b
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Implemented by a datasource adapter that has a fixed output type.
+ * Example: {@link PullBasedTwitterAdapter}
+ */
+public interface ITypedDatasourceAdapter extends IDatasourceAdapter {
+
+    public ARecordType getAdapterOutputType();
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
index 324e227..bcc90c8 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -23,75 +23,86 @@
 
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 
+/**
+ * An NCFileSystemAdapter reads external data residing on the local file
+ * system of an NC (node controller).
+ */
 public class NCFileSystemAdapter extends FileSystemBasedAdapter {
 
-	private static final long serialVersionUID = -4154256369973615710L;
-	private FileSplit[] fileSplits;
+    private static final long serialVersionUID = 1L;
+    private FileSplit[] fileSplits;
 
-	public static final String KEY_SPLITS = "path";
-	
-	public NCFileSystemAdapter(IAType atype) {
-		super(atype);
-	}
+    public NCFileSystemAdapter(IAType atype) {
+        super(atype);
+    }
 
-	@Override
-	public void configure(Map<String, String> arguments) throws Exception {
-		this.configuration = arguments;
-		String[] splits = arguments.get(KEY_SPLITS).split(",");
-		configureFileSplits(splits);
-		configurePartitionConstraint();
-		configureFormat();
-	}
+    @Override
+    public void configure(Map<String, String> arguments) throws Exception {
+        this.configuration = arguments;
+        String[] splits = arguments.get(KEY_PATH).split(",");
+        configureFileSplits(splits);
+        configureFormat();
+    }
 
-	@Override
-	public void initialize(IHyracksTaskContext ctx) throws Exception {
-		this.ctx = ctx;
-	}
+    @Override
+    public void initialize(IHyracksTaskContext ctx) throws Exception {
+        this.ctx = ctx;
+    }
 
-	@Override
-	public AdapterType getAdapterType() {
-		return AdapterType.READ;
-	}
+    @Override
+    public AdapterType getAdapterType() {
+        return AdapterType.READ;
+    }
 
-	private void configureFileSplits(String[] splits) {
-		if (fileSplits == null) {
-			fileSplits = new FileSplit[splits.length];
-			String nodeName;
-			String nodeLocalPath;
-			int count = 0;
-			for (String splitPath : splits) {
-				nodeName = splitPath.split(":")[0];
-				nodeLocalPath = splitPath.split("://")[1];
-				FileSplit fileSplit = new FileSplit(nodeName,
-						new FileReference(new File(nodeLocalPath)));
-				fileSplits[count++] = fileSplit;
-			}
-		}
-	}
+    private void configureFileSplits(String[] splits) {
+        if (fileSplits == null) {
+            fileSplits = new FileSplit[splits.length];
+            String nodeName;
+            String nodeLocalPath;
+            int count = 0;
+            String trimmedValue;
+            for (String splitPath : splits) {
+                trimmedValue = splitPath.trim();
+                nodeName = trimmedValue.split(":")[0];
+                nodeLocalPath = trimmedValue.split("://")[1];
+                FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
+                fileSplits[count++] = fileSplit;
+            }
+        }
+    }
 
-	private void configurePartitionConstraint() {
-		String[] locs = new String[fileSplits.length];
-		for (int i = 0; i < fileSplits.length; i++) {
-			locs[i] = fileSplits[i].getNodeName();
-		}
-		partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
-	}
+    private void configurePartitionConstraint() {
+        String[] locs = new String[fileSplits.length];
+        for (int i = 0; i < fileSplits.length; i++) {
+            locs[i] = fileSplits[i].getNodeName();
+        }
+        partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
+    }
 
-	@Override
-	public InputStream getInputStream(int partition) throws IOException {
-		FileSplit split = fileSplits[partition];
-		File inputFile = split.getLocalFile().getFile();
-		InputStream in;
-		try {
-			in = new FileInputStream(inputFile);
-			return in;
-		} catch (FileNotFoundException e) {
-			throw new IOException(e);
-		}
-	}
+    @Override
+    public InputStream getInputStream(int partition) throws IOException {
+        FileSplit split = fileSplits[partition];
+        File inputFile = split.getLocalFile().getFile();
+        InputStream in;
+        try {
+            in = new FileInputStream(inputFile);
+            return in;
+        } catch (FileNotFoundException e) {
+            throw new IOException(e);
+        }
+    }
 
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        if (partitionConstraint == null) {
+            configurePartitionConstraint();
+        }
+        return partitionConstraint;
+    }
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
index 326775a..38686c2 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
@@ -17,17 +17,25 @@
 import java.nio.ByteBuffer;
 
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 
-public abstract class PullBasedAdapter extends AbstractFeedDatasourceAdapter implements IDatasourceAdapter {
+/**
+ * Abstract base class for all pull-based external data adapters.
+ * Captures the common logic for obtaining bytes from an external source
+ * and packing them into frames as tuples.
+ */
+public abstract class PullBasedAdapter extends AbstractDatasourceAdapter implements ITypedDatasourceAdapter {
+
+    private static final long serialVersionUID = 1L;
 
     protected ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
     protected IPullBasedFeedClient pullBasedFeedClient;
+    protected ARecordType adapterOutputType;
     private FrameTupleAppender appender;
     private ByteBuffer frame;
 
@@ -40,10 +48,18 @@
         appender.reset(frame, true);
 
         pullBasedFeedClient = getFeedClient(partition);
+        boolean moreData = false;
         while (true) {
             tupleBuilder.reset();
             try {
-                pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput()); // nextTuple is a blocking call.
+                moreData = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput());
+                if (moreData) {
+                    tupleBuilder.addFieldEndOffset();
+                    appendTupleToFrame(writer);
+                } else {
+                    FrameUtils.flushFrame(frame, writer);
+                    break;
+                }
             } catch (Exception failureException) {
                 try {
                     pullBasedFeedClient.resetOnFailure(failureException);
@@ -51,14 +67,15 @@
                 } catch (Exception recoveryException) {
                     throw new Exception(recoveryException);
                 }
-
             }
-            tupleBuilder.addFieldEndOffset();
-            appendTupleToFrame(writer);
-
         }
     }
 
+    /**
+     * Allows an adapter to handle a runtime exception.
+     * 
+     * @param e
+     *            the exception encountered at runtime
+     * @throws AsterixException
+     */
     public void resetOnFailure(Exception e) throws AsterixException {
         pullBasedFeedClient.resetOnFailure(e);
         tupleBuilder.reset();
@@ -75,4 +92,9 @@
         }
     }
 
+    @Override
+    public ARecordType getAdapterOutputType() {
+        return adapterOutputType;
+    }
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
new file mode 100644
index 0000000..17ecd86
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
+
+    protected ARecordSerializerDeserializer recordSerDe;
+    protected AMutableRecord mutableRecord;
+    protected boolean messageReceived;
+    protected boolean continueIngestion = true;
+
+    public abstract boolean setNextRecord() throws Exception;
+
+    @Override
+    public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
+        try {
+            boolean newData = setNextRecord();
+            if (newData && continueIngestion) {
+                IAType t = mutableRecord.getType();
+                ATypeTag tag = t.getTypeTag();
+                try {
+                    dataOutput.writeByte(tag.serialize());
+                } catch (IOException e) {
+                    throw new HyracksDataException(e);
+                }
+                recordSerDe.serialize(mutableRecord, dataOutput);
+                return true;
+            }
+            return false;
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+
+    }
+
+    @Override
+    public void stop() {
+        continueIngestion = false;
+    }
+}
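nextTuple above frames each record as a one-byte ADM type tag followed by the serialized record body, so that a reader can dispatch on the tag before parsing. A self-contained sketch of that framing with plain JDK streams; RECORD_TAG is a made-up value, not the real ATypeTag encoding.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustrative only: tag-prefixed record framing.
class TagPrefixedWriter {

    static final byte RECORD_TAG = 24; // hypothetical tag value

    static byte[] writeTagged(byte[] recordBody) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeByte(RECORD_TAG); // type tag first, so readers can dispatch on it
        out.write(recordBody);     // then the record payload
        out.flush();
        return bos.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] framed = writeTagged("record-bytes".getBytes("UTF-8"));
        System.out.println(framed.length + " bytes, tag=" + framed[0]);
    }
}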
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
index a7fd3dc..ebfbcad 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -17,103 +17,92 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.PullBasedTwitterFeedClient;
 import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
-public class PullBasedTwitterAdapter extends PullBasedAdapter implements
-		IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * An adapter that provides the functionality of receiving tweets from the
+ * Twitter service in the form of ADM-formatted records.
+ */
+public class PullBasedTwitterAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
 
-	private int interval = 10;
-	private boolean stopRequested = false;
-	private boolean alterRequested = false;
-	private Map<String, String> alteredParams = new HashMap<String, String>();
-	private ARecordType recordType;
+    private static final long serialVersionUID = 1L;
+
+    public static final String QUERY = "query";
+    public static final String INTERVAL = "interval";
 
-	private String[] fieldNames = { "id", "username", "location", "text",
-			"timestamp" };
-	private IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING,
-			BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+    private boolean alterRequested = false;
+    private Map<String, String> alteredParams = new HashMap<String, String>();
+    private ARecordType recordType;
 
-	private PullBasedTwitterFeedClient tweetClient;
+    private PullBasedTwitterFeedClient tweetClient;
 
-	public static final String QUERY = "query";
-	public static final String INTERVAL = "interval";
+    @Override
+    public IPullBasedFeedClient getFeedClient(int partition) {
+        return tweetClient;
+    }
 
-	@Override
-	public IPullBasedFeedClient getFeedClient(int partition) {
-		return tweetClient;
-	}
+    @Override
+    public void configure(Map<String, String> arguments) throws Exception {
+        configuration = arguments;
+        String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+                BuiltinType.ASTRING };
+        recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes, false);
+    }
 
-	@Override
-	public void configure(Map<String, String> arguments) throws Exception {
-		configuration = arguments;
-		partitionConstraint = new AlgebricksCountPartitionConstraint(1);
-		interval = Integer.parseInt(arguments.get(INTERVAL));
-		recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes,
-				false);
-	}
+    @Override
+    public void initialize(IHyracksTaskContext ctx) throws Exception {
+        this.ctx = ctx;
+        tweetClient = new PullBasedTwitterFeedClient(ctx, this);
+    }
 
-	@Override
-	public void initialize(IHyracksTaskContext ctx) throws Exception {
-		this.ctx = ctx;
-		tweetClient = new PullBasedTwitterFeedClient(ctx, this);
-	}
+    @Override
+    public AdapterType getAdapterType() {
+        return AdapterType.READ;
+    }
 
-	@Override
-	public AdapterType getAdapterType() {
-		return adapterType.READ;
-	}
+    @Override
+    public void stop() {
+        tweetClient.stop();
+    }
 
-	@Override
-	public void suspend() throws Exception {
-		// TODO Auto-generated method stub
+    @Override
+    public void alter(Map<String, String> properties) {
+        alterRequested = true;
+        this.alteredParams = properties;
+    }
 
-	}
+    public boolean isAlterRequested() {
+        return alterRequested;
+    }
 
-	@Override
-	public void resume() throws Exception {
-		// TODO Auto-generated method stub
+    public Map<String, String> getAlteredParams() {
+        return alteredParams;
+    }
 
-	}
+    public void postAlteration() {
+        alteredParams = null;
+        alterRequested = false;
+    }
 
-	@Override
-	public void stop() throws Exception {
-		stopRequested = true;
-	}
+    @Override
+    public ARecordType getAdapterOutputType() {
+        return recordType;
+    }
 
-	public boolean isStopRequested() {
-		return stopRequested;
-	}
-
-	@Override
-	public void alter(Map<String, String> properties) throws Exception {
-		alterRequested = true;
-		this.alteredParams = properties;
-	}
-
-	public boolean isAlterRequested() {
-		return alterRequested;
-	}
-
-	public Map<String, String> getAlteredParams() {
-		return alteredParams;
-	}
-
-	public void postAlteration() {
-		alteredParams = null;
-		alterRequested = false;
-	}
-
-	@Override
-	public ARecordType getAdapterOutputType() {
-		return recordType;
-	}
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        if (partitionConstraint == null) {
+            partitionConstraint = new AlgebricksCountPartitionConstraint(1);
+        }
+        return partitionConstraint;
+    }
 
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
similarity index 78%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
index c1ea800..2a07472 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
 
 import java.util.LinkedList;
 import java.util.Map;
@@ -12,13 +26,17 @@
 import twitter4j.TwitterFactory;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
 import edu.uci.ics.asterix.om.base.AMutableRecord;
 import edu.uci.ics.asterix.om.base.AMutableString;
 import edu.uci.ics.asterix.om.base.IAObject;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
+/**
+ * An implementation of @see {PullBasedFeedClient} for the Twitter service.
+ * The feed client fetches data from Twitter service by sending request at
+ * regular (configurable) interval.
+ */
 public class PullBasedTwitterFeedClient extends PullBasedFeedClient {
 
     private String keywords;
@@ -54,7 +72,7 @@
     private Tweet getNextTweet() throws TwitterException, InterruptedException {
         if (tweetBuffer.isEmpty()) {
             QueryResult result;
-            Thread.currentThread().sleep(1000 * requestInterval);
+            Thread.sleep(1000 * requestInterval);
             result = twitter.search(query);
             tweetBuffer.addAll(result.getTweets());
         }
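The change from Thread.currentThread().sleep(...) to Thread.sleep(...) does not alter behavior, since sleep is static and always suspends the calling thread, but the old form misleadingly suggested a target thread could be chosen. A minimal rate-limited polling loop in the same style as getNextTweet (the interval is in seconds, as in the adapter; the fetch is a stand-in for twitter.search(query)):

// Illustrative only: rate-limited polling.
class RateLimitedPoller {

    static void poll(int requestIntervalSeconds, int rounds) throws InterruptedException {
        for (int i = 0; i < rounds; i++) {
            Thread.sleep(1000L * requestIntervalSeconds); // wait before each request
            System.out.println("fetching batch " + i);    // stand-in for the remote call
        }
    }

    public static void main(String[] args) throws InterruptedException {
        poll(1, 3);
    }
}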
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
index 124a9ed..611183c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -19,24 +19,26 @@
 import java.util.List;
 import java.util.Map;
 
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.RSSFeedClient;
 import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.BuiltinType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
-public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * RSSFeedAdapter provides the functionality of fetching an RSS-based feed.
+ */
+public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
+
+    private static final long serialVersionUID = 1L;
 
     private List<String> feedURLs = new ArrayList<String>();
     private boolean isStopRequested = false;
     private boolean isAlterRequested = false;
     private Map<String, String> alteredParams = new HashMap<String, String>();
     private String id_prefix = "";
-    private int interval = 10;
     private ARecordType recordType;
 
     private IPullBasedFeedClient rssFeedClient;
@@ -53,40 +55,18 @@
     }
 
     @Override
-    public void alter(Map<String, String> properties) throws Exception {
+    public void alter(Map<String, String> properties) {
         isAlterRequested = true;
         this.alteredParams = properties;
         reconfigure(properties);
     }
 
-    public void postAlteration() {
-        alteredParams = null;
-        isAlterRequested = false;
-    }
-
     @Override
-    public void suspend() throws Exception {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public void resume() throws Exception {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public void stop() throws Exception {
+    public void stop() {
         isStopRequested = true;
     }
 
     @Override
-    public AdapterDataFlowType getAdapterDataFlowType() {
-        return AdapterDataFlowType.PULL;
-    }
-
-    @Override
     public AdapterType getAdapterType() {
         return AdapterType.READ;
     }
@@ -151,4 +131,12 @@
         return recordType;
     }
 
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        if (partitionConstraint == null) {
+            configurePartitionConstraints();
+        }
+        return partitionConstraint;
+    }
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
similarity index 86%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
index 9f04500..366b4af 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
 
 import java.net.MalformedURLException;
 import java.net.URL;
@@ -15,12 +29,15 @@
 import com.sun.syndication.fetcher.impl.HashMapFeedInfoCache;
 import com.sun.syndication.fetcher.impl.HttpURLFeedFetcher;
 
-import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
 import edu.uci.ics.asterix.om.base.AMutableRecord;
 import edu.uci.ics.asterix.om.base.AMutableString;
 import edu.uci.ics.asterix.om.base.IAObject;
 import edu.uci.ics.asterix.om.types.ARecordType;
 
+/**
+ * An implementation of {@link PullBasedFeedClient} responsible for
+ * fetching from an RSS feed source at a regular interval.
+ */
 @SuppressWarnings("rawtypes")
 public class RSSFeedClient extends PullBasedFeedClient {
 
@@ -93,6 +110,7 @@
         }
     }
 
+    @SuppressWarnings("unchecked")
     private void fetchFeed() {
         try {
             System.err.println("Retrieving feed " + feedURL);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
similarity index 67%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
index d34af73..537bf07 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,31 +12,26 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
 
 import java.util.Map;
 
+/**
+ * A feed control message containing the altered values for
+ * adapter configuration parameters. This message is dispatched
+ * to all runtime instances of the feed's adapter.
+ */
 public class AlterFeedMessage extends FeedMessage {
 
+    private static final long serialVersionUID = 1L;
+
     private final Map<String, String> alteredConfParams;
 
     public AlterFeedMessage(Map<String, String> alteredConfParams) {
         super(MessageType.ALTER);
-        messageResponseMode = MessageResponseMode.SYNCHRONOUS;
         this.alteredConfParams = alteredConfParams;
     }
 
-    public AlterFeedMessage(MessageResponseMode mode, Map<String, String> alteredConfParams) {
-        super(MessageType.ALTER);
-        messageResponseMode = mode;
-        this.alteredConfParams = alteredConfParams;
-    }
-
-    @Override
-    public MessageResponseMode getMessageResponseMode() {
-        return messageResponseMode;
-    }
-
     @Override
     public MessageType getMessageType() {
         return MessageType.ALTER;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
similarity index 75%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
index 19076c4..b1889ee 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -11,19 +11,26 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
-*/
-package edu.uci.ics.asterix.feed.mgmt;
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
 
 import java.io.Serializable;
 
+/**
+ * A unique identifier for a feed (dataset).
+ */
 public class FeedId implements Serializable {
 
+    private static final long serialVersionUID = 1L;
+
     private final String dataverse;
     private final String dataset;
+    private final int hashcode;
 
     public FeedId(String dataverse, String dataset) {
         this.dataset = dataset;
         this.dataverse = dataverse;
+        this.hashcode = (dataverse + "." + dataset).hashCode();
     }
 
     public String getDataverse() {
@@ -47,7 +54,12 @@
 
     @Override
     public int hashCode() {
-        return dataverse.hashCode() + dataset.hashCode();
+        return hashcode;
+    }
+
+    @Override
+    public String toString() {
+        return dataverse + "." + dataset;
     }
 
 }
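FeedId can precompute its hash because both fields are final, so the "dataverse.dataset" key never changes after construction. A minimal analogue of the pattern; QualifiedName is hypothetical, and FeedId's own equals (in unchanged context lines of the hunk) is assumed to compare both fields.

// Illustrative only: immutable composite identifier with a precomputed hash.
final class QualifiedName {

    private final String namespace;
    private final String name;
    private final int hashcode;

    QualifiedName(String namespace, String name) {
        this.namespace = namespace;
        this.name = name;
        this.hashcode = (namespace + "." + name).hashCode(); // safe: fields are immutable
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof QualifiedName)) {
            return false;
        }
        QualifiedName other = (QualifiedName) o;
        return namespace.equals(other.namespace) && name.equals(other.name);
    }

    @Override
    public int hashCode() {
        return hashcode;
    }

    @Override
    public String toString() {
        return namespace + "." + name;
    }
}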
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
new file mode 100644
index 0000000..29e4486
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * Handles the (de-)registration of feed message queues and the delivery of control messages to registered feeds.
+ */
+public class FeedManager implements IFeedManager {
+
+    public static final FeedManager INSTANCE = new FeedManager();
+
+    private FeedManager() {
+
+    }
+
+    private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
+
+    @Override
+    public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException {
+        Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
+        try {
+            if (operatorQueues != null) {
+                for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
+                    queue.put(feedMessage);
+                }
+            } else {
+                throw new AsterixException("Unable to deliver message. Unknown feed :" + feedId);
+            }
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    @Override
+    public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+        Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
+        if (feedQueues == null) {
+            feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
+        }
+        feedQueues.add(queue);
+        outGoingMsgQueueMap.put(feedId, feedQueues);
+    }
+
+    @Override
+    public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+        Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
+        if (feedQueues == null || !feedQueues.contains(queue)) {
+            throw new IllegalArgumentException(" Unable to de-register feed message queue. Unknown feedId " + feedId);
+        }
+        feedQueues.remove(queue);
+    }
+
+}
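
A minimal usage sketch of the new feed lifecycle API, assuming an operator runtime that owns its own inbox queue (names are illustrative; exception handling for deliverMessage and take is omitted):

    FeedId feedId = new FeedId("feeds", "TwitterFeed");
    LinkedBlockingQueue<IFeedMessage> inbox = new LinkedBlockingQueue<IFeedMessage>();

    // an operator registers its inbox before it starts processing
    FeedManager.INSTANCE.registerFeedMsgQueue(feedId, inbox);

    // a control message fans out to every queue registered for the feed
    FeedManager.INSTANCE.deliverMessage(feedId, new FeedMessage(IFeedMessage.MessageType.STOP));
    IFeedMessage msg = inbox.take(); // the operator consumes and acts on the message

    // a finishing operator must de-register so it stops receiving messages
    FeedManager.INSTANCE.unregisterFeedMsgQueue(feedId, inbox);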
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
index 1f1a020..af84d4f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,37 +12,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
 
-public  class FeedMessage implements IFeedMessage {
+/**
+ * A control message that can be sent to the runtime instance of a 
+ * feed's adapter. 
+ */
+public class FeedMessage implements IFeedMessage {
 
-    protected MessageResponseMode messageResponseMode = MessageResponseMode.SYNCHRONOUS;
+    private static final long serialVersionUID = 1L;
+
     protected MessageType messageType;
-    
 
-    public FeedMessage(MessageType messageType){
+    public FeedMessage(MessageType messageType) {
         this.messageType = messageType;
     }
 
-
-    public MessageResponseMode getMessageResponseMode() {
-        return messageResponseMode;
-    }
-
-
-    public void setMessageResponseMode(MessageResponseMode messageResponseMode) {
-        this.messageResponseMode = messageResponseMode;
-    }
-
-
     public MessageType getMessageType() {
         return messageType;
     }
 
-
     public void setMessageType(MessageType messageType) {
         this.messageType = messageType;
     }
-    
-   
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
new file mode 100644
index 0000000..587d5a7
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * Handles the (de-)registration of feed message queues and the delivery of control messages to registered feeds.
+ */
+public interface IFeedManager {
+
+    /**
+     * Register an input message queue for a feed specified by feedId.
+     * All messages sent to a feed are directed to the registered queue(s).
+     * 
+     * @param feedId
+     *            an identifier for the feed dataset.
+     * @param queue
+     *            an input message queue for receiving control messages.
+     */
+    public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+    /**
+     * Unregister an input message queue for a feed specified by feedId.
+     * Prior to finishing, a feed should unregister all of its previously
+     * registered queues, as it is no longer active and need not process any
+     * further control messages.
+     * 
+     * @param feedId
+     *            an identifier for the feed dataset.
+     * @param queue
+     *            an input message queue for receiving control messages.
+     */
+    public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+    /**
+     * Deliver a message to a feed with a given feedId.
+     * 
+     * @param feedId
+     *            identifier for the feed dataset.
+     * @param feedMessage
+     *            control message that needs to be delivered.
+     * @throws AsterixException
+     *             if the feed is unknown or the message could not be delivered
+     */
+    public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException;
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
similarity index 72%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
index dfb4f91..9e1e907 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -12,26 +12,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
 
 import java.io.Serializable;
 
 public interface IFeedMessage extends Serializable {
 
-    public enum MessageResponseMode {
-        SYNCHRONOUS,
-        ASYNCHRONOUS,
-    }
-
     public enum MessageType {
         STOP,
-        SUSPEND,
-        RESUME,
         ALTER,
     }
 
-    public MessageResponseMode getMessageResponseMode();
-
     public MessageType getMessageType();
-    
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
deleted file mode 100644
index f0e34c3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-public interface IFeedClient {
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
deleted file mode 100644
index f6b37b3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public interface IPullBasedFeedClient {
-
-    public enum status {
-        MORE_DATA,
-        END_OF_DATA
-    }
-
-    public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
-
-    public void resetOnFailure(Exception e) throws AsterixException;
-
-    public void suspend() throws Exception;
-
-    public void resume() throws Exception;
-
-    public void stop() throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
deleted file mode 100644
index 501acd4..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.AMutableRecord;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
-
-	protected ARecordSerializerDeserializer recordSerDe;
-	protected AMutableRecord mutableRecord;
-	protected boolean messageReceived;
-
-	public abstract boolean setNextRecord() throws Exception;
-
-	@Override
-	public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
-		try {
-			boolean newData = setNextRecord();
-			if (newData) {
-				IAType t = mutableRecord.getType();
-				ATypeTag tag = t.getTypeTag();
-				try {
-					dataOutput.writeByte(tag.serialize());
-				} catch (IOException e) {
-					throw new HyracksDataException(e);
-				}
-				recordSerDe.serialize(mutableRecord, dataOutput);
-				return true;
-			}
-			return false;
-		} catch (Exception e) {
-			throw new AsterixException(e);
-		}
-
-	}
-
-	/*
-	 * public void displayFeedRecord() { StringBuilder builder = new
-	 * StringBuilder(); int numFields = recordType.getFieldNames().length; for
-	 * (int i = 0; i < numFields; i++) {
-	 * builder.append(mutableRecord.getValueByPos(i).toString());
-	 * builder.append("|"); } }
-	 */
-
-	@Override
-	public void suspend() throws Exception {
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	public void resume() throws Exception {
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	public void stop() throws Exception {
-		// TODO Auto-generated method stub
-
-	}
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
index 7cf1fb1..6f993ae 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -14,24 +14,30 @@
  */
 package edu.uci.ics.asterix.feed.managed.adapter;
 
+import java.util.Map;
+
+/**
+ * Interface implemented by an adapter that can be controlled or managed by
+ * external commands (stop, alter).
+ */
 public interface IManagedFeedAdapter {
 
-    public enum OperationState {
-        SUSPENDED,
-        // INACTIVE state signifies that the feed dataset is not
-        // connected with the external world through the feed
-        // adapter.
-        ACTIVE,
-        // ACTIVE state signifies that the feed dataset is connected to the
-        // external world using an adapter that may put data into the dataset.
-        STOPPED,
-        INACTIVE
-    }
+    /**
+     * Discontinue the ingestion of data and end the feed.
+     */
+    public void stop();
 
-    public void suspend() throws Exception;
-
-    public void resume() throws Exception;
-
-    public void stop() throws Exception;
+    /**
+     * Modify the adapter configuration parameters. This method is invoked
+     * while the adapter is actively ingesting data in a running feed.
+     * 
+     * @param properties
+     *            A Map containing the configuration parameters to be altered.
+     */
+    public void alter(Map<String, String> properties);
 
 }
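
A hypothetical adapter sketch illustrating the slimmed-down contract: only stop and alter remain now that suspend/resume are gone and alter has been folded in from IMutableFeedAdapter (deleted below). The class and parameter names are illustrative:

    import java.util.Map;

    public class RateLimitedFeedAdapter implements IManagedFeedAdapter {

        private volatile boolean stopped = false;
        private volatile int batchSize = 100;

        @Override
        public void stop() {
            // the ingestion loop polls this flag and ends the feed
            stopped = true;
        }

        @Override
        public void alter(Map<String, String> properties) {
            // reconfigure while the feed remains active
            String size = properties.get("batch-size"); // hypothetical parameter
            if (size != null) {
                batchSize = Integer.parseInt(size);
            }
        }
    }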
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
deleted file mode 100644
index 290c7d6..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.managed.adapter;
-
-import java.util.Map;
-
-public interface IMutableFeedAdapter extends IManagedFeedAdapter {
-
-    public void alter(Map<String,String> properties) throws Exception;
-    
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
deleted file mode 100644
index 748455a..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-
-public class FeedManager implements IFeedManager {
-
-    private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
-    private LinkedBlockingQueue<IFeedMessage> incomingMsgQueue = new LinkedBlockingQueue<IFeedMessage>();
-
-    @Override
-    public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception {
-        Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
-        if (operatorQueues != null) {
-            for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
-                queue.put(feedMessage);
-            }
-        }
-        return true;
-    }
-
-    @Override
-    public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
-        Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
-        if (feedQueues == null) {
-            feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
-        }
-        feedQueues.add(queue);
-        outGoingMsgQueueMap.put(feedId, feedQueues);
-
-    }
-
-    @Override
-    public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
-        Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
-        if (feedQueues == null || !feedQueues.contains(queue)) {
-            throw new IllegalArgumentException(" unable to de-register feed message queue");
-        }
-        feedQueues.remove(queue);
-    }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
deleted file mode 100644
index 82fb67d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-
-/**
- * Provider for all the sub-systems (transaction/lock/log/recovery) managers.
- * Users of transaction sub-systems must obtain them from the provider.
- */
-public class FeedSystemProvider {
-    private static final IFeedManager feedManager = new FeedManager();
-    
-    public static IFeedManager getFeedManager()  {
-      return feedManager;
-    }
-}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
deleted file mode 100644
index a8c1303..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-
-public interface IFeedManager {
-
-    public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
-    public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
-    public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception;
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
deleted file mode 100644
index c300b0d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
-
-    private final String adapterFactoryClassName;
-    private final Map<String, String> adapterConfiguration;
-    private final IAType atype;
-    private final FeedId feedId;
-
-    private transient IFeedDatasetAdapterFactory datasourceAdapterFactory;
-
-    public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId, String adapter,
-            Map<String, String> arguments, ARecordType atype, RecordDescriptor rDesc) {
-        super(spec, 1, 1);
-        recordDescriptors[0] = rDesc;
-        this.adapterFactoryClassName = adapter;
-        this.adapterConfiguration = arguments;
-        this.atype = atype;
-        this.feedId = feedId;
-    }
-
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
-            throws HyracksDataException {
-        IFeedDatasourceAdapter adapter;
-        try {
-            datasourceAdapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterFactoryClassName)
-                    .newInstance();
-            if (datasourceAdapterFactory.getFeedAdapterType().equals(FeedAdapterType.GENERIC)) {
-                adapter = (IFeedDatasourceAdapter) ((IGenericFeedDatasetAdapterFactory) datasourceAdapterFactory)
-                        .createAdapter(adapterConfiguration, atype);
-            } else {
-                adapter = (IFeedDatasourceAdapter) ((ITypedFeedDatasetAdapterFactory) datasourceAdapterFactory)
-                        .createAdapter(adapterConfiguration);
-            }
-            adapter.initialize(ctx);
-        } catch (Exception e) {
-            throw new HyracksDataException("initialization of adapter failed", e);
-        }
-        return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
-    }
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
deleted file mode 100644
index c66c49e..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedMessageOperatorDescriptor extends
-		AbstractSingleActivityOperatorDescriptor {
-
-	private final FeedId feedId;
-	private final List<IFeedMessage> feedMessages;
-	private final boolean sendToAll = true;
-
-	public FeedMessageOperatorDescriptor(JobSpecification spec,
-			String dataverse, String dataset, List<IFeedMessage> feedMessages) {
-		super(spec, 0, 1);
-		this.feedId = new FeedId(dataverse, dataset);
-		this.feedMessages = feedMessages;
-	}
-
-	@Override
-	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-			IRecordDescriptorProvider recordDescProvider, int partition,
-			int nPartitions) throws HyracksDataException {
-		return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages,
-				sendToAll, partition, nPartitions);
-	}
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
deleted file mode 100644
index 37e3ba3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-public class FeedMessageOperatorNodePushable extends
-		AbstractUnaryOutputSourceOperatorNodePushable {
-
-	private final FeedId feedId;
-	private final List<IFeedMessage> feedMessages;
-	private IFeedManager feedManager;
-
-	public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx,
-			FeedId feedId, List<IFeedMessage> feedMessages, boolean applyToAll,
-			int partition, int nPartitions) {
-		this.feedId = feedId;
-		if (applyToAll) {
-			this.feedMessages = feedMessages;
-		} else {
-			this.feedMessages = new ArrayList<IFeedMessage>();
-			feedMessages.add(feedMessages.get(partition));
-		}
-		feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
-	}
-
-	@Override
-	public void initialize() throws HyracksDataException {
-		try {
-			writer.open();
-			for (IFeedMessage feedMessage : feedMessages) {
-				feedManager.deliverMessage(feedId, feedMessage);
-			}
-		} catch (Exception e) {
-			throw new HyracksDataException(e);
-		} finally {
-			writer.close();
-		}
-	}
-
-}
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index 67f16ff..459d4d0 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -5,10 +5,7 @@
 		<groupId>edu.uci.ics.asterix</groupId>
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
-	<groupId>edu.uci.ics.asterix</groupId>
 	<artifactId>asterix-metadata</artifactId>
-	<version>0.0.4-SNAPSHOT</version>
-
 	<build>
 		<plugins>
 			<plugin>
@@ -45,12 +42,10 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-invertedindex</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-rtree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index ec46e7b..1db4886 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -137,12 +137,18 @@
 
     private static final ARecordType createFeedDetailsRecordType() {
         AOrderedListType orderedListType = new AOrderedListType(BuiltinType.ASTRING, null);
-        AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
+        AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE,
+                null);
         String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName",
                 "DatasourceAdapter", "Properties", "Function", "Status" };
 
+        List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
+        feedFunctionUnionList.add(BuiltinType.ANULL);
+        feedFunctionUnionList.add(BuiltinType.ASTRING);
+        AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
+
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListType, orderedListType,
-                BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, BuiltinType.ASTRING,
+                BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, feedFunctionUnion,
                 BuiltinType.ASTRING };
 
         return new ARecordType(null, fieldNames, fieldTypes, true);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index fad1731..d92c76d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -27,18 +27,16 @@
 import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
 import edu.uci.ics.asterix.common.parse.IParseFileSplitsDecl;
 import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
 import edu.uci.ics.asterix.external.data.operator.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedMessageOperatorDescriptor;
 import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.operator.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.feed.operator.FeedMessageOperatorDescriptor;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
 import edu.uci.ics.asterix.formats.base.IDataFormat;
 import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
@@ -148,9 +146,9 @@
         this.defaultDataverse = defaultDataverse;
         this.stores = AsterixProperties.INSTANCE.getStores();
     }
-    
-    public void setJobTxnId(long txnId){
-    	this.jobTxnId = txnId;
+
+    public void setJobTxnId(long txnId) {
+        this.jobTxnId = txnId;
     }
 
     public Dataverse getDefaultDataverse() {
@@ -272,7 +270,7 @@
             throw new AlgebricksException("Can only scan datasets of records.");
         }
 
-        IExternalDatasetAdapterFactory adapterFactory;
+        IGenericDatasetAdapterFactory adapterFactory;
         IDatasourceAdapter adapter;
         String adapterName;
         DatasourceAdapter adapterEntity;
@@ -283,22 +281,22 @@
                     adapterName);
             if (adapterEntity != null) {
                 adapterFactoryClassname = adapterEntity.getClassname();
-                adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+                adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
             } else {
                 adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
                 if (adapterFactoryClassname == null) {
                     throw new AlgebricksException(" Unknown adapter :" + adapterName);
                 }
-                adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+                adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
             }
 
-            adapter = ((IExternalDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
+            adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
                     itemType);
         } catch (AlgebricksException ae) {
             throw ae;
         } catch (Exception e) {
             e.printStackTrace();
-            throw new AlgebricksException("unable to load the adapter factory class " + e);
+            throw new AlgebricksException("Unable to create adapter " + e);
         }
 
         if (!(adapter.getAdapterType().equals(IDatasourceAdapter.AdapterType.READ) || adapter.getAdapterType().equals(
@@ -313,7 +311,13 @@
         ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec,
                 adapterFactoryClassname, datasetDetails.getProperties(), rt, scannerDesc);
 
-        AlgebricksPartitionConstraint constraint = adapter.getPartitionConstraint();
+        AlgebricksPartitionConstraint constraint;
+        try {
+            constraint = adapter.getPartitionConstraint();
+        } catch (Exception e) {
+            throw new AlgebricksException(e);
+        }
+
         return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(dataScanner, constraint);
     }
 
@@ -346,25 +350,45 @@
         FeedDatasetDetails datasetDetails = (FeedDatasetDetails) dataset.getDatasetDetails();
         DatasourceAdapter adapterEntity;
         IDatasourceAdapter adapter;
-        IFeedDatasetAdapterFactory adapterFactory;
+        IAdapterFactory adapterFactory;
         IAType adapterOutputType;
+        String adapterName;
+        String adapterFactoryClassname;
 
         try {
-            adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, datasetDetails.getAdapterFactory());
-            adapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterEntity.getClassname()).newInstance();
-            if (adapterFactory.getFeedAdapterType().equals(FeedAdapterType.TYPED)) {
-                adapter = ((ITypedFeedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails
-                        .getProperties());
-                adapterOutputType = ((IFeedDatasourceAdapter) adapter).getAdapterOutputType();
+            adapterName = datasetDetails.getAdapterFactory();
+            adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
+                    adapterName);
+            if (adapterEntity != null) {
+                adapterFactoryClassname = adapterEntity.getClassname();
+                adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
             } else {
-                String outputTypeName = datasetDetails.getProperties().get(
-                        IGenericFeedDatasetAdapterFactory.KEY_TYPE_NAME);
-                adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, outputTypeName).getDatatype();
-                adapter = ((IGenericFeedDatasetAdapterFactory) adapterFactory).createAdapter(
-                        datasetDetails.getProperties(), adapterOutputType);
+                adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
+                if (adapterFactoryClassname == null) {
+                    // adapterName has been provided as a fully qualified classname
+                    adapterFactoryClassname = adapterName;
+                }
+                adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
             }
+
+            if (adapterFactory instanceof ITypedDatasetAdapterFactory) {
+                adapter = ((ITypedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties());
+                adapterOutputType = ((ITypedDatasourceAdapter) adapter).getAdapterOutputType();
+            } else if (adapterFactory instanceof IGenericDatasetAdapterFactory) {
+                String outputTypeName = datasetDetails.getProperties().get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME);
+                adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(),
+                        outputTypeName).getDatatype();
+                adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(
+                        datasetDetails.getProperties(), adapterOutputType);
+            } else {
+                throw new IllegalStateException("Unknown factory type for " + adapterFactoryClassname);
+            }
+        } catch (AlgebricksException ae) {
+            throw ae;
         } catch (Exception e) {
-            throw new AlgebricksException(e);
+            e.printStackTrace();
+            throw new AlgebricksException("unable to create adapter  " + e);
         }
 
         ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
@@ -372,11 +396,16 @@
         RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
 
         FeedIntakeOperatorDescriptor feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, new FeedId(
-                dataset.getDataverseName(), dataset.getDatasetName()), adapterEntity.getClassname(),
+                dataset.getDataverseName(), dataset.getDatasetName()), adapterFactoryClassname,
                 datasetDetails.getProperties(), (ARecordType) adapterOutputType, feedDesc);
 
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor,
-                adapter.getPartitionConstraint());
+        AlgebricksPartitionConstraint constraint = null;
+        try {
+            constraint = adapter.getPartitionConstraint();
+        } catch (Exception e) {
+            throw new AlgebricksException(e);
+        }
+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor, constraint);
     }
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedMessengerRuntime(
@@ -820,10 +849,8 @@
             TreeIndexInsertUpdateDeleteOperatorDescriptor btreeInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                     spec, recordDesc, appContext.getStorageManagerInterface(), appContext.getIndexRegistryProvider(),
                     splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
-                    new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE,
-                    jobTxnId);
-            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert,
-                    splitsAndConstraint.second);
+                    new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert, splitsAndConstraint.second);
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
         }
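
The adapter-factory resolution above now follows a three-step fallback that is worth spelling out. A condensed sketch (the helper name is illustrative; the surrounding code additionally dispatches on ITypedDatasetAdapterFactory vs. IGenericDatasetAdapterFactory):

    private String resolveAdapterFactoryClassname(String adapterName, DatasourceAdapter adapterEntity) {
        if (adapterEntity != null) {
            // 1. the adapter is registered as a metadata entity
            return adapterEntity.getClassname();
        }
        String classname = adapterFactoryMapping.get(adapterName);
        if (classname != null) {
            // 2. the adapter name is a known built-in alias
            return classname;
        }
        // 3. otherwise the name is assumed to be a fully qualified classname
        return adapterName;
    }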
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
deleted file mode 100644
index cc48d1e..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.metadata.entities;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-
-public class AsterixBuiltinArtifactMap {
-
-    public enum ARTIFACT_KIND {
-        DATASET,
-        DATAVERSE,
-        FUNCTION,
-        NODEGROUP
-    }
-
-    public static final String ARTIFACT_TYPE_DATASET = "DATASET";
-    public static final String ARTIFACT_TYPE_DATAVERSE = "DATAVERSE";
-    public static final String ARTIFACT_TYPE_FUNCTION = "FUNCTION";
-    public static final String ARTIFACT_TYPE_NODEGROUP = "NODEGROUP";
-
-    public static final String DATASET_DATASETS = "Dataset";
-    public static final String DATASET_INDEX = "Index";
-    public static final String DATASET_NODEGROUP = "NodeGroup";
-
-    public static final String DATAVERSE_METADATA = "Metadata";
-
-    public static final String NODEGROUP_DEFAULT = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
-
-    private static final Map<ARTIFACT_KIND, Set<String>> builtinArtifactMap = new HashMap<ARTIFACT_KIND, Set<String>>();
-
-    static {
-        Set<String> datasets = new HashSet<String>();
-        datasets.add(DATASET_DATASETS);
-        datasets.add(DATASET_INDEX);
-        datasets.add(DATASET_NODEGROUP);
-        builtinArtifactMap.put(ARTIFACT_KIND.DATASET, datasets);
-
-        Set<String> dataverses = new HashSet<String>();
-        dataverses.add(DATAVERSE_METADATA);
-        builtinArtifactMap.put(ARTIFACT_KIND.DATAVERSE, dataverses);
-
-        Set<String> nodeGroups = new HashSet<String>();
-        nodeGroups.add(NODEGROUP_DEFAULT);
-        builtinArtifactMap.put(ARTIFACT_KIND.NODEGROUP, nodeGroups);
-
-    }
-
-    public static boolean isSystemProtectedArtifact(ARTIFACT_KIND kind, Object artifactIdentifier) {
-        switch (kind) {
-            case NODEGROUP:
-            case DATASET:
-            case DATAVERSE:
-                return builtinArtifactMap.get(kind).contains((String) artifactIdentifier);
-
-            case FUNCTION:
-                return AsterixBuiltinFunctions.isBuiltinCompilerFunction((FunctionIdentifier) artifactIdentifier);
-            default:
-                return false;
-        }
-    }
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
index 895466d..367066b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
@@ -34,6 +34,11 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 
+/**
+ * Provides functionality for writing the parameters of a FEED dataset into the
+ * Metadata. Since a FEED dataset is a special kind of INTERNAL dataset, this
+ * class extends InternalDatasetDetails.
+ */
 public class FeedDatasetDetails extends InternalDatasetDetails {
 
     private static final long serialVersionUID = 1L;
@@ -141,8 +146,8 @@
 
         // write field 7
         fieldValue.reset();
-        if (getFunction() != null) {
-            aString.setValue(getFunction().toString());
+        if (signature != null) {
+            aString.setValue(signature.toString());
             stringSerde.serialize(aString, fieldValue.getDataOutput());
             feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
         }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 324429e..3cc6f2f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -27,8 +27,6 @@
 import java.util.Map;
 
 import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.common.config.DatasetConfig;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -41,15 +39,14 @@
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import edu.uci.ics.asterix.om.base.ANull;
 import edu.uci.ics.asterix.om.base.AOrderedList;
 import edu.uci.ics.asterix.om.base.ARecord;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.base.IACursor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 
 /**
  * Translates a Dataset metadata entity to an ITupleReference and vice versa.
@@ -63,9 +60,6 @@
     // Payload field containing serialized Dataset.
     public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
-    private FileSplit[] splits;
-    private List<String> partitioningKey;
-    private List<String> primaryKey;
     @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
@@ -95,7 +89,71 @@
         DatasetType datasetType = DatasetType.valueOf(((AString) datasetRecord.getValueByPos(3)).getStringValue());
         IDatasetDetails datasetDetails = null;
         switch (datasetType) {
-            case FEED:
+            case FEED: {
+                ARecord datasetDetailsRecord = (ARecord) datasetRecord
+                        .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX);
+                FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX))
+                        .getStringValue());
+                PartitioningStrategy partitioningStrategy = PartitioningStrategy
+                        .valueOf(((AString) datasetDetailsRecord
+                                .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX))
+                                .getStringValue());
+                IACursor cursor = ((AOrderedList) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
+                List<String> partitioningKey = new ArrayList<String>();
+                while (cursor.next()) {
+                    partitioningKey.add(((AString) cursor.get()).getStringValue());
+                }
+                String groupName = ((AString) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
+                        .getStringValue();
+                String adapter = ((AString) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
+                        .getStringValue();
+                cursor = ((AOrderedList) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
+                Map<String, String> properties = new HashMap<String, String>();
+                String key;
+                String value;
+                while (cursor.next()) {
+                    ARecord field = (ARecord) cursor.get();
+                    key = ((AString) field
+                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
+                            .getStringValue();
+                    value = ((AString) field
+                            .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
+                            .getStringValue();
+                    properties.put(key, value);
+                }
+
+                Object o = datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX);
+                FunctionSignature signature = null;
+                if (!(o instanceof ANull)) {
+                    String functionIdentifier = ((AString) o).getStringValue();
+                    String[] qnameComponents = functionIdentifier.split("\\.");
+                    String functionDataverse;
+                    String functionName;
+                    if (qnameComponents.length == 2) {
+                        functionDataverse = qnameComponents[0];
+                        functionName = qnameComponents[1];
+                    } else {
+                        functionDataverse = dataverseName;
+                        functionName = qnameComponents[0];
+                    }
+
+                    String[] nameComponents = functionName.split("@");
+                    signature = new FunctionSignature(functionDataverse, nameComponents[0],
+                            Integer.parseInt(nameComponents[1]));
+                }
+
+                String feedState = ((AString) datasetDetailsRecord
+                        .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX)).getStringValue();
+
+                datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+                        partitioningKey, groupName, adapter, properties, signature, feedState);
+                break;
+            }
             case INTERNAL: {
                 ARecord datasetDetailsRecord = (ARecord) datasetRecord
                         .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
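
A worked example of the function-signature parsing in the FEED case above, for a hypothetical stored value "feeds.clean_tweets@1". Note that the replaced code split on an unescaped "." (a regex wildcard, so the split could never yield the intended components), whereas the rewrite escapes it:

    String functionIdentifier = "feeds.clean_tweets@1";    // hypothetical stored value
    String[] qnameComponents = functionIdentifier.split("\\.");
    // qnameComponents = { "feeds", "clean_tweets@1" }
    String functionDataverse = qnameComponents[0];
    String[] nameComponents = qnameComponents[1].split("@");
    // nameComponents = { "clean_tweets", "1" }  -> function name and arity
    FunctionSignature signature = new FunctionSignature(functionDataverse,
            nameComponents[0], Integer.parseInt(nameComponents[1]));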
@@ -116,48 +174,9 @@
                         .getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
                         .getStringValue();
 
-                if (datasetType == DatasetConfig.DatasetType.INTERNAL) {
-                    datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
-                            partitioningKey, groupName);
-                } else {
-                    String adapter = ((AString) datasetDetailsRecord
-                            .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
-                            .getStringValue();
-                    cursor = ((AOrderedList) datasetDetailsRecord
-                            .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
-                            .getCursor();
-                    Map<String, String> properties = new HashMap<String, String>();
-                    String key;
-                    String value;
-                    while (cursor.next()) {
-                        ARecord field = (ARecord) cursor.get();
-                        key = ((AString) field
-                                .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
-                                .getStringValue();
-                        value = ((AString) field
-                                .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
-                                .getStringValue();
-                        properties.put(key, value);
-                    }
+                datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+                        partitioningKey, groupName);
 
-                    String functionIdentifier = ((AString) datasetDetailsRecord
-                            .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX))
-                            .getStringValue();
-                    String[] nameComponents1 = functionIdentifier.split(".");
-                    String functionDataverse = nameComponents1[0];
-                    String[] nameComponents2 = nameComponents1[1].split("@");
-                    String functionName = nameComponents2[0];
-                    FunctionSignature signature = new FunctionSignature(functionDataverse, functionName,
-                            Integer.parseInt(nameComponents2[1]));
-
-                    String feedState = ((AString) datasetDetailsRecord
-                            .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX))
-                            .getStringValue();
-
-                    datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
-                            partitioningKey, groupName, adapter, properties, signature, feedState);
-
-                }
                 break;
             }
 
@@ -263,23 +282,4 @@
 
     }
 
-    public void writePropertyTypeRecord(String name, String value, DataOutput out) throws IOException {
-        IARecordBuilder propertyRecordBuilder = new RecordBuilder();
-        ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
-        propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
-
-        // write field 0
-        fieldValue.reset();
-        aString.setValue(name);
-        stringSerde.serialize(aString, fieldValue.getDataOutput());
-        propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX, fieldValue);
-
-        // write field 1
-        fieldValue.reset();
-        aString.setValue(value);
-        stringSerde.serialize(aString, fieldValue.getDataOutput());
-        propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX, fieldValue);
-
-        propertyRecordBuilder.write(out, true);
-    }
 }
\ No newline at end of file
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index a14f3c2..5368f07 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -5,10 +5,7 @@
 		<groupId>edu.uci.ics.asterix</groupId>
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
-	<groupId>edu.uci.ics.asterix</groupId>
 	<artifactId>asterix-om</artifactId>
-	<version>0.0.4-SNAPSHOT</version>
-
 	<build>
 		<plugins>
 			<plugin>
@@ -33,7 +30,6 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-invertedindex</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.asterix</groupId>
@@ -43,13 +39,11 @@
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hyracks-algebricks-compiler</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
+			<artifactId>algebricks-compiler</artifactId>
 		</dependency>
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 			<artifactId>hyracks-storage-am-rtree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
-		</dependency>
+		</dependency>
 	</dependencies>
 </project>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
new file mode 100644
index 0000000..83b165b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.hash;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+/**
+ * An implementation of the Murmur3 hash family, based on the original <a
+ * href=http://code.google.com/p/guava-libraries/source/browse
+ * /guava/src/com/google/common/hash/Murmur3_32HashFunction.java>Murmur3_32
+ * implementation</a> in the Google Guava library.
+ */
+public class MurmurHash3BinaryHashFunctionFamily implements
+		IBinaryHashFunctionFamily {
+
+	public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
+
+	private static final long serialVersionUID = 1L;
+
+	private MurmurHash3BinaryHashFunctionFamily() {
+	}
+
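+	// Murmur3_32 block-mixing and finalization constants.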
+	private static final int C1 = 0xcc9e2d51;
+	private static final int C2 = 0x1b873593;
+	private static final int C3 = 5;
+	private static final int C4 = 0xe6546b64;
+	private static final int C5 = 0x85ebca6b;
+	private static final int C6 = 0xc2b2ae35;
+
+	@Override
+	public IBinaryHashFunction createBinaryHashFunction(final int seed) {
+		return new IBinaryHashFunction() {
+			@Override
+			public int hash(byte[] bytes, int offset, int length) {
+				int h = seed;
+				int p = offset;
+				int remain = length;
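+				// Mix the input into the hash four bytes at a time,
+				// reading each block in little-endian order.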
+				while (remain >= 4) {
+					int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8)
+							| ((bytes[p + 2] & 0xff) << 16)
+							| ((bytes[p + 3] & 0xff) << 24);
+					k *= C1;
+					k = Integer.rotateLeft(k, 15);
+					k *= C2;
+					h ^= k;
+					h = Integer.rotateLeft(h, 13);
+					h = h * C3 + C4;
+					p += 4;
+					remain -= 4;
+				}
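+				// Fold any remaining tail bytes (fewer than four)
+				// into one final partial block.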
+				if (remain > 0) {
+					int k = 0;
+					for (int i = 0; remain > 0; i += 8) {
+						k ^= (bytes[p++] & 0xff) << i;
+						remain--;
+					}
+					k *= C1;
+					k = Integer.rotateLeft(k, 15);
+					k *= C2;
+					h ^= k;
+				}
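+				// Finalization: mix in the length, then apply the
+				// avalanche steps.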
+				h ^= length;
+				h ^= (h >>> 16);
+				h *= C5;
+				h ^= (h >>> 13);
+				h *= C6;
+				h ^= (h >>> 16);
+				return h;
+			}
+		};
+	}
+}
\ No newline at end of file
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
index 37d7ac6..bc7ba26 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
@@ -17,26 +17,29 @@
 
 import java.io.Serializable;
 
+import edu.uci.ics.asterix.dataflow.data.nontagged.hash.MurmurHash3BinaryHashFunctionFamily;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
-import edu.uci.ics.hyracks.data.std.accessors.MurmurHash3BinaryHashFunctionFamily;
 
 /**
- * We use a type-independent binary hash function family from the hyracks codebase
+ * We use a type-independent binary hash function family from the hyracks
+ * codebase.
  */
-public class AqlBinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider, Serializable {
+public class AqlBinaryHashFunctionFamilyProvider implements
+		IBinaryHashFunctionFamilyProvider, Serializable {
 
-    private static final long serialVersionUID = 1L;
-    public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
+	private static final long serialVersionUID = 1L;
+	public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
 
-    private AqlBinaryHashFunctionFamilyProvider() {
+	private AqlBinaryHashFunctionFamilyProvider() {
 
-    }
+	}
 
-    @Override
-    public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException {
-        return new MurmurHash3BinaryHashFunctionFamily();
-    }
+	@Override
+	public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
+			throws AlgebricksException {
+		return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+	}
 
 }
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
index 624d7eb..76f3301 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
@@ -18,39 +18,51 @@
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
 import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
 
+/**
+ * Utility class for obtaining information on the set of Hyracks NodeController
+ * processes that are running on a given host.
+ */
 public class AsterixRuntimeUtil {
 
-    public static Set<String> getNodeControllersOnIP(String ipAddress) throws AsterixException {
-        Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
-        Set<String> nodeControllersAtLocation = nodeControllerInfo.get(ipAddress);
-        return nodeControllersAtLocation;
-    }
+	public static Set<String> getNodeControllersOnIP(String ipAddress)
+			throws Exception {
+		Map<String, Set<String>> nodeControllerInfo = getNodeControllerMap();
+		Set<String> nodeControllersAtLocation = nodeControllerInfo
+				.get(ipAddress);
+		return nodeControllersAtLocation;
+	}
 
-    public static Set<String> getNodeControllersOnHostName(String hostName) throws UnknownHostException {
-        Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
-        String address;
-        address = InetAddress.getByName(hostName).getHostAddress();
-        if (address.equals("127.0.1.1")) {
-            address = "127.0.0.1";
-        }
-        Set<String> nodeControllersAtLocation = nodeControllerInfo.get(address);
-        return nodeControllersAtLocation;
-    }
+	public static List<String> getAllNodeControllers() throws Exception {
+		Collection<Set<String>> nodeControllersCollection = getNodeControllerMap()
+				.values();
+		List<String> nodeControllers = new ArrayList<String>();
+		for (Set<String> ncCollection : nodeControllersCollection) {
+			nodeControllers.addAll(ncCollection);
+		}
+		return nodeControllers;
+	}
 
-    public static List<String> getAllNodeControllers() {
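+	// Builds a fresh IP-address to node-controller-set map from the CC
+	// application context on every call.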
+	public static Map<String, Set<String>> getNodeControllerMap()
+			throws Exception {
+		Map<String, Set<String>> map = new HashMap<String, Set<String>>();
+		AsterixAppContextInfoImpl.getInstance().getCCApplicationContext()
+				.getCCContext().getIPAddressNodeMap(map);
+		return map;
+	}
 
-        Collection<Set<String>> nodeControllersCollection = AsterixAppContextInfoImpl.getNodeControllerMap().values();
-        List<String> nodeControllers = new ArrayList<String>();
-        for (Set<String> ncCollection : nodeControllersCollection) {
-            nodeControllers.addAll(ncCollection);
-        }
-        return nodeControllers;
-    }
+	public static String getIPAddress(String hostname)
+			throws UnknownHostException {
+		String address = InetAddress.getByName(hostname).getHostAddress();
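+		// Some Linux configurations resolve the local hostname to
+		// 127.0.1.1; normalize it to the standard loopback address.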
+		if (address.equals("127.0.1.1")) {
+			address = "127.0.0.1";
+		}
+		return address;
+	}
 }
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index 77952a6..ddc1cfd 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -5,11 +5,7 @@
 		<groupId>edu.uci.ics.asterix</groupId>
 		<version>0.0.4-SNAPSHOT</version>
 	</parent>
-	<groupId>edu.uci.ics.asterix</groupId>
 	<artifactId>asterix-runtime</artifactId>
-	<version>0.0.4-SNAPSHOT</version>
-
-
 	<build>
 		<plugins>
 			<plugin>
@@ -56,7 +52,6 @@
 		<dependency>
 			<groupId>edu.uci.ics.hyracks</groupId>
 		        <artifactId>hyracks-storage-am-btree</artifactId>
-			<version>0.2.2-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 		        <groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
new file mode 100644
index 0000000..8606088
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
@@ -0,0 +1,925 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayDeque;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Queue;
+
+import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
+import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
+import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
+import edu.uci.ics.asterix.adm.parser.nontagged.Token;
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.IAsterixListBuilder;
+import edu.uci.ics.asterix.builders.OrderedListBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+
+/**
+ * Parser for ADM-formatted data.
+ */
+public class ADMDataParser extends AbstractDataParser implements IDataParser {
+
+    protected AdmLexer admLexer;
+    protected ARecordType recordType;
+    protected boolean datasetRec;
+
+    private int nullableFieldId = 0;
+
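+    // Pools for recycling temporary buffers and builders across values,
+    // avoiding per-record allocations during parsing.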
+    private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
+    private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
+    private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+    private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+
+    private String mismatchErrorMessage = "Type mismatch: expecting a value of type ";
+
+    @Override
+    public boolean parse(DataOutput out) throws HyracksDataException {
+        try {
+            return parseAdmInstance((IAType) recordType, datasetRec, out);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) {
+        admLexer = new AdmLexer(in);
+        this.recordType = recordType;
+        this.datasetRec = datasetRec;
+    }
+
+    protected boolean parseAdmInstance(IAType objectType, boolean datasetRec, DataOutput out) throws AsterixException,
+            IOException {
+        Token token;
+        try {
+            token = admLexer.next();
+        } catch (ParseException pe) {
+            throw new AsterixException(pe);
+        }
+        if (token.kind == AdmLexerConstants.EOF) {
+            return false;
+        } else {
+            admFromLexerStream(token, objectType, out, datasetRec);
+            return true;
+        }
+    }
+
+    private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
+            throws AsterixException, IOException {
+
+        switch (token.kind) {
+            case AdmLexerConstants.NULL_LITERAL: {
+                if (checkType(ATypeTag.NULL, objectType, out)) {
+                    nullSerde.serialize(ANull.NULL, out);
+                } else
+                    throw new AsterixException(" This field can not be null ");
+                break;
+            }
+            case AdmLexerConstants.TRUE_LITERAL: {
+                if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+                    booleanSerde.serialize(ABoolean.TRUE, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.BOOLEAN_CONS: {
+                parseConstructor(ATypeTag.BOOLEAN, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.FALSE_LITERAL: {
+                if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+                    booleanSerde.serialize(ABoolean.FALSE, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.DOUBLE_LITERAL: {
+                if (checkType(ATypeTag.DOUBLE, objectType, out)) {
+                    aDouble.setValue(Double.parseDouble(token.image));
+                    doubleSerde.serialize(aDouble, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.DOUBLE_CONS: {
+                parseConstructor(ATypeTag.DOUBLE, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.FLOAT_LITERAL: {
+                if (checkType(ATypeTag.FLOAT, objectType, out)) {
+                    aFloat.setValue(Float.parseFloat(token.image));
+                    floatSerde.serialize(aFloat, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.FLOAT_CONS: {
+                parseConstructor(ATypeTag.FLOAT, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.INT8_LITERAL: {
+                if (checkType(ATypeTag.INT8, objectType, out)) {
+                    parseInt8(token.image, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.INT8_CONS: {
+                parseConstructor(ATypeTag.INT8, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.INT16_LITERAL: {
+                if (checkType(ATypeTag.INT16, objectType, out)) {
+                    parseInt16(token.image, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.INT16_CONS: {
+                parseConstructor(ATypeTag.INT16, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.INT_LITERAL:
+            case AdmLexerConstants.INT32_LITERAL: {
+                if (checkType(ATypeTag.INT32, objectType, out)) {
+                    parseInt32(token.image, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.INT32_CONS: {
+                parseConstructor(ATypeTag.INT32, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.INT64_LITERAL: {
+                if (checkType(ATypeTag.INT64, objectType, out)) {
+                    parseInt64(token.image, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.INT64_CONS: {
+                parseConstructor(ATypeTag.INT64, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.STRING_LITERAL: {
+                if (checkType(ATypeTag.STRING, objectType, out)) {
+                    aString.setValue(token.image.substring(1, token.image.length() - 1));
+                    stringSerde.serialize(aString, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+                break;
+            }
+            case AdmLexerConstants.STRING_CONS: {
+                parseConstructor(ATypeTag.STRING, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.DATE_CONS: {
+                parseConstructor(ATypeTag.DATE, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.TIME_CONS: {
+                parseConstructor(ATypeTag.TIME, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.DATETIME_CONS: {
+                parseConstructor(ATypeTag.DATETIME, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.DURATION_CONS: {
+                parseConstructor(ATypeTag.DURATION, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.POINT_CONS: {
+                parseConstructor(ATypeTag.POINT, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.POINT3D_CONS: {
+                parseConstructor(ATypeTag.POINT3D, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.CIRCLE_CONS: {
+                parseConstructor(ATypeTag.CIRCLE, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.RECTANGLE_CONS: {
+                parseConstructor(ATypeTag.RECTANGLE, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.LINE_CONS: {
+                parseConstructor(ATypeTag.LINE, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.POLYGON_CONS: {
+                parseConstructor(ATypeTag.POLYGON, objectType, out);
+                break;
+            }
+            case AdmLexerConstants.START_UNORDERED_LIST: {
+                if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
+                    objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
+                    parseUnorderedList((AUnorderedListType) objectType, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+                break;
+            }
+
+            case AdmLexerConstants.START_ORDERED_LIST: {
+                if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
+                    objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
+                    parseOrderedList((AOrderedListType) objectType, out);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+                break;
+            }
+            case AdmLexerConstants.START_RECORD: {
+                if (checkType(ATypeTag.RECORD, objectType, out)) {
+                    objectType = getComplexType(objectType, ATypeTag.RECORD);
+                    parseRecord((ARecordType) objectType, out, datasetRec);
+                } else
+                    throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+                break;
+            }
+            case AdmLexerConstants.EOF: {
+                break;
+            }
+            default: {
+                throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind) + ".");
+            }
+        }
+    }
+
+    private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
+        try {
+            ADateTimeSerializerDeserializer.parse(datetime, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseDuration(String duration, DataOutput out) throws AsterixException {
+        try {
+            ADurationSerializerDeserializer.parse(duration, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+
+    }
+
+    private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
+
+        if (aObjectType == null) {
+            return null;
+        }
+
+        if (aObjectType.getTypeTag() == tag)
+            return aObjectType;
+
+        if (aObjectType.getTypeTag() == ATypeTag.UNION) {
+            unionList = ((AUnionType) aObjectType).getUnionList();
+            for (int i = 0; i < unionList.size(); i++)
+                if (unionList.get(i).getTypeTag() == tag) {
+                    return unionList.get(i);
+                }
+        }
+        return null; // won't get here
+    }
+
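+    // Scratch reference reused when walking union type members.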
+    List<IAType> unionList;
+
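+    // A null declared type (an open field) accepts any value; otherwise the
+    // expected tag must match the declared type or one of its union members.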
+    private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
+
+        if (aObjectType == null)
+            return true;
+
+        if (aObjectType.getTypeTag() != ATypeTag.UNION) {
+            if (expectedTypeTag == aObjectType.getTypeTag())
+                return true;
+        } else { // union
+            unionList = ((AUnionType) aObjectType).getUnionList();
+            for (int i = 0; i < unionList.size(); i++)
+                if (unionList.get(i).getTypeTag() == expectedTypeTag)
+                    return true;
+        }
+        return false;
+    }
+
+    private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
+            AsterixException {
+
+        ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
+        ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
+        IARecordBuilder recBuilder = getRecordBuilder();
+
+        // Tracks which closed fields have been seen, so missing
+        // non-nullable fields can be detected once the record ends.
+        BitSet nulls = null;
+        if (datasetRec) {
+            if (recType != null) {
+                nulls = new BitSet(recType.getFieldNames().length);
+                recBuilder.reset(recType);
+            } else
+                recBuilder.reset(null);
+        } else if (recType != null) {
+            nulls = new BitSet(recType.getFieldNames().length);
+            recBuilder.reset(recType);
+        } else
+            recBuilder.reset(null);
+
+        recBuilder.init();
+        Token token = null;
+        boolean inRecord = true;
+        boolean expectingRecordField = false;
+        boolean first = true;
+
+        boolean openRecordField = false;
+        int fieldId = 0;
+        IAType fieldType = null;
+        do {
+            token = nextToken();
+            switch (token.kind) {
+                case AdmLexerConstants.END_RECORD: {
+                    if (expectingRecordField) {
+                        throw new AsterixException("Found END_RECORD while expecting a record field.");
+                    }
+                    inRecord = false;
+                    break;
+                }
+                case AdmLexerConstants.STRING_LITERAL: {
+                    // we've read the name of the field
+                    // now read the content
+                    fieldNameBuffer.reset();
+                    fieldValueBuffer.reset();
+                    expectingRecordField = false;
+
+                    if (recType != null) {
+                        String fldName = token.image.substring(1, token.image.length() - 1);
+                        fieldId = recBuilder.getFieldId(fldName);
+                        if (fieldId < 0 && !recType.isOpen()) {
+                            throw new AsterixException("This record is closed, you can not add extra fields !!");
+                        } else if (fieldId < 0 && recType.isOpen()) {
+                            aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+                            stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+                            openRecordField = true;
+                            fieldType = null;
+                        } else {
+                            // a closed field
+                            nulls.set(fieldId);
+                            fieldType = recType.getFieldTypes()[fieldId];
+                            openRecordField = false;
+                        }
+                    } else {
+                        aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+                        stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+                        openRecordField = true;
+                        fieldType = null;
+                    }
+
+                    token = nextToken();
+                    if (token.kind != AdmLexerConstants.COLON) {
+                        throw new AsterixException("Unexpected ADM token kind: "
+                                + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
+                    }
+
+                    token = nextToken();
+                    this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
+                    if (openRecordField) {
+                        if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
+                            recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
+                    } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
+                        if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
+                            if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+                                recBuilder.addField(fieldId, fieldValueBuffer);
+                            }
+                        }
+                    } else {
+                        recBuilder.addField(fieldId, fieldValueBuffer);
+                    }
+
+                    break;
+                }
+                case AdmLexerConstants.COMMA: {
+                    if (first) {
+                        throw new AsterixException("Found COMMA before any record field.");
+                    }
+                    if (expectingRecordField) {
+                        throw new AsterixException("Found COMMA while expecting a record field.");
+                    }
+                    expectingRecordField = true;
+                    break;
+                }
+                default: {
+                    throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind)
+                            + " while parsing record fields.");
+                }
+            }
+            first = false;
+        } while (inRecord);
+
+        if (recType != null) {
+            nullableFieldId = checkNullConstraints(recType, nulls);
+            if (nullableFieldId != -1)
+                throw new AsterixException("Field " + nullableFieldId + " can not be null");
+        }
+        recBuilder.write(out, true);
+        returnRecordBuilder(recBuilder);
+        returnTempBuffer(fieldNameBuffer);
+        returnTempBuffer(fieldValueBuffer);
+    }
+
+    private int checkNullConstraints(ARecordType recType, BitSet nulls) {
+
+        for (int i = 0; i < recType.getFieldTypes().length; i++)
+            if (!nulls.get(i)) {
+                // Declared per field: a stale true value from an earlier
+                // nullable field would otherwise mask a later violation.
+                boolean isNull = false;
+                IAType type = recType.getFieldTypes()[i];
+                if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
+                    return i;
+
+                if (type.getTypeTag() == ATypeTag.UNION) { // union
+                    unionList = ((AUnionType) type).getUnionList();
+                    for (int j = 0; j < unionList.size(); j++)
+                        if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
+                            isNull = true;
+                            break;
+                        }
+                    if (!isNull)
+                        return i;
+                }
+            }
+        return -1;
+    }
+
+    private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
+
+        ArrayBackedValueStorage itemBuffer = getTempBuffer();
+        OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
+
+        IAType itemType = null;
+        if (oltype != null)
+            itemType = oltype.getItemType();
+        orderedListBuilder.reset(oltype);
+
+        Token token = null;
+        boolean inList = true;
+        boolean expectingListItem = false;
+        boolean first = true;
+        do {
+            token = nextToken();
+            if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
+                if (expectingListItem) {
+                    throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+                }
+                inList = false;
+            } else if (token.kind == AdmLexerConstants.COMMA) {
+                if (first) {
+                    throw new AsterixException("Found COMMA before any list item.");
+                }
+                if (expectingListItem) {
+                    throw new AsterixException("Found COMMA while expecting a list item.");
+                }
+                expectingListItem = true;
+            } else {
+                expectingListItem = false;
+                itemBuffer.reset();
+
+                admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+                orderedListBuilder.addItem(itemBuffer);
+            }
+            first = false;
+        } while (inList);
+        orderedListBuilder.write(out, true);
+        returnOrderedListBuilder(orderedListBuilder);
+        returnTempBuffer(itemBuffer);
+    }
+
+    private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException, AsterixException {
+
+        ArrayBackedValueStorage itemBuffer = getTempBuffer();
+        UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
+
+        IAType itemType = null;
+
+        if (uoltype != null)
+            itemType = uoltype.getItemType();
+        unorderedListBuilder.reset(uoltype);
+
+        Token token = null;
+        boolean inList = true;
+        boolean expectingListItem = false;
+        boolean first = true;
+        do {
+            token = nextToken();
+            if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
+                if (expectingListItem) {
+                    throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+                }
+                inList = false;
+            } else if (token.kind == AdmLexerConstants.COMMA) {
+                if (first) {
+                    throw new AsterixException("Found COMMA before any list item.");
+                }
+                if (expectingListItem) {
+                    throw new AsterixException("Found COMMA while expecting a list item.");
+                }
+                expectingListItem = true;
+            } else {
+                expectingListItem = false;
+                itemBuffer.reset();
+                admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+                unorderedListBuilder.addItem(itemBuffer);
+            }
+            first = false;
+        } while (inList);
+        unorderedListBuilder.write(out, true);
+        returnUnorderedListBuilder(unorderedListBuilder);
+        returnTempBuffer(itemBuffer);
+    }
+
+    private Token nextToken() throws AsterixException {
+        try {
+            return admLexer.next();
+        } catch (ParseException pe) {
+            throw new AsterixException(pe);
+        }
+    }
+
+    private IARecordBuilder getRecordBuilder() {
+        RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
+        if (recBuilder != null)
+            return recBuilder;
+        else
+            return new RecordBuilder();
+    }
+
+    private void returnRecordBuilder(IARecordBuilder recBuilder) {
+        this.recordBuilderPool.add(recBuilder);
+    }
+
+    private IAsterixListBuilder getOrderedListBuilder() {
+        OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
+        if (orderedListBuilder != null)
+            return orderedListBuilder;
+        else
+            return new OrderedListBuilder();
+    }
+
+    private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
+        this.orderedListBuilderPool.add(orderedListBuilder);
+    }
+
+    private IAsterixListBuilder getUnorderedListBuilder() {
+        UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
+        if (unorderedListBuilder != null)
+            return unorderedListBuilder;
+        else
+            return new UnorderedListBuilder();
+    }
+
+    private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
+        this.unorderedListBuilderPool.add(unorderedListBuilder);
+    }
+
+    private ArrayBackedValueStorage getTempBuffer() {
+        ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
+        if (tmpBaaos != null) {
+            return tmpBaaos;
+        } else {
+            return new ArrayBackedValueStorage();
+        }
+    }
+
+    private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
+        baaosPool.add(tempBaaos);
+    }
+
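+    // Parses a typed constructor of the form TYPE("lexical-value"), e.g.
+    // int32("42"), after checking the type against the expected object type.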
+    private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
+        try {
+            Token token = admLexer.next();
+            if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
+                if (checkType(typeTag, objectType, out)) {
+                    token = admLexer.next();
+                    if (token.kind == AdmLexerConstants.STRING_LITERAL) {
+                        switch (typeTag) {
+                            case BOOLEAN:
+                                parseBoolean(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case INT8:
+                                parseInt8(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case INT16:
+                                parseInt16(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case INT32:
+                                parseInt32(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case INT64:
+                                parseInt64(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case FLOAT:
+                                aFloat.setValue(Float.parseFloat(token.image.substring(1, token.image.length() - 1)));
+                                floatSerde.serialize(aFloat, out);
+                                break;
+                            case DOUBLE:
+                                aDouble.setValue(Double.parseDouble(token.image.substring(1, token.image.length() - 1)));
+                                doubleSerde.serialize(aDouble, out);
+                                break;
+                            case STRING:
+                                aString.setValue(token.image.substring(1, token.image.length() - 1));
+                                stringSerde.serialize(aString, out);
+                                break;
+                            case TIME:
+                                parseTime(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case DATE:
+                                parseDate(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case DATETIME:
+                                parseDatetime(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case DURATION:
+                                parseDuration(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case POINT:
+                                parsePoint(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case POINT3D:
+                                parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case CIRCLE:
+                                parseCircle(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case RECTANGLE:
+                                parseRectangle(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case LINE:
+                                parseLine(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+                            case POLYGON:
+                                parsePolygon(token.image.substring(1, token.image.length() - 1), out);
+                                break;
+
+                        }
+                        token = admLexer.next();
+                        if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
+                            return;
+                    }
+                }
+            }
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+        throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+    }
+
+    private void parseBoolean(String bool, DataOutput out) throws AsterixException {
+        String errorMessage = "This can not be an instance of boolean";
+        try {
+            if (bool.equals("true"))
+                booleanSerde.serialize(ABoolean.TRUE, out);
+            else if (bool.equals("false"))
+                booleanSerde.serialize(ABoolean.FALSE, out);
+            else
+                throw new AsterixException(errorMessage);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(errorMessage);
+        }
+    }
+
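+    // The hand-written integer parsers below accept an optional sign, decimal
+    // digits, and an optional type suffix (e.g. "i8"), and treat a negative
+    // accumulated value as overflow.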
+    private void parseInt8(String int8, DataOutput out) throws AsterixException {
+        String errorMessage = "This can not be an instance of int8";
+        try {
+            boolean positive = true;
+            byte value = 0;
+            int offset = 0;
+
+            if (int8.charAt(offset) == '+')
+                offset++;
+            else if (int8.charAt(offset) == '-') {
+                offset++;
+                positive = false;
+            }
+            for (; offset < int8.length(); offset++) {
+                if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
+                    value = (byte) (value * 10 + int8.charAt(offset) - '0');
+                else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8' && offset + 2 == int8.length())
+                    break;
+                else
+                    throw new AsterixException(errorMessage);
+            }
+            if (value < 0)
+                throw new AsterixException(errorMessage);
+            if (value > 0 && !positive)
+                value *= -1;
+            aInt8.setValue(value);
+            int8Serde.serialize(aInt8, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(errorMessage);
+        }
+    }
+
+    private void parseInt16(String int16, DataOutput out) throws AsterixException {
+        String errorMessage = "This can not be an instance of int16";
+        try {
+            boolean positive = true;
+            short value = 0;
+            int offset = 0;
+
+            if (int16.charAt(offset) == '+')
+                offset++;
+            else if (int16.charAt(offset) == '-') {
+                offset++;
+                positive = false;
+            }
+            for (; offset < int16.length(); offset++) {
+                if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
+                    value = (short) (value * 10 + int16.charAt(offset) - '0');
+                else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
+                        && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
+                    break;
+                else
+                    throw new AsterixException(errorMessage);
+            }
+            if (value < 0)
+                throw new AsterixException(errorMessage);
+            if (value > 0 && !positive)
+                value *= -1;
+            aInt16.setValue(value);
+            int16Serde.serialize(aInt16, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(errorMessage);
+        }
+    }
+
+    private void parseInt32(String int32, DataOutput out) throws AsterixException {
+
+        String errorMessage = "This can not be an instance of int32";
+        try {
+            boolean positive = true;
+            int value = 0;
+            int offset = 0;
+
+            if (int32.charAt(offset) == '+')
+                offset++;
+            else if (int32.charAt(offset) == '-') {
+                offset++;
+                positive = false;
+            }
+            for (; offset < int32.length(); offset++) {
+                if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
+                    value = (value * 10 + int32.charAt(offset) - '0');
+                else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
+                        && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
+                    break;
+                else
+                    throw new AsterixException(errorMessage);
+            }
+            if (value < 0)
+                throw new AsterixException(errorMessage);
+            if (value > 0 && !positive)
+                value *= -1;
+
+            aInt32.setValue(value);
+            int32Serde.serialize(aInt32, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(errorMessage);
+        }
+    }
+
+    private void parseInt64(String int64, DataOutput out) throws AsterixException {
+        String errorMessage = "This can not be an instance of int64";
+        try {
+            boolean positive = true;
+            long value = 0;
+            int offset = 0;
+
+            if (int64.charAt(offset) == '+')
+                offset++;
+            else if (int64.charAt(offset) == '-') {
+                offset++;
+                positive = false;
+            }
+            for (; offset < int64.length(); offset++) {
+                if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
+                    value = (value * 10 + int64.charAt(offset) - '0');
+                else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
+                        && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
+                    break;
+                else
+                    throw new AsterixException(errorMessage);
+            }
+            if (value < 0)
+                throw new AsterixException(errorMessage);
+            if (value > 0 && !positive)
+                value *= -1;
+
+            aInt64.setValue(value);
+            int64Serde.serialize(aInt64, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(errorMessage);
+        }
+    }
+
+    private void parsePoint(String point, DataOutput out) throws AsterixException {
+        try {
+            APointSerializerDeserializer.parse(point, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
+        try {
+            APoint3DSerializerDeserializer.parse(point3d, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseCircle(String circle, DataOutput out) throws AsterixException {
+        try {
+            ACircleSerializerDeserializer.parse(circle, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
+        try {
+            ARectangleSerializerDeserializer.parse(rectangle, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseLine(String line, DataOutput out) throws AsterixException {
+        try {
+            ALineSerializerDeserializer.parse(line, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
+        try {
+            APolygonSerializerDeserializer.parse(polygon, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseTime(String time, DataOutput out) throws AsterixException {
+        try {
+            ATimeSerializerDeserializer.parse(time, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
+        try {
+            ADateSerializerDeserializer.parse(date, out);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
new file mode 100644
index 0000000..fc2d7ca
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ADouble;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt16;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AMutableDouble;
+import edu.uci.ics.asterix.om.base.AMutableFloat;
+import edu.uci.ics.asterix.om.base.AMutableInt16;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt64;
+import edu.uci.ics.asterix.om.base.AMutableInt8;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+/**
+ * Base class for data parsers. Includes the common set of definitions for
+ * serializers/deserializers for built-in ADM types.
+ */
+public abstract class AbstractDataParser implements IDataParser {
+
+    protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
+    protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
+    protected AMutableInt32 aInt32 = new AMutableInt32(0);
+    protected AMutableInt64 aInt64 = new AMutableInt64(0);
+    protected AMutableDouble aDouble = new AMutableDouble(0);
+    protected AMutableFloat aFloat = new AMutableFloat(0);
+    protected AMutableString aString = new AMutableString("");
+    protected AMutableString aStringFieldName = new AMutableString("");
+
+    // Serializers
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ADOUBLE);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ASTRING);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AFLOAT);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT8);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT16);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT32);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT64);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ANULL);
+
+}
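
Hoisting the mutable value holders and serde instances into this shared base class lets every concrete parser reuse them across records instead of allocating per value. A minimal sketch of that reuse (the subclass and method names are hypothetical; keeping the class abstract leaves the remaining IDataParser methods to real subclasses):

    // Illustrative only: a format-specific parser reusing the inherited
    // holder and serde; no new objects are allocated per parsed value.
    abstract class SketchDataParser extends AbstractDataParser {
        protected void writeInt32(int value, java.io.DataOutput out)
                throws edu.uci.ics.hyracks.api.exceptions.HyracksDataException {
            aInt32.setValue(value);            // reuse the shared mutable AInt32
            int32Serde.serialize(aInt32, out); // emit the serialized int32
        }
    }
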
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
index 6e83689..cb05529 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
@@ -1,72 +1,91 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.runtime.operators.file;
 
+import java.io.DataOutput;
+import java.io.IOException;
 import java.io.InputStream;
+import java.nio.ByteBuffer;
 
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
 
+/**
+ * An abstract base implementation of ITupleParser. It provides the common
+ * functionality involved in parsing data in an external format and packing
+ * frames with the formed tuples.
+ */
 public abstract class AbstractTupleParser implements ITupleParser {
 
-	// Mutable Types..
-	protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
-	protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
-	protected AMutableInt32 aInt32 = new AMutableInt32(0);
-	protected AMutableInt64 aInt64 = new AMutableInt64(0);
-	protected AMutableDouble aDouble = new AMutableDouble(0);
-	protected AMutableFloat aFloat = new AMutableFloat(0);
-	protected AMutableString aString = new AMutableString("");
-	protected AMutableString aStringFieldName = new AMutableString("");
+    protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+    protected DataOutput dos = tb.getDataOutput();
+    protected final FrameTupleAppender appender;
+    protected final ByteBuffer frame;
+    protected final ARecordType recType;
+    protected final IHyracksTaskContext ctx;
 
-	// Serializers
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.ADOUBLE);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.ASTRING);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.AFLOAT);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.AINT8);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.AINT16);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.AINT32);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.AINT64);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.ABOOLEAN);
-	@SuppressWarnings("unchecked")
-	protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
-			.getSerializerDeserializer(BuiltinType.ANULL);
+    public AbstractTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+        appender = new FrameTupleAppender(ctx.getFrameSize());
+        frame = ctx.allocateFrame();
+        this.recType = recType;
+        this.ctx = ctx;
+    }
 
-	
-	@Override
-	public abstract  void parse(InputStream in, IFrameWriter writer) throws HyracksDataException;
+    public abstract IDataParser getDataParser();
+
+    @Override
+    public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+        appender.reset(frame, true);
+        IDataParser parser = getDataParser();
+        try {
+            parser.initialize(in, recType, true);
+            while (true) {
+                tb.reset();
+                if (!parser.parse(tb.getDataOutput())) {
+                    break;
+                }
+                tb.addFieldEndOffset();
+                addTupleToFrame(writer);
+            }
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(frame, writer);
+            }
+        } catch (AsterixException ae) {
+            throw new HyracksDataException(ae);
+        } catch (IOException ioe) {
+            throw new HyracksDataException(ioe);
+        }
+    }
+
+    protected void addTupleToFrame(IFrameWriter writer) throws HyracksDataException {
+        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+            FrameUtils.flushFrame(frame, writer);
+            appender.reset(frame, true);
+            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                throw new IllegalStateException();
+            }
+        }
+
+    }
+
 }
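
The reworked class is now a template method: parse() owns the format-independent loop that builds tuples and packs and flushes frames, while a subclass contributes only the format-specific record parser through getDataParser(). A hypothetical minimal subclass (the name and the constructor-injected parser are illustrative, and the same package/imports as the file above are assumed; the concrete ADM and delimited-format subclasses are expected elsewhere in this change):

    // Sketch: the subclass only supplies the IDataParser; frame allocation,
    // tuple building, and flushing all stay in AbstractTupleParser.
    class SketchTupleParser extends AbstractTupleParser {
        private final IDataParser dataParser;

        public SketchTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser) {
            super(ctx, recType);
            this.dataParser = dataParser;
        }

        @Override
        public IDataParser getDataParser() {
            return dataParser;
        }
    }
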
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
index 3b0d1ab..a9287c8 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
@@ -1,70 +1,28 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.runtime.operators.file;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
 
+/**
+ * A tuple parser factory that creates tuple parsers capable of parsing
+ * ADM data.
+ */
 public class AdmSchemafullRecordParserFactory implements ITupleParserFactory {
 
     private static final long serialVersionUID = 1L;
@@ -77,930 +35,7 @@
 
     @Override
     public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
-        return new ITupleParser() {
-            private AdmLexer admLexer;
-            private ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-            private DataOutput dos = tb.getDataOutput();
-            private FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-            private ByteBuffer frame = ctx.allocateFrame();
-
-            private int nullableFieldId = 0;
-
-            private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
-            private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
-            private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-            private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
-            private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
-            // Mutable Types..
-            private AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
-            private AMutableInt16 aInt16 = new AMutableInt16((short) 0);
-            private AMutableInt32 aInt32 = new AMutableInt32(0);
-            private AMutableInt64 aInt64 = new AMutableInt64(0);
-            private AMutableDouble aDouble = new AMutableDouble(0);
-            private AMutableFloat aFloat = new AMutableFloat(0);
-            private AMutableString aString = new AMutableString("");
-            private AMutableString aStringFieldName = new AMutableString("");
-
-            // Serializers
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.ADOUBLE);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.ASTRING);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.AFLOAT);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.AINT8);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.AINT16);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.AINT32);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.AINT64);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.ABOOLEAN);
-            @SuppressWarnings("unchecked")
-            private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(BuiltinType.ANULL);
-
-            @Override
-            public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
-                admLexer = new AdmLexer(in);
-                appender.reset(frame, true);
-                int tupleNum = 0;
-                try {
-                    while (true) {
-                        tb.reset();
-                        if (!parseAdmInstance(recType, true, dos)) {
-                            break;
-                        }
-                        tb.addFieldEndOffset();
-                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            FrameUtils.flushFrame(frame, writer);
-                            appender.reset(frame, true);
-                            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                                throw new IllegalStateException();
-                            }
-                        }
-                        tupleNum++;
-                    }
-                    if (appender.getTupleCount() > 0) {
-                        FrameUtils.flushFrame(frame, writer);
-                    }
-                } catch (AsterixException ae) {
-                    throw new HyracksDataException(ae);
-                } catch (IOException ioe) {
-                    throw new HyracksDataException(ioe);
-                }
-            }
-
-            private boolean parseAdmInstance(IAType objectType, Boolean datasetRec, DataOutput out)
-                    throws AsterixException, IOException {
-                Token token;
-                try {
-                    token = admLexer.next();
-                } catch (ParseException pe) {
-                    throw new AsterixException(pe);
-                }
-                if (token.kind == AdmLexerConstants.EOF) {
-                    return false;
-                } else {
-                    admFromLexerStream(token, objectType, out, datasetRec);
-                    return true;
-                }
-            }
-
-            private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
-                    throws AsterixException, IOException {
-
-                switch (token.kind) {
-                    case AdmLexerConstants.NULL_LITERAL: {
-                        if (checkType(ATypeTag.NULL, objectType, out)) {
-                            nullSerde.serialize(ANull.NULL, out);
-                        } else
-                            throw new AsterixException(" This field can not be null ");
-                        break;
-                    }
-                    case AdmLexerConstants.TRUE_LITERAL: {
-                        if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
-                            booleanSerde.serialize(ABoolean.TRUE, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.BOOLEAN_CONS: {
-                        parseConstructor(ATypeTag.BOOLEAN, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.FALSE_LITERAL: {
-                        if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
-                            booleanSerde.serialize(ABoolean.FALSE, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.DOUBLE_LITERAL: {
-                        if (checkType(ATypeTag.DOUBLE, objectType, out)) {
-                            aDouble.setValue(Double.parseDouble(token.image));
-                            doubleSerde.serialize(aDouble, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.DOUBLE_CONS: {
-                        parseConstructor(ATypeTag.DOUBLE, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.FLOAT_LITERAL: {
-                        if (checkType(ATypeTag.FLOAT, objectType, out)) {
-                            aFloat.setValue(Float.parseFloat(token.image));
-                            floatSerde.serialize(aFloat, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.FLOAT_CONS: {
-                        parseConstructor(ATypeTag.FLOAT, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.INT8_LITERAL: {
-                        if (checkType(ATypeTag.INT8, objectType, out)) {
-                            parseInt8(token.image, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.INT8_CONS: {
-                        parseConstructor(ATypeTag.INT8, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.INT16_LITERAL: {
-                        if (checkType(ATypeTag.INT16, objectType, out)) {
-                            parseInt16(token.image, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.INT16_CONS: {
-                        parseConstructor(ATypeTag.INT16, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.INT_LITERAL:
-                    case AdmLexerConstants.INT32_LITERAL: {
-                        if (checkType(ATypeTag.INT32, objectType, out)) {
-                            parseInt32(token.image, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.INT32_CONS: {
-                        parseConstructor(ATypeTag.INT32, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.INT64_LITERAL: {
-                        if (checkType(ATypeTag.INT64, objectType, out)) {
-                            parseInt64(token.image, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.INT64_CONS: {
-                        parseConstructor(ATypeTag.INT64, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.STRING_LITERAL: {
-                        if (checkType(ATypeTag.STRING, objectType, out)) {
-                            aString.setValue(token.image.substring(1, token.image.length() - 1));
-                            stringSerde.serialize(aString, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-                        break;
-                    }
-                    case AdmLexerConstants.STRING_CONS: {
-                        parseConstructor(ATypeTag.STRING, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.DATE_CONS: {
-                        parseConstructor(ATypeTag.DATE, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.TIME_CONS: {
-                        parseConstructor(ATypeTag.TIME, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.DATETIME_CONS: {
-                        parseConstructor(ATypeTag.DATETIME, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.DURATION_CONS: {
-                        parseConstructor(ATypeTag.DURATION, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.POINT_CONS: {
-                        parseConstructor(ATypeTag.POINT, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.POINT3D_CONS: {
-                        parseConstructor(ATypeTag.POINT3D, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.CIRCLE_CONS: {
-                        parseConstructor(ATypeTag.CIRCLE, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.RECTANGLE_CONS: {
-                        parseConstructor(ATypeTag.RECTANGLE, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.LINE_CONS: {
-                        parseConstructor(ATypeTag.LINE, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.POLYGON_CONS: {
-                        parseConstructor(ATypeTag.POLYGON, objectType, out);
-                        break;
-                    }
-                    case AdmLexerConstants.START_UNORDERED_LIST: {
-                        if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
-                            objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
-                            parseUnorderedList((AUnorderedListType) objectType, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
-                        break;
-                    }
-
-                    case AdmLexerConstants.START_ORDERED_LIST: {
-                        if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
-                            objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
-                            parseOrderedList((AOrderedListType) objectType, out);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
-                        break;
-                    }
-                    case AdmLexerConstants.START_RECORD: {
-                        if (checkType(ATypeTag.RECORD, objectType, out)) {
-                            objectType = getComplexType(objectType, ATypeTag.RECORD);
-                            parseRecord((ARecordType) objectType, out, datasetRec);
-                        } else
-                            throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
-                        break;
-                    }
-                    case AdmLexerConstants.EOF: {
-                        break;
-                    }
-                    default: {
-                        throw new AsterixException("Unexpected ADM token kind: "
-                                + admLexer.tokenKindToString(token.kind) + ".");
-                    }
-                }
-            }
-
-            private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
-                try {
-                    Token token = admLexer.next();
-                    if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
-                        if (checkType(typeTag, objectType, out)) {
-                            token = admLexer.next();
-                            if (token.kind == AdmLexerConstants.STRING_LITERAL) {
-                                switch (typeTag) {
-                                    case BOOLEAN:
-                                        parseBoolean(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case INT8:
-                                        parseInt8(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case INT16:
-                                        parseInt16(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case INT32:
-                                        parseInt32(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case INT64:
-                                        parseInt64(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case FLOAT:
-                                        aFloat.setValue(Float.parseFloat(token.image.substring(1,
-                                                token.image.length() - 1)));
-                                        floatSerde.serialize(aFloat, out);
-                                        break;
-                                    case DOUBLE:
-                                        aDouble.setValue(Double.parseDouble(token.image.substring(1,
-                                                token.image.length() - 1)));
-                                        doubleSerde.serialize(aDouble, out);
-                                        break;
-                                    case STRING:
-                                        aString.setValue(token.image.substring(1, token.image.length() - 1));
-                                        stringSerde.serialize(aString, out);
-                                        break;
-                                    case TIME:
-                                        parseTime(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case DATE:
-                                        parseDate(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case DATETIME:
-                                        parseDatetime(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case DURATION:
-                                        parseDuration(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case POINT:
-                                        parsePoint(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case POINT3D:
-                                        parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case CIRCLE:
-                                        parseCircle(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case RECTANGLE:
-                                        parseRectangle(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case LINE:
-                                        parseLine(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-                                    case POLYGON:
-                                        parsePolygon(token.image.substring(1, token.image.length() - 1), out);
-                                        break;
-
-                                }
-                                token = admLexer.next();
-                                if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
-                                    return;
-                            }
-                        }
-                    }
-                } catch (Exception e) {
-                    throw new AsterixException(e);
-                }
-                throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
-            }
-
-            private void parseBoolean(String bool, DataOutput out) throws AsterixException {
-                String errorMessage = "This can not be an instance of boolean";
-                try {
-                    if (bool.equals("true"))
-                        booleanSerde.serialize(ABoolean.TRUE, out);
-                    else if (bool.equals("false"))
-                        booleanSerde.serialize(ABoolean.FALSE, out);
-                    else
-                        throw new AsterixException(errorMessage);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(errorMessage);
-                }
-            }
-
-            private void parseInt8(String int8, DataOutput out) throws AsterixException {
-                String errorMessage = "This can not be an instance of int8";
-                try {
-                    boolean positive = true;
-                    byte value = 0;
-                    int offset = 0;
-
-                    if (int8.charAt(offset) == '+')
-                        offset++;
-                    else if (int8.charAt(offset) == '-') {
-                        offset++;
-                        positive = false;
-                    }
-                    for (; offset < int8.length(); offset++) {
-                        if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
-                            value = (byte) (value * 10 + int8.charAt(offset) - '0');
-                        else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8'
-                                && offset + 2 == int8.length())
-                            break;
-                        else
-                            throw new AsterixException(errorMessage);
-                    }
-                    if (value < 0)
-                        throw new AsterixException(errorMessage);
-                    if (value > 0 && !positive)
-                        value *= -1;
-                    aInt8.setValue(value);
-                    int8Serde.serialize(aInt8, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(errorMessage);
-                }
-            }
-
-            private void parseInt16(String int16, DataOutput out) throws AsterixException {
-                String errorMessage = "This can not be an instance of int16";
-                try {
-                    boolean positive = true;
-                    short value = 0;
-                    int offset = 0;
-
-                    if (int16.charAt(offset) == '+')
-                        offset++;
-                    else if (int16.charAt(offset) == '-') {
-                        offset++;
-                        positive = false;
-                    }
-                    for (; offset < int16.length(); offset++) {
-                        if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
-                            value = (short) (value * 10 + int16.charAt(offset) - '0');
-                        else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
-                                && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
-                            break;
-                        else
-                            throw new AsterixException(errorMessage);
-                    }
-                    if (value < 0)
-                        throw new AsterixException(errorMessage);
-                    if (value > 0 && !positive)
-                        value *= -1;
-                    aInt16.setValue(value);
-                    int16Serde.serialize(aInt16, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(errorMessage);
-                }
-            }
-
-            private void parseInt32(String int32, DataOutput out) throws AsterixException {
-
-                String errorMessage = "This can not be an instance of int32";
-                try {
-                    boolean positive = true;
-                    int value = 0;
-                    int offset = 0;
-
-                    if (int32.charAt(offset) == '+')
-                        offset++;
-                    else if (int32.charAt(offset) == '-') {
-                        offset++;
-                        positive = false;
-                    }
-                    for (; offset < int32.length(); offset++) {
-                        if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
-                            value = (value * 10 + int32.charAt(offset) - '0');
-                        else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
-                                && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
-                            break;
-                        else
-                            throw new AsterixException(errorMessage);
-                    }
-                    if (value < 0)
-                        throw new AsterixException(errorMessage);
-                    if (value > 0 && !positive)
-                        value *= -1;
-
-                    aInt32.setValue(value);
-                    int32Serde.serialize(aInt32, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(errorMessage);
-                }
-            }
-
-            private void parseInt64(String int64, DataOutput out) throws AsterixException {
-                String errorMessage = "This can not be an instance of int64";
-                try {
-                    boolean positive = true;
-                    long value = 0;
-                    int offset = 0;
-
-                    if (int64.charAt(offset) == '+')
-                        offset++;
-                    else if (int64.charAt(offset) == '-') {
-                        offset++;
-                        positive = false;
-                    }
-                    for (; offset < int64.length(); offset++) {
-                        if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
-                            value = (value * 10 + int64.charAt(offset) - '0');
-                        else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
-                                && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
-                            break;
-                        else
-                            throw new AsterixException(errorMessage);
-                    }
-                    if (value < 0)
-                        throw new AsterixException(errorMessage);
-                    if (value > 0 && !positive)
-                        value *= -1;
-
-                    aInt64.setValue(value);
-                    int64Serde.serialize(aInt64, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(errorMessage);
-                }
-            }
-
-            private void parsePoint(String point, DataOutput out) throws AsterixException {
-                try {
-                    APointSerializerDeserializer.parse(point, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
-                try {
-                    APoint3DSerializerDeserializer.parse(point3d, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseCircle(String circle, DataOutput out) throws AsterixException {
-                try {
-                    ACircleSerializerDeserializer.parse(circle, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
-                try {
-                    ARectangleSerializerDeserializer.parse(rectangle, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseLine(String line, DataOutput out) throws AsterixException {
-                try {
-                    ALineSerializerDeserializer.parse(line, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
-                try {
-                    APolygonSerializerDeserializer.parse(polygon, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseTime(String time, DataOutput out) throws AsterixException {
-                try {
-                    ATimeSerializerDeserializer.parse(time, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
-                try {
-                    ADateSerializerDeserializer.parse(date, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
-                try {
-                    ADateTimeSerializerDeserializer.parse(datetime, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-            }
-
-            private void parseDuration(String duration, DataOutput out) throws AsterixException {
-                try {
-                    ADurationSerializerDeserializer.parse(duration, out);
-                } catch (HyracksDataException e) {
-                    throw new AsterixException(e);
-                }
-
-            }
-
-            private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
-                if (aObjectType == null) {
-                    return null;
-                }
-
-                if (aObjectType.getTypeTag() == tag)
-                    return aObjectType;
-
-                if (aObjectType.getTypeTag() == ATypeTag.UNION) {
-                    unionList = ((AUnionType) aObjectType).getUnionList();
-                    for (int i = 0; i < unionList.size(); i++)
-                        if (unionList.get(i).getTypeTag() == tag) {
-                            return unionList.get(i);
-                        }
-                }
-                return null; // wont get here
-            }
-
-            List<IAType> unionList;
-
-            private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
-
-                if (aObjectType == null)
-                    return true;
-
-                if (aObjectType.getTypeTag() != ATypeTag.UNION) {
-                    if (expectedTypeTag == aObjectType.getTypeTag())
-                        return true;
-                } else { // union
-                    unionList = ((AUnionType) aObjectType).getUnionList();
-                    for (int i = 0; i < unionList.size(); i++)
-                        if (unionList.get(i).getTypeTag() == expectedTypeTag)
-                            return true;
-                }
-                return false;
-            }
-
-            private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
-                    AsterixException {
-
-                ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
-                ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
-                IARecordBuilder recBuilder = getRecordBuilder();
-
-                // Boolean[] nulls = null;
-                BitSet nulls = null;
-                if (datasetRec) {
-                    if (recType != null) {
-                        nulls = new BitSet(recType.getFieldNames().length);
-                        recBuilder.reset(recType);
-                    } else
-                        recBuilder.reset(null);
-                } else if (recType != null) {
-                    nulls = new BitSet(recType.getFieldNames().length);
-                    recBuilder.reset(recType);
-                } else
-                    recBuilder.reset(null);
-
-                recBuilder.init();
-                Token token = null;
-                boolean inRecord = true;
-                boolean expectingRecordField = false;
-                boolean first = true;
-
-                Boolean openRecordField = false;
-                int fieldId = 0;
-                IAType fieldType = null;
-                do {
-                    token = nextToken();
-                    switch (token.kind) {
-                        case AdmLexerConstants.END_RECORD: {
-                            if (expectingRecordField) {
-                                throw new AsterixException("Found END_RECORD while expecting a record field.");
-                            }
-                            inRecord = false;
-                            break;
-                        }
-                        case AdmLexerConstants.STRING_LITERAL: {
-                            // we've read the name of the field
-                            // now read the content
-                            fieldNameBuffer.reset();
-                            fieldValueBuffer.reset();
-                            expectingRecordField = false;
-
-                            if (recType != null) {
-                                String fldName = token.image.substring(1, token.image.length() - 1);
-                                fieldId = recBuilder.getFieldId(fldName);
-                                if (fieldId < 0 && !recType.isOpen()) {
-                                    throw new AsterixException("This record is closed, you can not add extra fields !!");
-                                } else if (fieldId < 0 && recType.isOpen()) {
-                                    aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
-                                    stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
-                                    openRecordField = true;
-                                    fieldType = null;
-                                } else {
-                                    // a closed field
-                                    nulls.set(fieldId);
-                                    fieldType = recType.getFieldTypes()[fieldId];
-                                    openRecordField = false;
-                                }
-                            } else {
-                                aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
-                                stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
-                                openRecordField = true;
-                                fieldType = null;
-                            }
-
-                            token = nextToken();
-                            if (token.kind != AdmLexerConstants.COLON) {
-                                throw new AsterixException("Unexpected ADM token kind: "
-                                        + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
-                            }
-
-                            token = nextToken();
-                            this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
-                            if (openRecordField) {
-                                if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
-                                    recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
-                            } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
-                                if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
-                                    if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
-                                        recBuilder.addField(fieldId, fieldValueBuffer);
-                                    }
-                                }
-                            } else {
-                                recBuilder.addField(fieldId, fieldValueBuffer);
-                            }
-
-                            break;
-                        }
-                        case AdmLexerConstants.COMMA: {
-                            if (first) {
-                                throw new AsterixException("Found COMMA before any record field.");
-                            }
-                            if (expectingRecordField) {
-                                throw new AsterixException("Found COMMA while expecting a record field.");
-                            }
-                            expectingRecordField = true;
-                            break;
-                        }
-                        default: {
-                            throw new AsterixException("Unexpected ADM token kind: "
-                                    + admLexer.tokenKindToString(token.kind) + " while parsing record fields.");
-                        }
-                    }
-                    first = false;
-                } while (inRecord);
-
-                if (recType != null) {
-                    nullableFieldId = checkNullConstraints(recType, nulls);
-                    if (nullableFieldId != -1)
-                        throw new AsterixException("Field " + nullableFieldId + " can not be null");
-                }
-                recBuilder.write(out, true);
-                returnRecordBuilder(recBuilder);
-                returnTempBuffer(fieldNameBuffer);
-                returnTempBuffer(fieldValueBuffer);
-            }
-
-            private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
-                boolean isNull = false;
-                for (int i = 0; i < recType.getFieldTypes().length; i++)
-                    if (nulls.get(i) == false) {
-                        IAType type = recType.getFieldTypes()[i];
-                        if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
-                            return i;
-
-                        if (type.getTypeTag() == ATypeTag.UNION) { // union
-                            unionList = ((AUnionType) type).getUnionList();
-                            for (int j = 0; j < unionList.size(); j++)
-                                if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
-                                    isNull = true;
-                                    break;
-                                }
-                            if (!isNull)
-                                return i;
-                        }
-                    }
-                return -1;
-            }
-
-            private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
-
-                ArrayBackedValueStorage itemBuffer = getTempBuffer();
-                OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
-                IAType itemType = null;
-                if (oltype != null)
-                    itemType = oltype.getItemType();
-                orderedListBuilder.reset(oltype);
-
-                Token token = null;
-                boolean inList = true;
-                boolean expectingListItem = false;
-                boolean first = true;
-                do {
-                    token = nextToken();
-                    if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
-                        if (expectingListItem) {
-                            throw new AsterixException("Found END_COLLECTION while expecting a list item.");
-                        }
-                        inList = false;
-                    } else if (token.kind == AdmLexerConstants.COMMA) {
-                        if (first) {
-                            throw new AsterixException("Found COMMA before any list item.");
-                        }
-                        if (expectingListItem) {
-                            throw new AsterixException("Found COMMA while expecting a list item.");
-                        }
-                        expectingListItem = true;
-                    } else {
-                        expectingListItem = false;
-                        itemBuffer.reset();
-
-                        admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
-                        orderedListBuilder.addItem(itemBuffer);
-                    }
-                    first = false;
-                } while (inList);
-                orderedListBuilder.write(out, true);
-                returnOrderedListBuilder(orderedListBuilder);
-                returnTempBuffer(itemBuffer);
-            }
-
-            private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException,
-                    AsterixException {
-
-                ArrayBackedValueStorage itemBuffer = getTempBuffer();
-                UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
-                IAType itemType = null;
-
-                if (uoltype != null)
-                    itemType = uoltype.getItemType();
-                unorderedListBuilder.reset(uoltype);
-
-                Token token = null;
-                boolean inList = true;
-                boolean expectingListItem = false;
-                boolean first = true;
-                do {
-                    token = nextToken();
-                    if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
-                        if (expectingListItem) {
-                            throw new AsterixException("Found END_COLLECTION while expecting a list item.");
-                        }
-                        inList = false;
-                    } else if (token.kind == AdmLexerConstants.COMMA) {
-                        if (first) {
-                            throw new AsterixException("Found COMMA before any list item.");
-                        }
-                        if (expectingListItem) {
-                            throw new AsterixException("Found COMMA while expecting a list item.");
-                        }
-                        expectingListItem = true;
-                    } else {
-                        expectingListItem = false;
-                        itemBuffer.reset();
-                        admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
-                        unorderedListBuilder.addItem(itemBuffer);
-                    }
-                    first = false;
-                } while (inList);
-                unorderedListBuilder.write(out, true);
-                returnUnorderedListBuilder(unorderedListBuilder);
-                returnTempBuffer(itemBuffer);
-            }
-
-            private Token nextToken() throws AsterixException {
-                try {
-                    return admLexer.next();
-                } catch (ParseException pe) {
-                    throw new AsterixException(pe);
-                }
-            }
-
-            private IARecordBuilder getRecordBuilder() {
-                RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
-                if (recBuilder != null)
-                    return recBuilder;
-                else
-                    return new RecordBuilder();
-            }
-
-            private void returnRecordBuilder(IARecordBuilder recBuilder) {
-                this.recordBuilderPool.add(recBuilder);
-            }
-
-            private IAsterixListBuilder getOrderedListBuilder() {
-                OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
-                if (orderedListBuilder != null)
-                    return orderedListBuilder;
-                else
-                    return new OrderedListBuilder();
-            }
-
-            private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
-                this.orderedListBuilderPool.add(orderedListBuilder);
-            }
-
-            private IAsterixListBuilder getUnorderedListBuilder() {
-                UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
-                if (unorderedListBuilder != null)
-                    return unorderedListBuilder;
-                else
-                    return new UnorderedListBuilder();
-            }
-
-            private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
-                this.unorderedListBuilderPool.add(unorderedListBuilder);
-            }
-
-            private ArrayBackedValueStorage getTempBuffer() {
-                ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
-                if (tmpBaaos != null) {
-                    return tmpBaaos;
-                } else {
-                    return new ArrayBackedValueStorage();
-                }
-            }
-
-            private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
-                baaosPool.add(tempBaaos);
-            }
-        };
+        return new AdmTupleParser(ctx, recType);
     }
+
 }
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
index 65223c1..9be4c00 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
@@ -1,1046 +1,35 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.runtime.operators.file;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 
-public class AdmTupleParser extends AbstractTupleParser  {
+/**
+ * An extension of AbstractTupleParser that provides functionality for
+ * parsing ADM formatted input.
+ */
+public class AdmTupleParser extends AbstractTupleParser {
 
-	protected AdmLexer admLexer;
-	protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-	protected DataOutput dos = tb.getDataOutput();
-	protected final FrameTupleAppender appender;
-	protected final ByteBuffer frame;
-	protected final ARecordType recType;
+    public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+        super(ctx, recType);
+    }
 
-	private int nullableFieldId = 0;
-
-	private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
-	private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
-	private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-	private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
-	private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
-
-	public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
-		appender = new FrameTupleAppender(ctx.getFrameSize());
-		frame = ctx.allocateFrame();
-		this.recType = recType;
-
-	}
-
-	@Override
-	public void parse(InputStream in, IFrameWriter writer)
-			throws HyracksDataException {
-		admLexer = new AdmLexer(in);
-		appender.reset(frame, true);
-		int tupleNum = 0;
-		try {
-			while (true) {
-				tb.reset();
-				if (!parseAdmInstance(recType, true, dos)) {
-					break;
-				}
-				tb.addFieldEndOffset();
-				if (!appender.append(tb.getFieldEndOffsets(),
-						tb.getByteArray(), 0, tb.getSize())) {
-					FrameUtils.flushFrame(frame, writer);
-					appender.reset(frame, true);
-					if (!appender.append(tb.getFieldEndOffsets(),
-							tb.getByteArray(), 0, tb.getSize())) {
-						throw new IllegalStateException();
-					}
-				}
-				tupleNum++;
-			}
-			if (appender.getTupleCount() > 0) {
-				FrameUtils.flushFrame(frame, writer);
-			}
-		} catch (AsterixException ae) {
-			throw new HyracksDataException(ae);
-		} catch (IOException ioe) {
-			throw new HyracksDataException(ioe);
-		}
-	}
-
-	protected boolean parseAdmInstance(IAType objectType, Boolean datasetRec,
-			DataOutput out) throws AsterixException, IOException {
-		Token token;
-		try {
-			token = admLexer.next();
-		} catch (ParseException pe) {
-			throw new AsterixException(pe);
-		}
-		if (token.kind == AdmLexerConstants.EOF) {
-			return false;
-		} else {
-			admFromLexerStream(token, objectType, out, datasetRec);
-			return true;
-		}
-	}
-
-	private void admFromLexerStream(Token token, IAType objectType,
-			DataOutput out, Boolean datasetRec) throws AsterixException,
-			IOException {
-
-		switch (token.kind) {
-		case AdmLexerConstants.NULL_LITERAL: {
-			if (checkType(ATypeTag.NULL, objectType, out)) {
-				nullSerde.serialize(ANull.NULL, out);
-			} else
-				throw new AsterixException(" This field can not be null ");
-			break;
-		}
-		case AdmLexerConstants.TRUE_LITERAL: {
-			if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
-				booleanSerde.serialize(ABoolean.TRUE, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.BOOLEAN_CONS: {
-			parseConstructor(ATypeTag.BOOLEAN, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.FALSE_LITERAL: {
-			if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
-				booleanSerde.serialize(ABoolean.FALSE, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.DOUBLE_LITERAL: {
-			if (checkType(ATypeTag.DOUBLE, objectType, out)) {
-				aDouble.setValue(Double.parseDouble(token.image));
-				doubleSerde.serialize(aDouble, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.DOUBLE_CONS: {
-			parseConstructor(ATypeTag.DOUBLE, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.FLOAT_LITERAL: {
-			if (checkType(ATypeTag.FLOAT, objectType, out)) {
-				aFloat.setValue(Float.parseFloat(token.image));
-				floatSerde.serialize(aFloat, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.FLOAT_CONS: {
-			parseConstructor(ATypeTag.FLOAT, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.INT8_LITERAL: {
-			if (checkType(ATypeTag.INT8, objectType, out)) {
-				parseInt8(token.image, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.INT8_CONS: {
-			parseConstructor(ATypeTag.INT8, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.INT16_LITERAL: {
-			if (checkType(ATypeTag.INT16, objectType, out)) {
-				parseInt16(token.image, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.INT16_CONS: {
-			parseConstructor(ATypeTag.INT16, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.INT_LITERAL:
-		case AdmLexerConstants.INT32_LITERAL: {
-			if (checkType(ATypeTag.INT32, objectType, out)) {
-				parseInt32(token.image, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.INT32_CONS: {
-			parseConstructor(ATypeTag.INT32, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.INT64_LITERAL: {
-			if (checkType(ATypeTag.INT64, objectType, out)) {
-				parseInt64(token.image, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.INT64_CONS: {
-			parseConstructor(ATypeTag.INT64, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.STRING_LITERAL: {
-			if (checkType(ATypeTag.STRING, objectType, out)) {
-				aString.setValue(token.image.substring(1,
-						token.image.length() - 1));
-				stringSerde.serialize(aString, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeName());
-			break;
-		}
-		case AdmLexerConstants.STRING_CONS: {
-			parseConstructor(ATypeTag.STRING, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.DATE_CONS: {
-			parseConstructor(ATypeTag.DATE, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.TIME_CONS: {
-			parseConstructor(ATypeTag.TIME, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.DATETIME_CONS: {
-			parseConstructor(ATypeTag.DATETIME, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.DURATION_CONS: {
-			parseConstructor(ATypeTag.DURATION, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.POINT_CONS: {
-			parseConstructor(ATypeTag.POINT, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.POINT3D_CONS: {
-			parseConstructor(ATypeTag.POINT3D, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.CIRCLE_CONS: {
-			parseConstructor(ATypeTag.CIRCLE, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.RECTANGLE_CONS: {
-			parseConstructor(ATypeTag.RECTANGLE, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.LINE_CONS: {
-			parseConstructor(ATypeTag.LINE, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.POLYGON_CONS: {
-			parseConstructor(ATypeTag.POLYGON, objectType, out);
-			break;
-		}
-		case AdmLexerConstants.START_UNORDERED_LIST: {
-			if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
-				objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
-				parseUnorderedList((AUnorderedListType) objectType, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeTag());
-			break;
-		}
-
-		case AdmLexerConstants.START_ORDERED_LIST: {
-			if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
-				objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
-				parseOrderedList((AOrderedListType) objectType, out);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeTag());
-			break;
-		}
-		case AdmLexerConstants.START_RECORD: {
-			if (checkType(ATypeTag.RECORD, objectType, out)) {
-				objectType = getComplexType(objectType, ATypeTag.RECORD);
-				parseRecord((ARecordType) objectType, out, datasetRec);
-			} else
-				throw new AsterixException(mismatchErrorMessage
-						+ objectType.getTypeTag());
-			break;
-		}
-		case AdmLexerConstants.EOF: {
-			break;
-		}
-		default: {
-			throw new AsterixException("Unexpected ADM token kind: "
-					+ admLexer.tokenKindToString(token.kind) + ".");
-		}
-		}
-	}
-
-	private void parseDatetime(String datetime, DataOutput out)
-			throws AsterixException, IOException {
-		try {
-			ADateTimeSerializerDeserializer.parse(datetime, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseDuration(String duration, DataOutput out)
-			throws AsterixException {
-		try {
-			ADurationSerializerDeserializer.parse(duration, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-
-	}
-
-	private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
-		if (aObjectType == null) {
-			return null;
-		}
-
-		if (aObjectType.getTypeTag() == tag)
-			return aObjectType;
-
-		if (aObjectType.getTypeTag() == ATypeTag.UNION) {
-			unionList = ((AUnionType) aObjectType).getUnionList();
-			for (int i = 0; i < unionList.size(); i++)
-				if (unionList.get(i).getTypeTag() == tag) {
-					return unionList.get(i);
-				}
-		}
-		return null; // wont get here
-	}
-
-	List<IAType> unionList;
-
-	private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType,
-			DataOutput out) throws IOException {
-
-		if (aObjectType == null)
-			return true;
-
-		if (aObjectType.getTypeTag() != ATypeTag.UNION) {
-			if (expectedTypeTag == aObjectType.getTypeTag())
-				return true;
-		} else { // union
-			unionList = ((AUnionType) aObjectType).getUnionList();
-			for (int i = 0; i < unionList.size(); i++)
-				if (unionList.get(i).getTypeTag() == expectedTypeTag)
-					return true;
-		}
-		return false;
-	}
-
-	private void parseRecord(ARecordType recType, DataOutput out,
-			Boolean datasetRec) throws IOException, AsterixException {
-
-		ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
-		ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
-		IARecordBuilder recBuilder = getRecordBuilder();
-
-		// Boolean[] nulls = null;
-		BitSet nulls = null;
-		if (datasetRec) {
-			if (recType != null) {
-				nulls = new BitSet(recType.getFieldNames().length);
-				recBuilder.reset(recType);
-			} else
-				recBuilder.reset(null);
-		} else if (recType != null) {
-			nulls = new BitSet(recType.getFieldNames().length);
-			recBuilder.reset(recType);
-		} else
-			recBuilder.reset(null);
-
-		recBuilder.init();
-		Token token = null;
-		boolean inRecord = true;
-		boolean expectingRecordField = false;
-		boolean first = true;
-
-		Boolean openRecordField = false;
-		int fieldId = 0;
-		IAType fieldType = null;
-		do {
-			token = nextToken();
-			switch (token.kind) {
-			case AdmLexerConstants.END_RECORD: {
-				if (expectingRecordField) {
-					throw new AsterixException(
-							"Found END_RECORD while expecting a record field.");
-				}
-				inRecord = false;
-				break;
-			}
-			case AdmLexerConstants.STRING_LITERAL: {
-				// we've read the name of the field
-				// now read the content
-				fieldNameBuffer.reset();
-				fieldValueBuffer.reset();
-				expectingRecordField = false;
-
-				if (recType != null) {
-					String fldName = token.image.substring(1,
-							token.image.length() - 1);
-					fieldId = recBuilder.getFieldId(fldName);
-					if (fieldId < 0 && !recType.isOpen()) {
-						throw new AsterixException(
-								"This record is closed, you can not add extra fields !!");
-					} else if (fieldId < 0 && recType.isOpen()) {
-						aStringFieldName.setValue(token.image.substring(1,
-								token.image.length() - 1));
-						stringSerde.serialize(aStringFieldName,
-								fieldNameBuffer.getDataOutput());
-						openRecordField = true;
-						fieldType = null;
-					} else {
-						// a closed field
-						nulls.set(fieldId);
-						fieldType = recType.getFieldTypes()[fieldId];
-						openRecordField = false;
-					}
-				} else {
-					aStringFieldName.setValue(token.image.substring(1,
-							token.image.length() - 1));
-					stringSerde.serialize(aStringFieldName,
-							fieldNameBuffer.getDataOutput());
-					openRecordField = true;
-					fieldType = null;
-				}
-
-				token = nextToken();
-				if (token.kind != AdmLexerConstants.COLON) {
-					throw new AsterixException("Unexpected ADM token kind: "
-							+ admLexer.tokenKindToString(token.kind)
-							+ " while expecting \":\".");
-				}
-
-				token = nextToken();
-				this.admFromLexerStream(token, fieldType,
-						fieldValueBuffer.getDataOutput(), false);
-				if (openRecordField) {
-					if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
-							.serialize())
-						recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
-				} else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
-					if (NonTaggedFormatUtil
-							.isOptionalField((AUnionType) recType
-									.getFieldTypes()[fieldId])) {
-						if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
-								.serialize()) {
-							recBuilder.addField(fieldId, fieldValueBuffer);
-						}
-					}
-				} else {
-					recBuilder.addField(fieldId, fieldValueBuffer);
-				}
-
-				break;
-			}
-			case AdmLexerConstants.COMMA: {
-				if (first) {
-					throw new AsterixException(
-							"Found COMMA before any record field.");
-				}
-				if (expectingRecordField) {
-					throw new AsterixException(
-							"Found COMMA while expecting a record field.");
-				}
-				expectingRecordField = true;
-				break;
-			}
-			default: {
-				throw new AsterixException("Unexpected ADM token kind: "
-						+ admLexer.tokenKindToString(token.kind)
-						+ " while parsing record fields.");
-			}
-			}
-			first = false;
-		} while (inRecord);
-
-		if (recType != null) {
-			nullableFieldId = checkNullConstraints(recType, nulls);
-			if (nullableFieldId != -1)
-				throw new AsterixException("Field " + nullableFieldId
-						+ " can not be null");
-		}
-		recBuilder.write(out, true);
-		returnRecordBuilder(recBuilder);
-		returnTempBuffer(fieldNameBuffer);
-		returnTempBuffer(fieldValueBuffer);
-	}
-
-	private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
-		boolean isNull = false;
-		for (int i = 0; i < recType.getFieldTypes().length; i++)
-			if (nulls.get(i) == false) {
-				IAType type = recType.getFieldTypes()[i];
-				if (type.getTypeTag() != ATypeTag.NULL
-						&& type.getTypeTag() != ATypeTag.UNION)
-					return i;
-
-				if (type.getTypeTag() == ATypeTag.UNION) { // union
-					unionList = ((AUnionType) type).getUnionList();
-					for (int j = 0; j < unionList.size(); j++)
-						if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
-							isNull = true;
-							break;
-						}
-					if (!isNull)
-						return i;
-				}
-			}
-		return -1;
-	}
-
-	private void parseOrderedList(AOrderedListType oltype, DataOutput out)
-			throws IOException, AsterixException {
-
-		ArrayBackedValueStorage itemBuffer = getTempBuffer();
-		OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
-		IAType itemType = null;
-		if (oltype != null)
-			itemType = oltype.getItemType();
-		orderedListBuilder.reset(oltype);
-
-		Token token = null;
-		boolean inList = true;
-		boolean expectingListItem = false;
-		boolean first = true;
-		do {
-			token = nextToken();
-			if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
-				if (expectingListItem) {
-					throw new AsterixException(
-							"Found END_COLLECTION while expecting a list item.");
-				}
-				inList = false;
-			} else if (token.kind == AdmLexerConstants.COMMA) {
-				if (first) {
-					throw new AsterixException(
-							"Found COMMA before any list item.");
-				}
-				if (expectingListItem) {
-					throw new AsterixException(
-							"Found COMMA while expecting a list item.");
-				}
-				expectingListItem = true;
-			} else {
-				expectingListItem = false;
-				itemBuffer.reset();
-
-				admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
-						false);
-				orderedListBuilder.addItem(itemBuffer);
-			}
-			first = false;
-		} while (inList);
-		orderedListBuilder.write(out, true);
-		returnOrderedListBuilder(orderedListBuilder);
-		returnTempBuffer(itemBuffer);
-	}
-
-	private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out)
-			throws IOException, AsterixException {
-
-		ArrayBackedValueStorage itemBuffer = getTempBuffer();
-		UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
-		IAType itemType = null;
-
-		if (uoltype != null)
-			itemType = uoltype.getItemType();
-		unorderedListBuilder.reset(uoltype);
-
-		Token token = null;
-		boolean inList = true;
-		boolean expectingListItem = false;
-		boolean first = true;
-		do {
-			token = nextToken();
-			if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
-				if (expectingListItem) {
-					throw new AsterixException(
-							"Found END_COLLECTION while expecting a list item.");
-				}
-				inList = false;
-			} else if (token.kind == AdmLexerConstants.COMMA) {
-				if (first) {
-					throw new AsterixException(
-							"Found COMMA before any list item.");
-				}
-				if (expectingListItem) {
-					throw new AsterixException(
-							"Found COMMA while expecting a list item.");
-				}
-				expectingListItem = true;
-			} else {
-				expectingListItem = false;
-				itemBuffer.reset();
-				admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
-						false);
-				unorderedListBuilder.addItem(itemBuffer);
-			}
-			first = false;
-		} while (inList);
-		unorderedListBuilder.write(out, true);
-		returnUnorderedListBuilder(unorderedListBuilder);
-		returnTempBuffer(itemBuffer);
-	}
-
-	private Token nextToken() throws AsterixException {
-		try {
-			return admLexer.next();
-		} catch (ParseException pe) {
-			throw new AsterixException(pe);
-		}
-	}
-
-	private IARecordBuilder getRecordBuilder() {
-		RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
-		if (recBuilder != null)
-			return recBuilder;
-		else
-			return new RecordBuilder();
-	}
-
-	private void returnRecordBuilder(IARecordBuilder recBuilder) {
-		this.recordBuilderPool.add(recBuilder);
-	}
-
-	private IAsterixListBuilder getOrderedListBuilder() {
-		OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool
-				.poll();
-		if (orderedListBuilder != null)
-			return orderedListBuilder;
-		else
-			return new OrderedListBuilder();
-	}
-
-	private void returnOrderedListBuilder(
-	        IAsterixListBuilder orderedListBuilder) {
-		this.orderedListBuilderPool.add(orderedListBuilder);
-	}
-
-	private IAsterixListBuilder getUnorderedListBuilder() {
-		UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool
-				.poll();
-		if (unorderedListBuilder != null)
-			return unorderedListBuilder;
-		else
-			return new UnorderedListBuilder();
-	}
-
-	private void returnUnorderedListBuilder(
-	        IAsterixListBuilder unorderedListBuilder) {
-		this.unorderedListBuilderPool.add(unorderedListBuilder);
-	}
-
-	private ArrayBackedValueStorage getTempBuffer() {
-		ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
-		if (tmpBaaos != null) {
-			return tmpBaaos;
-		} else {
-			return new ArrayBackedValueStorage();
-		}
-	}
-
-	private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
-		baaosPool.add(tempBaaos);
-	}
-
-	private void parseConstructor(ATypeTag typeTag, IAType objectType,
-			DataOutput out) throws AsterixException {
-		try {
-			Token token = admLexer.next();
-			if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
-				if (checkType(typeTag, objectType, out)) {
-					token = admLexer.next();
-					if (token.kind == AdmLexerConstants.STRING_LITERAL) {
-						switch (typeTag) {
-						case BOOLEAN:
-							parseBoolean(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case INT8:
-							parseInt8(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case INT16:
-							parseInt16(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case INT32:
-							parseInt32(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case INT64:
-							parseInt64(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case FLOAT:
-							aFloat.setValue(Float.parseFloat(token.image
-									.substring(1, token.image.length() - 1)));
-							floatSerde.serialize(aFloat, out);
-							break;
-						case DOUBLE:
-							aDouble.setValue(Double.parseDouble(token.image
-									.substring(1, token.image.length() - 1)));
-							doubleSerde.serialize(aDouble, out);
-							break;
-						case STRING:
-							aString.setValue(token.image.substring(1,
-									token.image.length() - 1));
-							stringSerde.serialize(aString, out);
-							break;
-						case TIME:
-							parseTime(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case DATE:
-							parseDate(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case DATETIME:
-							parseDatetime(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case DURATION:
-							parseDuration(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case POINT:
-							parsePoint(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case POINT3D:
-							parsePoint3d(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case CIRCLE:
-							parseCircle(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case RECTANGLE:
-							parseRectangle(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case LINE:
-							parseLine(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-						case POLYGON:
-							parsePolygon(
-									token.image.substring(1,
-											token.image.length() - 1), out);
-							break;
-
-						}
-						token = admLexer.next();
-						if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
-							return;
-					}
-				}
-			}
-		} catch (Exception e) {
-			throw new AsterixException(e);
-		}
-		throw new AsterixException(mismatchErrorMessage
-				+ objectType.getTypeName());
-	}
-
-	private void parseBoolean(String bool, DataOutput out)
-			throws AsterixException {
-		String errorMessage = "This can not be an instance of boolean";
-		try {
-			if (bool.equals("true"))
-				booleanSerde.serialize(ABoolean.TRUE, out);
-			else if (bool.equals("false"))
-				booleanSerde.serialize(ABoolean.FALSE, out);
-			else
-				throw new AsterixException(errorMessage);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(errorMessage);
-		}
-	}
-
-	private void parseInt8(String int8, DataOutput out) throws AsterixException {
-		String errorMessage = "This can not be an instance of int8";
-		try {
-			boolean positive = true;
-			byte value = 0;
-			int offset = 0;
-
-			if (int8.charAt(offset) == '+')
-				offset++;
-			else if (int8.charAt(offset) == '-') {
-				offset++;
-				positive = false;
-			}
-			for (; offset < int8.length(); offset++) {
-				if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
-					value = (byte) (value * 10 + int8.charAt(offset) - '0');
-				else if (int8.charAt(offset) == 'i'
-						&& int8.charAt(offset + 1) == '8'
-						&& offset + 2 == int8.length())
-					break;
-				else
-					throw new AsterixException(errorMessage);
-			}
-			if (value < 0)
-				throw new AsterixException(errorMessage);
-			if (value > 0 && !positive)
-				value *= -1;
-			aInt8.setValue(value);
-			int8Serde.serialize(aInt8, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(errorMessage);
-		}
-	}
-
-	private void parseInt16(String int16, DataOutput out)
-			throws AsterixException {
-		String errorMessage = "This can not be an instance of int16";
-		try {
-			boolean positive = true;
-			short value = 0;
-			int offset = 0;
-
-			if (int16.charAt(offset) == '+')
-				offset++;
-			else if (int16.charAt(offset) == '-') {
-				offset++;
-				positive = false;
-			}
-			for (; offset < int16.length(); offset++) {
-				if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
-					value = (short) (value * 10 + int16.charAt(offset) - '0');
-				else if (int16.charAt(offset) == 'i'
-						&& int16.charAt(offset + 1) == '1'
-						&& int16.charAt(offset + 2) == '6'
-						&& offset + 3 == int16.length())
-					break;
-				else
-					throw new AsterixException(errorMessage);
-			}
-			if (value < 0)
-				throw new AsterixException(errorMessage);
-			if (value > 0 && !positive)
-				value *= -1;
-			aInt16.setValue(value);
-			int16Serde.serialize(aInt16, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(errorMessage);
-		}
-	}
-
-	private void parseInt32(String int32, DataOutput out)
-			throws AsterixException {
-
-		String errorMessage = "This can not be an instance of int32";
-		try {
-			boolean positive = true;
-			int value = 0;
-			int offset = 0;
-
-			if (int32.charAt(offset) == '+')
-				offset++;
-			else if (int32.charAt(offset) == '-') {
-				offset++;
-				positive = false;
-			}
-			for (; offset < int32.length(); offset++) {
-				if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
-					value = (value * 10 + int32.charAt(offset) - '0');
-				else if (int32.charAt(offset) == 'i'
-						&& int32.charAt(offset + 1) == '3'
-						&& int32.charAt(offset + 2) == '2'
-						&& offset + 3 == int32.length())
-					break;
-				else
-					throw new AsterixException(errorMessage);
-			}
-			if (value < 0)
-				throw new AsterixException(errorMessage);
-			if (value > 0 && !positive)
-				value *= -1;
-
-			aInt32.setValue(value);
-			int32Serde.serialize(aInt32, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(errorMessage);
-		}
-	}
-
-	private void parseInt64(String int64, DataOutput out)
-			throws AsterixException {
-		String errorMessage = "This can not be an instance of int64";
-		try {
-			boolean positive = true;
-			long value = 0;
-			int offset = 0;
-
-			if (int64.charAt(offset) == '+')
-				offset++;
-			else if (int64.charAt(offset) == '-') {
-				offset++;
-				positive = false;
-			}
-			for (; offset < int64.length(); offset++) {
-				if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
-					value = (value * 10 + int64.charAt(offset) - '0');
-				else if (int64.charAt(offset) == 'i'
-						&& int64.charAt(offset + 1) == '6'
-						&& int64.charAt(offset + 2) == '4'
-						&& offset + 3 == int64.length())
-					break;
-				else
-					throw new AsterixException(errorMessage);
-			}
-			if (value < 0)
-				throw new AsterixException(errorMessage);
-			if (value > 0 && !positive)
-				value *= -1;
-
-			aInt64.setValue(value);
-			int64Serde.serialize(aInt64, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(errorMessage);
-		}
-	}
-
-	private void parsePoint(String point, DataOutput out)
-			throws AsterixException {
-		try {
-			APointSerializerDeserializer.parse(point, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parsePoint3d(String point3d, DataOutput out)
-			throws AsterixException {
-		try {
-			APoint3DSerializerDeserializer.parse(point3d, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseCircle(String circle, DataOutput out)
-			throws AsterixException {
-		try {
-			ACircleSerializerDeserializer.parse(circle, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseRectangle(String rectangle, DataOutput out)
-			throws AsterixException {
-		try {
-			ARectangleSerializerDeserializer.parse(rectangle, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseLine(String line, DataOutput out) throws AsterixException {
-		try {
-			ALineSerializerDeserializer.parse(line, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parsePolygon(String polygon, DataOutput out)
-			throws AsterixException, IOException {
-		try {
-			APolygonSerializerDeserializer.parse(polygon, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseTime(String time, DataOutput out) throws AsterixException {
-		try {
-			ATimeSerializerDeserializer.parse(time, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
-
-	private void parseDate(String date, DataOutput out)
-			throws AsterixException, IOException {
-		try {
-			ADateSerializerDeserializer.parse(date, out);
-		} catch (HyracksDataException e) {
-			throw new AsterixException(e);
-		}
-	}
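+    // A fresh ADMDataParser is handed out on every call; the base tuple parser
+    // presumably drives the per-frame parse loop and only needs a data parser
+    // for the record payloads.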
+    @Override
+    public IDataParser getDataParser() {
+        return new ADMDataParser();
+    }
 
 }
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
new file mode 100644
index 0000000..c9560fe
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Arrays;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
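+/**
+ * Parses delimited text (one record per line, fields separated by a
+ * configurable delimiter) into ADM records, one record per parse() call.
+ * A hypothetical usage sketch, assuming Hyracks' UTF8StringParserFactory
+ * for a schema of two string fields:
+ *
+ * <pre>
+ * IValueParserFactory[] factories = { UTF8StringParserFactory.INSTANCE,
+ *         UTF8StringParserFactory.INSTANCE };
+ * DelimitedDataParser parser = new DelimitedDataParser(recordType, factories, '|');
+ * parser.initialize(in, recordType, true);
+ * while (parser.parse(out)) {
+ *     // each successful call writes one serialized record to out
+ * }
+ * </pre>
+ */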
+public class DelimitedDataParser extends AbstractDataParser implements IDataParser {
+
+    protected final IValueParserFactory[] valueParserFactories;
+    protected final char fieldDelimiter;
+    protected final ARecordType recordType;
+
+    private IARecordBuilder recBuilder;
+    private ArrayBackedValueStorage fieldValueBuffer;
+    private DataOutput fieldValueBufferOutput;
+    private IValueParser[] valueParsers;
+    private FieldCursor cursor;
+    private byte[] fieldTypeTags;
+    private int[] fldIds;
+    private ArrayBackedValueStorage[] nameBuffers;
+
+    public DelimitedDataParser(ARecordType recordType, IValueParserFactory[] valueParserFactories, char fieldDelimiter) {
+        this.recordType = recordType;
+        this.valueParserFactories = valueParserFactories;
+        this.fieldDelimiter = fieldDelimiter;
+    }
+
+    @Override
+    public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+            IOException {
+
+        valueParsers = new IValueParser[valueParserFactories.length];
+        for (int i = 0; i < valueParserFactories.length; ++i) {
+            valueParsers[i] = valueParserFactories[i].createValueParser();
+        }
+
+        fieldValueBuffer = new ArrayBackedValueStorage();
+        fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
+        recBuilder = new RecordBuilder();
+        recBuilder.reset(recordType);
+        recBuilder.init();
+
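+        // Serialize each declared field's type tag once up front; parse() prepends
+        // the tag to every field value before invoking the value parser.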
+        int n = recordType.getFieldNames().length;
+        fieldTypeTags = new byte[n];
+        for (int i = 0; i < n; i++) {
+            ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+            fieldTypeTags[i] = tag.serialize();
+        }
+
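+        // Resolve each schema field to its id in the record builder; fields unknown
+        // to an open record type get their names pre-serialized instead.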
+        fldIds = new int[n];
+        nameBuffers = new ArrayBackedValueStorage[n];
+        AMutableString str = new AMutableString(null);
+        for (int i = 0; i < n; i++) {
+            String name = recordType.getFieldNames()[i];
+            fldIds[i] = recBuilder.getFieldId(name);
+            if (fldIds[i] < 0) {
+                if (!recordType.isOpen()) {
+                    throw new HyracksDataException("Illegal field " + name + " in closed type " + recordType);
+                } else {
+                    nameBuffers[i] = new ArrayBackedValueStorage();
+                    fieldNameToBytes(name, str, nameBuffers[i]);
+                }
+            }
+        }
+
+        cursor = new FieldCursor(new InputStreamReader(in));
+
+    }
+
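+    // Parses the next record from the cursor, if any, and writes its serialized
+    // form to out; returns false once the input is exhausted.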
+    @Override
+    public boolean parse(DataOutput out) throws AsterixException, IOException {
+
+        if (cursor.nextRecord()) {
+            recBuilder.reset(recordType);
+            recBuilder.init();
+            for (int i = 0; i < valueParsers.length; ++i) {
+                if (!cursor.nextField()) {
+                    break;
+                }
+                fieldValueBuffer.reset();
+                fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
+                valueParsers[i]
+                        .parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, fieldValueBufferOutput);
+                if (fldIds[i] < 0) {
+                    recBuilder.addField(nameBuffers[i], fieldValueBuffer);
+                } else {
+                    recBuilder.addField(fldIds[i], fieldValueBuffer);
+                }
+            }
+            recBuilder.write(out, true);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
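+    // Serializes an open field's name as an ADM string so it can be passed to
+    // recBuilder.addField(nameBuffer, valueBuffer).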
+    protected void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
+            throws HyracksDataException {
+        buffer.reset();
+        DataOutput out = buffer.getDataOutput();
+        str.setValue(fieldName);
+        try {
+            stringSerde.serialize(str, out);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    protected enum State {
+        INIT,
+        IN_RECORD,
+        EOR,
+        CR,
+        EOF
+    }
+
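+    // A small state machine over a growable char buffer that scans the input for
+    // record boundaries ('\n', '\r', "\r\n") and field boundaries (the delimiter).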
+    protected class FieldCursor {
+        private static final int INITIAL_BUFFER_SIZE = 4096;
+        private static final int INCREMENT = 4096;
+
+        private final Reader in;
+
+        private char[] buffer;
+        private int start;
+        private int end;
+        private State state;
+
+        private int fStart;
+        private int fEnd;
+
+        public FieldCursor(Reader in) {
+            this.in = in;
+            buffer = new char[INITIAL_BUFFER_SIZE];
+            start = 0;
+            end = 0;
+            state = State.INIT;
+        }
+
+        public boolean nextRecord() throws IOException {
+            while (true) {
+                switch (state) {
+                    case INIT:
+                        boolean eof = !readMore();
+                        if (eof) {
+                            state = State.EOF;
+                            return false;
+                        } else {
+                            state = State.IN_RECORD;
+                            return true;
+                        }
+
+                    case IN_RECORD:
+                        int p = start;
+                        while (true) {
+                            if (p >= end) {
+                                int s = start;
+                                eof = !readMore();
+                                if (eof) {
+                                    state = State.EOF;
+                                    return start < end;
+                                }
+                                p -= (s - start);
+                            }
+                            char ch = buffer[p];
+                            if (ch == '\n') {
+                                start = p + 1;
+                                state = State.EOR;
+                                break;
+                            } else if (ch == '\r') {
+                                start = p + 1;
+                                state = State.CR;
+                                break;
+                            }
+                            ++p;
+                        }
+                        break;
+
+                    case CR:
+                        if (start >= end) {
+                            eof = !readMore();
+                            if (eof) {
+                                state = State.EOF;
+                                return false;
+                            }
+                        }
+                        char ch = buffer[start];
+                        if (ch == '\n') {
+                            ++start;
+                            state = State.EOR;
+                        } else {
+                            state = State.IN_RECORD;
+                            return true;
+                        }
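+                        // no break: a consumed "\r\n" falls through to the EOR case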
+
+                    case EOR:
+                        if (start >= end) {
+                            eof = !readMore();
+                            if (eof) {
+                                state = State.EOF;
+                                return false;
+                            }
+                        }
+                        state = State.IN_RECORD;
+                        return start < end;
+
+                    case EOF:
+                        return false;
+                }
+            }
+        }
+
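+        // Advances to the next field of the current record, setting fStart/fEnd to
+        // the field's bounds in the buffer; returns false when no record is in progress.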
+        public boolean nextField() throws IOException {
+            switch (state) {
+                case INIT:
+                case EOR:
+                case EOF:
+                case CR:
+                    return false;
+
+                case IN_RECORD:
+                    boolean eof;
+                    int p = start;
+                    while (true) {
+                        if (p >= end) {
+                            int s = start;
+                            eof = !readMore();
+                            if (eof) {
+                                state = State.EOF;
+                                return true;
+                            }
+                            p -= (s - start);
+                        }
+                        char ch = buffer[p];
+                        if (ch == fieldDelimiter) {
+                            fStart = start;
+                            fEnd = p;
+                            start = p + 1;
+                            return true;
+                        } else if (ch == '\n') {
+                            fStart = start;
+                            fEnd = p;
+                            start = p + 1;
+                            state = State.EOR;
+                            return true;
+                        } else if (ch == '\r') {
+                            fStart = start;
+                            fEnd = p;
+                            start = p + 1;
+                            state = State.CR;
+                            return true;
+                        }
+                        ++p;
+                    }
+            }
+            throw new IllegalStateException();
+        }
+
+        protected boolean readMore() throws IOException {
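+            // Compact: shift unconsumed characters to the front of the buffer,
+            // then grow it if it is still full before reading more input.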
+            if (start > 0) {
+                System.arraycopy(buffer, start, buffer, 0, end - start);
+            }
+            end -= start;
+            start = 0;
+
+            if (end == buffer.length) {
+                buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
+            }
+
+            int n = in.read(buffer, end, buffer.length - end);
+            if (n < 0) {
+                return false;
+            }
+            end += n;
+            return true;
+        }
+
+        public int getfStart() {
+            return fStart;
+        }
+
+        public void setfStart(int fStart) {
+            this.fStart = fStart;
+        }
+
+        public int getfEnd() {
+            return fEnd;
+        }
+
+        public void setfEnd(int fEnd) {
+            this.fEnd = fEnd;
+        }
+
+        public char[] getBuffer() {
+            return buffer;
+        }
+    }
+
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
index abd2ade..c029b64 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
@@ -1,331 +1,40 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.runtime.operators.file;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.om.base.AMutableString;
 import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
 
+/**
+ * An extension of AbstractTupleParser that provides functionality for
+ * parsing delimited files.
+ */
 public class DelimitedDataTupleParser extends AbstractTupleParser {
 
-	protected final IValueParserFactory[] valueParserFactories;
-	protected final char fieldDelimiter;
-	protected final IHyracksTaskContext ctx;
-	protected final ARecordType recType;
-	protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-	protected DataOutput recDos = tb.getDataOutput();
-	protected final FrameTupleAppender appender;
-	protected final ByteBuffer frame;
-	
+    private final DelimitedDataParser dataParser;
 
-	public DelimitedDataTupleParser(IHyracksTaskContext ctx,
-			ARecordType recType, IValueParserFactory[] valueParserFactories,
-			char fieldDelimter) {
-		this.valueParserFactories = valueParserFactories;
-		this.fieldDelimiter = fieldDelimter;
-		this.ctx = ctx;
-		this.recType = recType;
-		appender = new FrameTupleAppender(ctx.getFrameSize());
-		frame = ctx.allocateFrame();
-	}
+    public DelimitedDataTupleParser(IHyracksTaskContext ctx, ARecordType recType,
+            IValueParserFactory[] valueParserFactories, char fieldDelimiter) {
+        super(ctx, recType);
+        dataParser = new DelimitedDataParser(recType, valueParserFactories, fieldDelimiter);
+    }
 
-	@Override
-	public void parse(InputStream in, IFrameWriter writer)
-			throws HyracksDataException {
-		try {
-			IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
-			for (int i = 0; i < valueParserFactories.length; ++i) {
-				valueParsers[i] = valueParserFactories[i].createValueParser();
-			}
-		
-			appender.reset(frame, true);
-			
-
-			ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
-			DataOutput fieldValueBufferOutput = fieldValueBuffer
-					.getDataOutput();
-			IARecordBuilder recBuilder = new RecordBuilder();
-			recBuilder.reset(recType);
-			recBuilder.init();
-
-			int n = recType.getFieldNames().length;
-			byte[] fieldTypeTags = new byte[n];
-			for (int i = 0; i < n; i++) {
-				ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
-				fieldTypeTags[i] = tag.serialize();
-			}
-
-			int[] fldIds = new int[n];
-			ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
-			AMutableString str = new AMutableString(null);
-			for (int i = 0; i < n; i++) {
-				String name = recType.getFieldNames()[i];
-				fldIds[i] = recBuilder.getFieldId(name);
-				if (fldIds[i] < 0) {
-					if (!recType.isOpen()) {
-						throw new HyracksDataException("Illegal field " + name
-								+ " in closed type " + recType);
-					} else {
-						nameBuffers[i] = new ArrayBackedValueStorage();
-						fieldNameToBytes(name, str, nameBuffers[i]);
-					}
-				}
-			}
-
-			FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
-			while (cursor.nextRecord()) {
-				tb.reset();
-				recBuilder.reset(recType);
-				recBuilder.init();
-
-				for (int i = 0; i < valueParsers.length; ++i) {
-					if (!cursor.nextField()) {
-						break;
-					}
-					fieldValueBuffer.reset();
-					fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
-					valueParsers[i]
-							.parse(cursor.buffer, cursor.fStart, cursor.fEnd
-									- cursor.fStart, fieldValueBufferOutput);
-					if (fldIds[i] < 0) {
-						recBuilder.addField(nameBuffers[i], fieldValueBuffer);
-					} else {
-						recBuilder.addField(fldIds[i], fieldValueBuffer);
-					}
-				}
-				recBuilder.write(recDos, true);
-				tb.addFieldEndOffset();
-
-				if (!appender.append(tb.getFieldEndOffsets(),
-						tb.getByteArray(), 0, tb.getSize())) {
-					FrameUtils.flushFrame(frame, writer);
-					appender.reset(frame, true);
-					if (!appender.append(tb.getFieldEndOffsets(),
-							tb.getByteArray(), 0, tb.getSize())) {
-						throw new IllegalStateException();
-					}
-				}
-			}
-			if (appender.getTupleCount() > 0) {
-				FrameUtils.flushFrame(frame, writer);
-			}
-		} catch (IOException e) {
-			throw new HyracksDataException(e);
-		}
-	}
-
-	protected void fieldNameToBytes(String fieldName, AMutableString str,
-			ArrayBackedValueStorage buffer) throws HyracksDataException {
-		buffer.reset();
-		DataOutput out = buffer.getDataOutput();
-		str.setValue(fieldName);
-		try {
-			stringSerde.serialize(str, out);
-		} catch (IOException e) {
-			throw new HyracksDataException(e);
-		}
-	}
-
-	protected enum State {
-		INIT, IN_RECORD, EOR, CR, EOF
-	}
-
-	protected class FieldCursor {
-		private static final int INITIAL_BUFFER_SIZE = 4096;
-		private static final int INCREMENT = 4096;
-
-		private final Reader in;
-
-		private char[] buffer;
-		private int start;
-		private int end;
-		private State state;
-
-		private int fStart;
-		private int fEnd;
-
-		public FieldCursor(Reader in) {
-			this.in = in;
-			buffer = new char[INITIAL_BUFFER_SIZE];
-			start = 0;
-			end = 0;
-			state = State.INIT;
-		}
-
-		public boolean nextRecord() throws IOException {
-			while (true) {
-				switch (state) {
-				case INIT:
-					boolean eof = !readMore();
-					if (eof) {
-						state = State.EOF;
-						return false;
-					} else {
-						state = State.IN_RECORD;
-						return true;
-					}
-
-				case IN_RECORD:
-					int p = start;
-					while (true) {
-						if (p >= end) {
-							int s = start;
-							eof = !readMore();
-							if (eof) {
-								state = State.EOF;
-								return start < end;
-							}
-							p -= (s - start);
-						}
-						char ch = buffer[p];
-						if (ch == '\n') {
-							start = p + 1;
-							state = State.EOR;
-							break;
-						} else if (ch == '\r') {
-							start = p + 1;
-							state = State.CR;
-							break;
-						}
-						++p;
-					}
-					break;
-
-				case CR:
-					if (start >= end) {
-						eof = !readMore();
-						if (eof) {
-							state = State.EOF;
-							return false;
-						}
-					}
-					char ch = buffer[start];
-					if (ch == '\n') {
-						++start;
-						state = State.EOR;
-					} else {
-						state = State.IN_RECORD;
-						return true;
-					}
-
-				case EOR:
-					if (start >= end) {
-						eof = !readMore();
-						if (eof) {
-							state = State.EOF;
-							return false;
-						}
-					}
-					state = State.IN_RECORD;
-					return start < end;
-
-				case EOF:
-					return false;
-				}
-			}
-		}
-
-		public boolean nextField() throws IOException {
-			switch (state) {
-			case INIT:
-			case EOR:
-			case EOF:
-			case CR:
-				return false;
-
-			case IN_RECORD:
-				boolean eof;
-				int p = start;
-				while (true) {
-					if (p >= end) {
-						int s = start;
-						eof = !readMore();
-						if (eof) {
-							state = State.EOF;
-							return true;
-						}
-						p -= (s - start);
-					}
-					char ch = buffer[p];
-					if (ch == fieldDelimiter) {
-						fStart = start;
-						fEnd = p;
-						start = p + 1;
-						return true;
-					} else if (ch == '\n') {
-						fStart = start;
-						fEnd = p;
-						start = p + 1;
-						state = State.EOR;
-						return true;
-					} else if (ch == '\r') {
-						fStart = start;
-						fEnd = p;
-						start = p + 1;
-						state = State.CR;
-						return true;
-					}
-					++p;
-				}
-			}
-			throw new IllegalStateException();
-		}
-
-		protected boolean readMore() throws IOException {
-			if (start > 0) {
-				System.arraycopy(buffer, start, buffer, 0, end - start);
-			}
-			end -= start;
-			start = 0;
-
-			if (end == buffer.length) {
-				buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
-			}
-
-			int n = in.read(buffer, end, buffer.length - end);
-			if (n < 0) {
-				return false;
-			}
-			end += n;
-			return true;
-		}
-
-		public int getfStart() {
-			return fStart;
-		}
-
-		public void setfStart(int fStart) {
-			this.fStart = fStart;
-		}
-
-		public int getfEnd() {
-			return fEnd;
-		}
-
-		public void setfEnd(int fEnd) {
-			this.fEnd = fEnd;
-		}
-
-		public char[] getBuffer() {
-			return buffer;
-		}
-	}
+    @Override
+    public IDataParser getDataParser() {
+        return dataParser;
+    }
 
 }
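
With this refactoring, the frame-building loop lives in AbstractTupleParser and a concrete tuple parser only has to supply its format-specific IDataParser. As a sketch of the extension point, a parser for a hypothetical new format would reduce to the following (MyFormatDataParser is an assumed IDataParser implementation, not part of this patch):

    // Sketch only: MyFormatDataParser is hypothetical; everything else follows
    // the pattern of DelimitedDataTupleParser above.
    public class MyFormatTupleParser extends AbstractTupleParser {
        private final IDataParser dataParser;

        public MyFormatTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
            super(ctx, recType);
            dataParser = new MyFormatDataParser(); // assumed format-specific parser
        }

        @Override
        public IDataParser getDataParser() {
            return dataParser;
        }
    }
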
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
new file mode 100644
index 0000000..f23aeac
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Interface implemented by parsers that translate raw input data (such as ADM
+ * or delimited text) into serialized ADM records.
+ */
+public interface IDataParser {
+
+    /**
+     * Initialize the parser prior to actual parsing.
+     * 
+     * @param in
+     *            input stream to be parsed
+     * @param recordType
+     *            record type associated with input data
+     * @param datasetRec
+     *            boolean flag set to true if input data represents dataset
+     *            records.
+     * @throws AsterixException
+     * @throws IOException
+     */
+    public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+            IOException;
+
+    /**
+     * Parse data from the source input stream and output ADM records.
+     *
+     * @param out
+     *            DataOutput instance used for writing the parser output.
+     * @return true if a record was parsed and written out; false if the end
+     *         of the input has been reached.
+     * @throws AsterixException
+     * @throws IOException
+     */
+    public boolean parse(DataOutput out) throws AsterixException, IOException;
+}
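
The contract this interface defines is: call initialize once per input stream, then call parse repeatedly until it returns false. A minimal sketch of that loop, mirroring how the tuple parsers in this patch drive it (the input stream and record type are assumptions for illustration; imports follow the files above):

    InputStream in = new FileInputStream("sample.adm");  // hypothetical input
    IDataParser parser = new ADMDataParser();
    ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
    parser.initialize(in, recordType, true);             // true: dataset records
    while (true) {
        tb.reset();
        if (!parser.parse(tb.getDataOutput())) {
            break;                                       // end of input
        }
        tb.addFieldEndOffset();
        // tb now holds one serialized ADM record, ready to go into a frame
    }
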
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
index cfe94ef..86fba12 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
@@ -1,42 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.runtime.operators.file;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
 
+/**
+ * A factory for tuple parsers capable of parsing delimited data.
+ */
 public class NtDelimitedDataTupleParserFactory implements ITupleParserFactory {
     private static final long serialVersionUID = 1L;
     protected ARecordType recordType;
     protected IValueParserFactory[] valueParserFactories;
     protected char fieldDelimiter;
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ASTRING);
 
     public NtDelimitedDataTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
             char fieldDelimiter) {
@@ -47,271 +39,7 @@
 
     @Override
     public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
-        return new ITupleParser() {
-            @Override
-            public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
-                try {
-                    IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
-                    for (int i = 0; i < valueParserFactories.length; ++i) {
-                        valueParsers[i] = valueParserFactories[i].createValueParser();
-                    }
-                    ByteBuffer frame = ctx.allocateFrame();
-                    FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-                    appender.reset(frame, true);
-                    ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-                    DataOutput recDos = tb.getDataOutput();
-
-                    ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
-                    DataOutput fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
-                    IARecordBuilder recBuilder = new RecordBuilder();
-                    recBuilder.reset(recordType);
-                    recBuilder.init();
-
-                    int n = recordType.getFieldNames().length;
-                    byte[] fieldTypeTags = new byte[n];
-                    for (int i = 0; i < n; i++) {
-                        ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
-                        fieldTypeTags[i] = tag.serialize();
-                    }
-
-                    int[] fldIds = new int[n];
-                    ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
-                    AMutableString str = new AMutableString(null);
-                    for (int i = 0; i < n; i++) {
-                        String name = recordType.getFieldNames()[i];
-                        fldIds[i] = recBuilder.getFieldId(name);
-                        if (fldIds[i] < 0) {
-                            if (!recordType.isOpen()) {
-                                throw new HyracksDataException("Illegal field " + name + " in closed type "
-                                        + recordType);
-                            } else {
-                                nameBuffers[i] = new ArrayBackedValueStorage();
-                                fieldNameToBytes(name, str, nameBuffers[i]);
-                            }
-                        }
-                    }
-
-                    FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
-                    while (cursor.nextRecord()) {
-                        tb.reset();
-                        recBuilder.reset(recordType);
-                        recBuilder.init();
-
-                        for (int i = 0; i < valueParsers.length; ++i) {
-                            if (!cursor.nextField()) {
-                                break;
-                            }
-                            fieldValueBuffer.reset();
-                            fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
-                            valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart,
-                                    fieldValueBufferOutput);
-                            if (fldIds[i] < 0) {
-                                recBuilder.addField(nameBuffers[i], fieldValueBuffer);
-                            } else {
-                                recBuilder.addField(fldIds[i], fieldValueBuffer);
-                            }
-                        }
-                        recBuilder.write(recDos, true);
-                        tb.addFieldEndOffset();
-
-                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            FrameUtils.flushFrame(frame, writer);
-                            appender.reset(frame, true);
-                            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                                throw new IllegalStateException();
-                            }
-                        }
-                    }
-                    if (appender.getTupleCount() > 0) {
-                        FrameUtils.flushFrame(frame, writer);
-                    }
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-
-            private void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
-                    throws HyracksDataException {
-                buffer.reset();
-                DataOutput out = buffer.getDataOutput();
-                str.setValue(fieldName);
-                try {
-                    stringSerde.serialize(str, out);
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-
-        };
-    }
-
-    private enum State {
-        INIT,
-        IN_RECORD,
-        EOR,
-        CR,
-        EOF
-    }
-
-    private class FieldCursor {
-        private static final int INITIAL_BUFFER_SIZE = 4096;
-        private static final int INCREMENT = 4096;
-
-        private final Reader in;
-
-        private char[] buffer;
-        private int start;
-        private int end;
-        private State state;
-
-        private int fStart;
-        private int fEnd;
-
-        public FieldCursor(Reader in) {
-            this.in = in;
-            buffer = new char[INITIAL_BUFFER_SIZE];
-            start = 0;
-            end = 0;
-            state = State.INIT;
-        }
-
-        public boolean nextRecord() throws IOException {
-            while (true) {
-                switch (state) {
-                    case INIT:
-                        boolean eof = !readMore();
-                        if (eof) {
-                            state = State.EOF;
-                            return false;
-                        } else {
-                            state = State.IN_RECORD;
-                            return true;
-                        }
-
-                    case IN_RECORD:
-                        int p = start;
-                        while (true) {
-                            if (p >= end) {
-                                int s = start;
-                                eof = !readMore();
-                                if (eof) {
-                                    state = State.EOF;
-                                    return start < end;
-                                }
-                                p -= (s - start);
-                            }
-                            char ch = buffer[p];
-                            if (ch == '\n') {
-                                start = p + 1;
-                                state = State.EOR;
-                                break;
-                            } else if (ch == '\r') {
-                                start = p + 1;
-                                state = State.CR;
-                                break;
-                            }
-                            ++p;
-                        }
-                        break;
-
-                    case CR:
-                        if (start >= end) {
-                            eof = !readMore();
-                            if (eof) {
-                                state = State.EOF;
-                                return false;
-                            }
-                        }
-                        char ch = buffer[start];
-                        if (ch == '\n') {
-                            ++start;
-                            state = State.EOR;
-                        } else {
-                            state = State.IN_RECORD;
-                            return true;
-                        }
-
-                    case EOR:
-                        if (start >= end) {
-                            eof = !readMore();
-                            if (eof) {
-                                state = State.EOF;
-                                return false;
-                            }
-                        }
-                        state = State.IN_RECORD;
-                        return start < end;
-
-                    case EOF:
-                        return false;
-                }
-            }
-        }
-
-        public boolean nextField() throws IOException {
-            switch (state) {
-                case INIT:
-                case EOR:
-                case EOF:
-                case CR:
-                    return false;
-
-                case IN_RECORD:
-                    boolean eof;
-                    int p = start;
-                    while (true) {
-                        if (p >= end) {
-                            int s = start;
-                            eof = !readMore();
-                            if (eof) {
-                                state = State.EOF;
-                                return true;
-                            }
-                            p -= (s - start);
-                        }
-                        char ch = buffer[p];
-                        if (ch == fieldDelimiter) {
-                            fStart = start;
-                            fEnd = p;
-                            start = p + 1;
-                            return true;
-                        } else if (ch == '\n') {
-                            fStart = start;
-                            fEnd = p;
-                            start = p + 1;
-                            state = State.EOR;
-                            return true;
-                        } else if (ch == '\r') {
-                            fStart = start;
-                            fEnd = p;
-                            start = p + 1;
-                            state = State.CR;
-                            return true;
-                        }
-                        ++p;
-                    }
-            }
-            throw new IllegalStateException();
-        }
-
-        private boolean readMore() throws IOException {
-            if (start > 0) {
-                System.arraycopy(buffer, start, buffer, 0, end - start);
-            }
-            end -= start;
-            start = 0;
-
-            if (end == buffer.length) {
-                buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
-            }
-
-            int n = in.read(buffer, end, buffer.length - end);
-            if (n < 0) {
-                return false;
-            }
-            end += n;
-            return true;
-        }
+        return new DelimitedDataTupleParser(ctx, recordType, valueParserFactories, fieldDelimiter);
     }
 
 }
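
The factory's role is now limited to handing back the shared parser; its use from a file-scan operator is unchanged. A sketch, with the input stream and frame writer assumed to come from the enclosing operator:

    ITupleParserFactory factory = new NtDelimitedDataTupleParserFactory(recordType,
            valueParserFactories, '|');                  // '|' as an example delimiter
    ITupleParser tupleParser = factory.createTupleParser(ctx);
    tupleParser.parse(in, writer);                       // pushes frames to writer
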
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
new file mode 100644
index 0000000..84b989d
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
+import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+/**
+ * An adapter that simulates a feed from the contents of a source file. The file can be on the local file
+ * system or on HDFS. The feed ends when the content of the source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapter extends FileSystemBasedAdapter implements ITypedDatasourceAdapter,
+        IManagedFeedAdapter {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final String KEY_FILE_SYSTEM = "fs";
+    public static final String LOCAL_FS = "localfs";
+    public static final String HDFS = "hdfs";
+
+    private final FileSystemBasedAdapter coreAdapter;
+    private final Map<String, String> configuration;
+    private final String fileSystem;
+    private final String format;
+
+    public RateControlledFileSystemBasedAdapter(ARecordType atype, Map<String, String> configuration) throws Exception {
+        super(atype);
+        checkRequiredArgs(configuration);
+        fileSystem = configuration.get(KEY_FILE_SYSTEM);
+        String adapterFactoryClass = null;
+        if (fileSystem.equalsIgnoreCase(LOCAL_FS)) {
+            adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory";
+        } else if (fileSystem.equalsIgnoreCase(HDFS)) {
+            adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory";
+        } else {
+            throw new AsterixException("Unsupported file system type " + fileSystem);
+        }
+        format = configuration.get(KEY_FORMAT);
+        IGenericDatasetAdapterFactory adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(
+                adapterFactoryClass).newInstance();
+        coreAdapter = (FileSystemBasedAdapter) adapterFactory.createAdapter(configuration, atype);
+        this.configuration = configuration;
+    }
+
+    private void checkRequiredArgs(Map<String, String> configuration) throws Exception {
+        if (configuration.get(KEY_FILE_SYSTEM) == null) {
+            throw new Exception("File system type not specified. (fs=?) File system could be 'localfs' or 'hdfs'");
+        }
+        if (configuration.get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME) == null) {
+            throw new Exception("Record type not specified (output-type-name=?)");
+        }
+        if (configuration.get(KEY_PATH) == null) {
+            throw new Exception("File path not specified (path=?)");
+        }
+        if (configuration.get(KEY_FORMAT) == null) {
+            throw new Exception("File format not specified (format=?)");
+        }
+    }
+
+    @Override
+    public InputStream getInputStream(int partition) throws IOException {
+        return coreAdapter.getInputStream(partition);
+    }
+
+    @Override
+    public void initialize(IHyracksTaskContext ctx) throws Exception {
+        coreAdapter.initialize(ctx);
+        this.ctx = ctx;
+    }
+
+    @Override
+    public void configure(Map<String, String> arguments) throws Exception {
+        coreAdapter.configure(arguments);
+    }
+
+    @Override
+    public AdapterType getAdapterType() {
+        return coreAdapter.getAdapterType();
+    }
+
+    @Override
+    protected ITupleParser getTupleParser() throws Exception {
+        ITupleParser parser = null;
+        if (format.equals(FORMAT_DELIMITED_TEXT)) {
+            parser = getRateControlledDelimitedDataTupleParser((ARecordType) atype);
+        } else if (format.equals(FORMAT_ADM)) {
+            parser = getRateControlledADMDataTupleParser((ARecordType) atype);
+        } else {
+            throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
+        }
+        return parser;
+
+    }
+
+    protected ITupleParser getRateControlledDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
+        ITupleParser parser;
+        int n = recordType.getFieldTypes().length;
+        IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+        for (int i = 0; i < n; i++) {
+            ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+            IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+            if (vpf == null) {
+                throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
+            }
+            fieldParserFactories[i] = vpf;
+
+        }
+        String delimiterValue = configuration.get(KEY_DELIMITER);
+        if (delimiterValue == null || delimiterValue.length() != 1) {
+            throw new AsterixException("improper delimiter: a single delimiter character is required");
+        }
+
+        char delimiter = delimiterValue.charAt(0);
+        parser = new RateControlledTupleParserFactory(recordType, fieldParserFactories, delimiter, configuration)
+                .createTupleParser(ctx);
+        return parser;
+    }
+
+    protected ITupleParser getRateControlledADMDataTupleParser(ARecordType recordType) throws AsterixException {
+        ITupleParser parser = null;
+        try {
+            parser = new RateControlledTupleParserFactory(recordType, configuration).createTupleParser(ctx);
+            return parser;
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+
+    }
+
+    @Override
+    public ARecordType getAdapterOutputType() {
+        return (ARecordType) atype;
+    }
+
+    @Override
+    public void alter(Map<String, String> properties) {
+        ((RateControlledTupleParser) parser).setInterTupleInterval(Long.parseLong(properties
+                .get(RateControlledTupleParser.INTER_TUPLE_INTERVAL)));
+    }
+
+    @Override
+    public void stop() {
+        ((RateControlledTupleParser) parser).stop();
+    }
+
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        return coreAdapter.getPartitionConstraint();
+    }
+}
+
+class RateControlledTupleParserFactory implements ITupleParserFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final ARecordType recordType;
+    private final IDataParser dataParser;
+    private final Map<String, String> configuration;
+
+    public RateControlledTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
+            char fieldDelimiter, Map<String, String> configuration) {
+        this.recordType = recordType;
+        dataParser = new DelimitedDataParser(recordType, valueParserFactories, fieldDelimiter);
+        this.configuration = configuration;
+    }
+
+    public RateControlledTupleParserFactory(ARecordType recordType, Map<String, String> configuration) {
+        this.recordType = recordType;
+        dataParser = new ADMDataParser();
+        this.configuration = configuration;
+    }
+
+    @Override
+    public ITupleParser createTupleParser(IHyracksTaskContext ctx) {
+        return new RateControlledTupleParser(ctx, recordType, dataParser, configuration);
+    }
+
+}
+
+class RateControlledTupleParser extends AbstractTupleParser {
+
+    private final IDataParser dataParser;
+    private long interTupleInterval;
+    private boolean delayConfigured;
+    private boolean continueIngestion = true;
+
+    public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
+
+    public RateControlledTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
+            Map<String, String> configuration) {
+        super(ctx, recType);
+        this.dataParser = dataParser;
+        String propValue = configuration.get(INTER_TUPLE_INTERVAL);
+        if (propValue != null) {
+            interTupleInterval = Long.parseLong(propValue);
+        } else {
+            interTupleInterval = 0;
+        }
+        delayConfigured = interTupleInterval != 0;
+    }
+
+    public void setInterTupleInterval(long val) {
+        this.interTupleInterval = val;
+        this.delayConfigured = val > 0;
+    }
+
+    public void stop() {
+        continueIngestion = false;
+    }
+
+    @Override
+    public IDataParser getDataParser() {
+        return dataParser;
+    }
+
+    @Override
+    public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+        appender.reset(frame, true);
+        IDataParser parser = getDataParser();
+        try {
+            parser.initialize(in, recType, true);
+            while (continueIngestion) {
+                tb.reset();
+                if (!parser.parse(tb.getDataOutput())) {
+                    break;
+                }
+                tb.addFieldEndOffset();
+                if (delayConfigured) {
+                    Thread.sleep(interTupleInterval);
+                }
+                addTupleToFrame(writer);
+            }
+            if (appender.getTupleCount() > 0) {
+                FrameUtils.flushFrame(frame, writer);
+            }
+        } catch (AsterixException ae) {
+            throw new HyracksDataException(ae);
+        } catch (IOException ioe) {
+            throw new HyracksDataException(ioe);
+        } catch (InterruptedException ie) {
+            throw new HyracksDataException(ie);
+        }
+    }
+}
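
The adapter is driven entirely by its configuration map: the four keys validated in checkRequiredArgs are mandatory, and the optional tuple-interval key (a delay in milliseconds between tuples) turns on throttling. A sketch of a configuration exercising the rate-controlled ADM path (the path and type name are placeholders, and the format string assumes the conventional value of FORMAT_ADM):

    Map<String, String> configuration = new HashMap<String, String>();
    configuration.put("fs", "localfs");                  // or "hdfs"
    configuration.put("path", "nc1://data/tweets.adm");  // placeholder location
    configuration.put("output-type-name", "TweetType");  // placeholder record type
    configuration.put("format", "adm");                  // assumed FORMAT_ADM value
    configuration.put("tuple-interval", "10");           // sleep 10 ms per tuple
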
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
new file mode 100644
index 0000000..6c32acb
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * Factory class for creating a {@link RateControlledFileSystemBasedAdapter}.
+ * The adapter simulates a feed from the contents of a source file. The file
+ * can be on the local file system or on HDFS. The feed ends when the content
+ * of the source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapterFactory implements IGenericDatasetAdapterFactory {
+
+    @Override
+    public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
+        return new RateControlledFileSystemBasedAdapter((ARecordType) type, configuration);
+    }
+
+    @Override
+    public String getName() {
+        return "file_feed";
+    }
+
+}
\ No newline at end of file
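
End to end, the factory and adapter compose as below; createAdapter and alter are the methods defined in this patch, while the configuration map is the one sketched earlier and the task context is assumed to be supplied by the runtime:

    IGenericDatasetAdapterFactory factory = new RateControlledFileSystemBasedAdapterFactory();
    RateControlledFileSystemBasedAdapter adapter = (RateControlledFileSystemBasedAdapter) factory
            .createAdapter(configuration, recordType);
    adapter.initialize(ctx);                             // ctx: IHyracksTaskContext
    // later, once ingestion is running, slow the feed without restarting it:
    Map<String, String> props = new HashMap<String, String>();
    props.put("tuple-interval", "100");
    adapter.alter(props);
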
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index c440c2a..7db5cd9 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -28,9 +28,6 @@
 	    <dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-storage-am-common</artifactId>
-  		<version>0.2.2-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
   	</dependency>
 	</dependencies>
 </project>
diff --git a/pom.xml b/pom.xml
index 506d57d..5080813 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,6 +20,11 @@
 		</plugins>
 	</build>
 
+    <properties>
+    	<algebricks.version>0.2.3-SNAPSHOT</algebricks.version>
+    	<hyracks.version>0.2.3-SNAPSHOT</hyracks.version>
+    </properties>
+
 	<scm>
 		<connection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</connection>
 		<developerConnection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</developerConnection>
@@ -137,4 +142,65 @@
             <optional>true</optional>
         </dependency>
     </dependencies>
+    <dependencyManagement>
+    	<dependencies>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>algebricks-compiler</artifactId>
+    			<version>${algebricks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-dataflow-std</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-control-cc</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-control-nc</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-server</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-cli</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-dataflow-hadoop</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-storage-am-btree</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-storage-am-rtree</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-storage-am-invertedindex</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    		<dependency>
+    			<groupId>edu.uci.ics.hyracks</groupId>
+    			<artifactId>hyracks-storage-am-common</artifactId>
+    			<version>${hyracks.version}</version>
+    		</dependency>
+    	</dependencies>
+    </dependencyManagement>
 </project>