Merge branch 'gerrit/march-hare'

Change-Id: I183476cb9e32d7f300fd310afee0126eb9c6af31
diff --git a/.gitignore b/.gitignore
index 1dcc3c2..295d874 100644
--- a/.gitignore
+++ b/.gitignore
@@ -27,5 +27,3 @@
 *.swp
 .m2*
 ß
-
-.SpiderSilkWorkingDirectory
diff --git a/asterixdb/asterix-app/data/csv/01.csv b/asterixdb/asterix-app/data/csv/01.csv
new file mode 100644
index 0000000..6957e76
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/01.csv
@@ -0,0 +1,3 @@
+1,,"good","recommend"
+2,,"bad","not recommend"
+3,,"good",
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/csv/02.csv b/asterixdb/asterix-app/data/csv/02.csv
new file mode 100644
index 0000000..630843f
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/02.csv
@@ -0,0 +1,3 @@
+4,2018,"good","recommend"
+5,2018,,"not recommend"
+6,2018,"good",
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/csv/sample_09.csv b/asterixdb/asterix-app/data/csv/sample_09.csv
new file mode 100644
index 0000000..b14219d
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/sample_09.csv
@@ -0,0 +1,17 @@
+a,b,c,d,e
+0,",     boo", 1,2,3
+1,"","",❤,
+2,3,4,\n,
+3,"quoted ""f"" field",,,
+4,4,,,
+5,"{""vehicle"": ""car"", ""location"": [2.0, 0.1]}",,,
+6,2,3,,
+7,8,9,,
+8,2,3,,
+9,8,9,,
+10,"field
+""f""
+with multiple lines",,,
+11,4,,,
+12,5,ʤ,,
+John,Green,111 downtown st.,"city, state",99999
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/csv/sample_10.csv b/asterixdb/asterix-app/data/csv/sample_10.csv
new file mode 100644
index 0000000..3beee08
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/sample_10.csv
@@ -0,0 +1,39 @@
+1,"?/ Text ending with a backslash  / \",2000-09-03 07:12:22
+2,non quoted text!yes......,2003-08-09 22:34:19
+3,Text with more sentences. Another sentence.,2003-09-12 05:29:12
+4,"Quoted text.. yes.",2003-09-13 17:21:49
+5,Another text,2003-01-21 23:31:41
+6,Text with' quotes.,2003-09-14 20:15:50
+7,Text with quote's,2003-09-14 18:34:03
+8,"Text with quotes '",2003-01-28 20:32:13
+9,"Text with quotes """,2003-01-18 11:44:15
+10,Text with question marks!?!?,2003-09-18 06:25:56
+11,""" Text that starts with quotes",2003-09-12 00:31:24
+12,"Text with \"" backslash and quotes",2003-09-13 20:30:06
+13,"Text with \"" backslash and quotes\""",2003-09-14 16:20:36
+14,"Text that has comma ,",2003-09-12 08:21:18
+15,"Text that has "","" quoted comma",2003-09-12 08:21:18
+16,",Text that has ",2003-09-12 08:21:18
+17,","",Text that has ",2003-09-12 08:21:18
+18,"Text with commas,inside it., yes",2003-09-13 23:42:14
+19,"Text that has \n inside ",2003-09-12 08:21:18
+20,"Text that has \\\n inside ",2003-09-12 08:21:18
+21,text with :),2003-09-05 19:15:34
+22,"Text that has \\\"" inside \\",2003-09-12 08:21:18
+23,"Text that has \\\"" inside \\""",2003-09-12 08:21:18
+24,"""text that spans multiple
+Lines and more
+Lines ane more and more
+Lines ...
+And yet more lines
+And more""",2011-09-19 01:09:09
+25,"Text ""
+more lines",2011-09-19 01:09:09
+26,"""
+",2011-09-19 01:09:09
+27,"Text",""
+28,"Text","2011-09-19 01:09:09"
+29,"Text\.","2011-09-19 01:09:09"
+30,Text\.,"2011-09-19 01:09:09"
+31,"\.Text","2011-09-19 01:09:09"
+32,\.Text,"2011-09-19 01:09:09"
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/csv/sample_11.csv b/asterixdb/asterix-app/data/csv/sample_11.csv
new file mode 100644
index 0000000..b9a9571
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/sample_11.csv
@@ -0,0 +1,4 @@
+1,","", b", 3,4,5
+","", b",4, 3,4,5
+,,,,
+"dd",,,,
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/csv/sample_12.csv b/asterixdb/asterix-app/data/csv/sample_12.csv
new file mode 100644
index 0000000..2ab7c6d
--- /dev/null
+++ b/asterixdb/asterix-app/data/csv/sample_12.csv
@@ -0,0 +1,15 @@
+1,true,"text"
+2,false,"text"
+3,true,"text"
+4,true,""
+5,false,
+6,true,"text""
+more lines"
+7,false,"""
+"
+8,true,""
+9,false,"text"""
+10,false,text\.
+11,true,"text\."
+,false,\.text
+13,true,"\.text"
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tsv/01.tsv b/asterixdb/asterix-app/data/tsv/01.tsv
new file mode 100644
index 0000000..98876c7
--- /dev/null
+++ b/asterixdb/asterix-app/data/tsv/01.tsv
@@ -0,0 +1,3 @@
+1		"good"	"recommend"
+2		"bad"	"not recommend"
+3		"good"	"recommend"
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tsv/02.tsv b/asterixdb/asterix-app/data/tsv/02.tsv
new file mode 100644
index 0000000..c01ce7c
--- /dev/null
+++ b/asterixdb/asterix-app/data/tsv/02.tsv
@@ -0,0 +1,3 @@
+4	2018	"good"	"recommend"
+5	2018		"not recommend"
+6	2018	"good"	"recommend"
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tsv/sample_01.tsv b/asterixdb/asterix-app/data/tsv/sample_01.tsv
new file mode 100644
index 0000000..aab289a
--- /dev/null
+++ b/asterixdb/asterix-app/data/tsv/sample_01.tsv
@@ -0,0 +1,28 @@
+11	55	text field wih , charrrrrrrrrrr	true	90	0.666666667
+12	55	text field with " charrrrrrrrrr	false	90	0.666666667
+14	55	text field with ' charrrrrrrrrr	false	90	0.666666667
+15	55	text field with \ charrrrrrrrrr	false	90	0.666666667
+16	55	text field wih \, char         	true	90	0.666666667
+17	55	text field with \" charrrrrrrrr	false	90	0.666666667
+18	55	text field with \' charrrrrrrrr	false	90	0.666666667
+19	55	text field with \\ charrrrrrrrr	false	90	0.666666667
+20	55	text field ending with  charr ,	false	90	0.666666667
+21	55	text field ending with  charr "	false	90	0.666666667
+22	55	text field ending with  charr '	false	90	0.666666667
+23	55	text field ending with  charr \	false	90	0.666666667
+24	55	text field ending with charr \,	false	90	0.666666667
+25	55	text field ending with charr \"	false	90	0.666666667
+26	55	text field ending with charr \'	false	90	0.666666667
+27	55	text field ending with charr \\	false	90	0.666666667
+28	55	,text field starting with charr	false	90	0.666666667
+29	55	"text field starting with charr	false	90	0.666666667
+30	55	'text field starting with charr	false	90	0.666666667
+31	55	\text field starting with charr	false	90	0.666666667
+32	55	\,text field starting with char	false	90	0.666666667
+33	55	\"text field starting with char	false	90	0.666666667
+34	55	\'text field starting with char	false	90	0.666666667
+35	55	\\text field starting with char	false	90	0.666666667
+36	55	"text field inside   with char"	false	90	0.666666667
+37	55	  text field with charrrrrrrrr 	false	90	0.666666667
+38	55	text field with "" charrrrrrrrr	false	90	0.666666667
+39	55	text field "with" charrrrrrrrrr	false	90	0.666666667
\ No newline at end of file
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index ac2c303..219595b 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -699,5 +699,38 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <!-- AWS -->
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>sdk-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>s3</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>regions</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>auth</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <!-- Mock for AWS S3 -->
+    <dependency>
+      <groupId>io.findify</groupId>
+      <artifactId>s3mock_2.12</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <!-- Needed for the s3 mock -->
+    <dependency>
+      <groupId>com.typesafe.akka</groupId>
+      <artifactId>akka-http-core_2.12</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
index a989941..c185340 100755
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -34,9 +34,9 @@
 
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.common.library.ILibraryManager;
-import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.external.library.ExternalLibrary;
 import org.apache.asterix.external.library.LibraryAdapter;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index ce1a354..e211531 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -85,12 +85,15 @@
 import org.apache.asterix.external.indexing.IndexingConstants;
 import org.apache.asterix.external.operators.FeedIntakeOperatorNodePushable;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.formats.nontagged.TypeTraitProvider;
 import org.apache.asterix.lang.common.base.IReturningStatement;
 import org.apache.asterix.lang.common.base.IRewriterFactory;
 import org.apache.asterix.lang.common.base.IStatementRewriter;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.expression.IndexedTypeExpression;
+import org.apache.asterix.lang.common.expression.TypeExpression;
+import org.apache.asterix.lang.common.expression.TypeReferenceExpression;
 import org.apache.asterix.lang.common.statement.CompactStatement;
 import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
 import org.apache.asterix.lang.common.statement.CreateDataverseStatement;
@@ -558,10 +561,45 @@
         String dataverseName = getActiveDataverse(dd.getDataverse());
         String datasetName = dd.getName().getValue();
         DatasetType dsType = dd.getDatasetType();
-        String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
-        String itemTypeName = dd.getItemTypeName().getValue();
-        String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
-        String metaItemTypeName = dd.getMetaItemTypeName().getValue();
+        TypeExpression itemTypeExpr = dd.getItemType();
+        String itemTypeDataverseName = null, itemTypeName = null, itemTypeFullyQualifiedName = null;
+        switch (itemTypeExpr.getTypeKind()) {
+            case TYPEREFERENCE:
+                TypeReferenceExpression itemTypeRefExpr = (TypeReferenceExpression) itemTypeExpr;
+                Identifier itemTypeDataverseIdent = itemTypeRefExpr.getIdent().first;
+                itemTypeDataverseName = itemTypeDataverseIdent != null && itemTypeDataverseIdent.getValue() != null
+                        ? itemTypeDataverseIdent.getValue() : dataverseName;
+                itemTypeName = itemTypeRefExpr.getIdent().second.getValue();
+                itemTypeFullyQualifiedName = itemTypeDataverseName + '.' + itemTypeName;
+                break;
+            case RECORD:
+                break;
+            default:
+                throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
+                        String.valueOf(itemTypeExpr.getTypeKind()));
+        }
+
+        TypeExpression metaItemTypeExpr = dd.getMetaItemType();
+        String metaItemTypeDataverseName = null, metaItemTypeName = null, metaItemTypeFullyQualifiedName = null;
+        if (metaItemTypeExpr != null) {
+            switch (metaItemTypeExpr.getTypeKind()) {
+                case TYPEREFERENCE:
+                    TypeReferenceExpression metaItemTypeRefExpr = (TypeReferenceExpression) metaItemTypeExpr;
+                    Identifier metaItemTypeDataverseIdent = metaItemTypeRefExpr.getIdent().first;
+                    metaItemTypeDataverseName =
+                            metaItemTypeDataverseIdent != null && metaItemTypeDataverseIdent.getValue() != null
+                                    ? metaItemTypeDataverseIdent.getValue() : dataverseName;
+                    metaItemTypeName = metaItemTypeRefExpr.getIdent().second.getValue();
+                    metaItemTypeFullyQualifiedName = metaItemTypeDataverseName + '.' + metaItemTypeName;
+                    break;
+                case RECORD:
+                    break;
+                default:
+                    throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
+                            String.valueOf(metaItemTypeExpr.getTypeKind()));
+            }
+        }
+
         Identifier ngNameId = dd.getNodegroupName();
         String nodegroupName = ngNameId == null ? null : ngNameId.getValue();
         String compactionPolicy = dd.getCompactionPolicy();
@@ -573,9 +611,9 @@
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         MetadataLockUtil.createDatasetBegin(lockManager, metadataProvider.getLocks(), dataverseName,
-                itemTypeDataverseName, itemTypeDataverseName + "." + itemTypeName, metaItemTypeDataverseName,
-                metaItemTypeDataverseName + "." + metaItemTypeName, nodegroupName, compactionPolicy,
-                dataverseName + "." + datasetName, defaultCompactionPolicy);
+                itemTypeDataverseName, itemTypeFullyQualifiedName, metaItemTypeDataverseName,
+                metaItemTypeFullyQualifiedName, nodegroupName, compactionPolicy, dataverseName + "." + datasetName,
+                defaultCompactionPolicy);
         Dataset dataset = null;
         try {
             IDatasetDetails datasetDetails = null;
@@ -588,10 +626,29 @@
                     throw new CompilationException(ErrorCode.DATASET_EXISTS, sourceLoc, datasetName, dataverseName);
                 }
             }
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
-                    itemTypeDataverseName, itemTypeName);
-            if (dt == null) {
-                throw new CompilationException(ErrorCode.UNKNOWN_TYPE, sourceLoc, itemTypeName);
+            IAType itemType;
+            switch (itemTypeExpr.getTypeKind()) {
+                case TYPEREFERENCE:
+                    Datatype itemTypeEntity = metadataProvider.findTypeEntity(itemTypeDataverseName, itemTypeName);
+                    if (itemTypeEntity == null || itemTypeEntity.getIsAnonymous()) {
+                        // anonymous types cannot be referred to from CREATE DATASET
+                        throw new AsterixException(ErrorCode.UNKNOWN_TYPE, sourceLoc,
+                                itemTypeDataverseName + "." + itemTypeName);
+                    }
+                    itemType = itemTypeEntity.getDatatype();
+                    break;
+                case RECORD:
+                    itemTypeDataverseName = dataverseName;
+                    itemTypeName = DatasetUtil.createInlineTypeName(datasetName, false);
+                    MetadataLockUtil.createTypeBegin(lockManager, metadataProvider.getLocks(), itemTypeDataverseName,
+                            itemTypeDataverseName + "." + itemTypeName);
+                    itemType = translateType(itemTypeDataverseName, itemTypeName, itemTypeExpr, mdTxnCtx);
+                    MetadataManager.INSTANCE.addDatatype(mdTxnCtx,
+                            new Datatype(itemTypeDataverseName, itemTypeName, itemType, true));
+                    break;
+                default:
+                    throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
+                            String.valueOf(itemTypeExpr.getTypeKind()));
             }
             String ngName = ngNameId != null ? ngNameId.getValue()
                     : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, metadataProvider,
@@ -605,19 +662,42 @@
             }
             switch (dd.getDatasetType()) {
                 case INTERNAL:
-                    IAType itemType = dt.getDatatype();
                     if (itemType.getTypeTag() != ATypeTag.OBJECT) {
                         throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                                 "Dataset type has to be a record type.");
                     }
 
                     IAType metaItemType = null;
-                    if (metaItemTypeDataverseName != null && metaItemTypeName != null) {
-                        metaItemType = metadataProvider.findType(metaItemTypeDataverseName, metaItemTypeName);
-                    }
-                    if (metaItemType != null && metaItemType.getTypeTag() != ATypeTag.OBJECT) {
-                        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
-                                "Dataset meta type has to be a record type.");
+                    if (metaItemTypeExpr != null) {
+                        switch (metaItemTypeExpr.getTypeKind()) {
+                            case TYPEREFERENCE:
+                                Datatype metaItemTypeEntity =
+                                        metadataProvider.findTypeEntity(metaItemTypeDataverseName, metaItemTypeName);
+                                if (metaItemTypeEntity == null || metaItemTypeEntity.getIsAnonymous()) {
+                                    // anonymous types cannot be referred to from CREATE DATASET
+                                    throw new AsterixException(ErrorCode.UNKNOWN_TYPE, sourceLoc,
+                                            metaItemTypeDataverseName + "." + metaItemTypeName);
+                                }
+                                metaItemType = metaItemTypeEntity.getDatatype();
+                                if (metaItemType.getTypeTag() != ATypeTag.OBJECT) {
+                                    throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
+                                            "Dataset meta type has to be a record type.");
+                                }
+                                break;
+                            case RECORD:
+                                metaItemTypeDataverseName = dataverseName;
+                                metaItemTypeName = DatasetUtil.createInlineTypeName(datasetName, true);
+                                MetadataLockUtil.createTypeBegin(lockManager, metadataProvider.getLocks(),
+                                        metaItemTypeDataverseName, metaItemTypeDataverseName + "." + metaItemTypeName);
+                                metaItemType = translateType(metaItemTypeDataverseName, metaItemTypeName,
+                                        metaItemTypeExpr, mdTxnCtx);
+                                MetadataManager.INSTANCE.addDatatype(mdTxnCtx,
+                                        new Datatype(metaItemTypeDataverseName, metaItemTypeName, metaItemType, true));
+                                break;
+                            default:
+                                throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc,
+                                        String.valueOf(metaItemTypeExpr.getTypeKind()));
+                        }
                     }
                     ARecordType metaRecType = (ARecordType) metaItemType;
 
@@ -646,11 +726,11 @@
                             keySourceIndicators, partitioningTypes, autogenerated, filterField);
                     break;
                 case EXTERNAL:
-                    String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
-                    Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
-
-                    datasetDetails =
-                            new ExternalDatasetDetails(adapter, properties, new Date(), TransactionState.COMMIT);
+                    ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
+                    Map<String, String> properties = createExternalDatasetProperties(dd, metadataProvider, mdTxnCtx);
+                    ExternalDataUtils.defaultConfiguration(properties);
+                    datasetDetails = new ExternalDatasetDetails(externalDetails.getAdapter(), properties, new Date(),
+                            TransactionState.COMMIT);
                     break;
                 default:
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
@@ -739,6 +819,12 @@
         }
     }
 
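+    // Builds the configuration properties for an external dataset; the base implementation returns the properties declared in the DDL.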
+    protected Map<String, String> createExternalDatasetProperties(DatasetDecl dd, MetadataProvider metadataProvider,
+            MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
+        return externalDetails.getProperties();
+    }
+
     protected static void validateIfResourceIsActiveInFeed(ICcApplicationContext appCtx, Dataset dataset,
             SourceLocation sourceLoc) throws CompilationException {
         ActiveNotificationHandler activeEventHandler =
@@ -1225,10 +1311,7 @@
                     throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                             "Cannot redefine builtin type " + typeName + ".");
                 } else {
-                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx,
-                            stmtCreateType.getTypeDef(), stmtCreateType.getIdent().getValue(), dataverseName);
-                    TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
-                    IAType type = typeMap.get(typeSignature);
+                    IAType type = translateType(dataverseName, typeName, stmtCreateType.getTypeDef(), mdTxnCtx);
                     MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
                 }
             }
@@ -1241,6 +1324,13 @@
         }
     }
 
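+    // Computes the IAType for the given type definition and returns the type registered under the given dataverse and type name.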
+    private IAType translateType(String dataverseName, String typeName, TypeExpression typeDef,
+            MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, typeDef, typeName, dataverseName);
+        TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
+        return typeMap.get(typeSignature);
+    }
+
     protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
         DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
@@ -1450,8 +1540,41 @@
                 }
             }
             validateDatasetState(metadataProvider, ds, sourceLoc);
+
+            // prepare to drop item and meta types if they were created as inline types
+            String itemTypeDataverseName = ds.getItemTypeDataverseName();
+            String itemTypeName = ds.getItemTypeName();
+            boolean isInlineItemType = DatasetUtil.isInlineTypeName(ds, itemTypeDataverseName, itemTypeName);
+            if (isInlineItemType) {
+                MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), itemTypeDataverseName,
+                        itemTypeDataverseName + '.' + itemTypeName);
+            }
+            String metaTypeDataverseName = ds.getMetaItemTypeDataverseName();
+            String metaTypeName = ds.getMetaItemTypeName();
+            boolean isInlineMetaType =
+                    metaTypeName != null && DatasetUtil.isInlineTypeName(ds, metaTypeDataverseName, metaTypeName);
+            if (isInlineMetaType) {
+                MetadataLockUtil.dropTypeBegin(lockManager, metadataProvider.getLocks(), metaTypeDataverseName,
+                        metaTypeDataverseName + '.' + metaTypeName);
+            }
+            Datatype inlineItemType = isInlineItemType
+                    ? MetadataManager.INSTANCE.getDatatype(mdTxnCtx.getValue(), itemTypeDataverseName, itemTypeName)
+                    : null;
+            Datatype inlineMetaType = isInlineMetaType
+                    ? MetadataManager.INSTANCE.getDatatype(mdTxnCtx.getValue(), metaTypeDataverseName, metaTypeName)
+                    : null;
+
             ds.drop(metadataProvider, mdTxnCtx, jobsToExecute, bActiveTxn, progress, hcc, dropCorrespondingNodeGroup,
                     sourceLoc);
+
+            // drop inline item and meta types
+            if (isInlineItemType && inlineItemType.getIsAnonymous()) {
+                MetadataManager.INSTANCE.dropDatatype(mdTxnCtx.getValue(), itemTypeDataverseName, itemTypeName);
+            }
+            if (isInlineMetaType && inlineMetaType.getIsAnonymous()) {
+                MetadataManager.INSTANCE.dropDatatype(mdTxnCtx.getValue(), metaTypeDataverseName, metaTypeName);
+            }
+
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
         } catch (Exception e) {
             if (bActiveTxn.booleanValue()) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 26c092f..fc912b0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -69,9 +69,11 @@
 import org.apache.asterix.common.config.ReplicationProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.external.IAdapterFactoryService;
 import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.common.replication.INcLifecycleCoordinator;
 import org.apache.asterix.common.utils.Servlets;
+import org.apache.asterix.external.adapter.factory.AdapterFactoryService;
 import org.apache.asterix.external.library.ExternalLibraryManager;
 import org.apache.asterix.file.StorageComponentProvider;
 import org.apache.asterix.messaging.CCMessageBroker;
@@ -154,7 +156,7 @@
         ccExtensionManager = new CCExtensionManager(new ArrayList<>(getExtensions()));
         IGlobalRecoveryManager globalRecoveryManager = createGlobalRecoveryManager();
         appCtx = createApplicationContext(libraryManager, globalRecoveryManager, lifecycleCoordinator,
-                () -> new Receptionist("CC"), ConfigValidator::new, ccExtensionManager);
+                () -> new Receptionist("CC"), ConfigValidator::new, ccExtensionManager, new AdapterFactoryService());
         final CCConfig ccConfig = controllerService.getCCConfig();
         if (System.getProperty("java.rmi.server.hostname") == null) {
             System.setProperty("java.rmi.server.hostname", ccConfig.getClusterPublicAddress());
@@ -182,10 +184,12 @@
     protected ICcApplicationContext createApplicationContext(ILibraryManager libraryManager,
             IGlobalRecoveryManager globalRecoveryManager, INcLifecycleCoordinator lifecycleCoordinator,
             IReceptionistFactory receptionistFactory, IConfigValidatorFactory configValidatorFactory,
-            CCExtensionManager ccExtensionManager) throws AlgebricksException, IOException {
+            CCExtensionManager ccExtensionManager, IAdapterFactoryService adapterFactoryService)
+            throws AlgebricksException, IOException {
         return new CcApplicationContext(ccServiceCtx, getHcc(), libraryManager, () -> MetadataManager.INSTANCE,
                 globalRecoveryManager, lifecycleCoordinator, new ActiveNotificationHandler(), componentProvider,
-                new MetadataLockManager(), receptionistFactory, configValidatorFactory, ccExtensionManager);
+                new MetadataLockManager(), receptionistFactory, configValidatorFactory, ccExtensionManager,
+                adapterFactoryService);
     }
 
     protected IGlobalRecoveryManager createGlobalRecoveryManager() throws Exception {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index b74f4c6..fc64d99 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -42,7 +42,7 @@
 import org.apache.asterix.common.transactions.TxnId;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.external.feed.watch.FeedActivityDetails;
@@ -135,14 +135,14 @@
     private FeedOperations() {
     }
 
-    private static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed feed,
+    private static Pair<JobSpecification, ITypedAdapterFactory> buildFeedIntakeJobSpec(Feed feed,
             MetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
         JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
         spec.setFrameSize(metadataProvider.getApplicationContext().getCompilerProperties().getFrameSize());
-        IAdapterFactory adapterFactory;
+        ITypedAdapterFactory adapterFactory;
         IOperatorDescriptor feedIngestor;
         AlgebricksPartitionConstraint ingesterPc;
-        Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> t =
+        Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, ITypedAdapterFactory> t =
                 metadataProvider.buildFeedIntakeRuntime(spec, feed, policyAccessor);
         feedIngestor = t.first;
         ingesterPc = t.second;
@@ -447,13 +447,13 @@
             MetadataProvider metadataProvider, Feed feed, List<FeedConnection> feedConnections,
             IStatementExecutor statementExecutor, IHyracksClientConnection hcc) throws Exception {
         FeedPolicyAccessor fpa = new FeedPolicyAccessor(new HashMap<>());
-        Pair<JobSpecification, IAdapterFactory> intakeInfo = buildFeedIntakeJobSpec(feed, metadataProvider, fpa);
+        Pair<JobSpecification, ITypedAdapterFactory> intakeInfo = buildFeedIntakeJobSpec(feed, metadataProvider, fpa);
         List<JobSpecification> jobsList = new ArrayList<>();
         // TODO: Figure out a better way to handle insert/upsert per conn instead of per feed
         Boolean insertFeed = ExternalDataUtils.isInsertFeed(feed.getConfiguration());
         // Construct the ingestion Job
         JobSpecification intakeJob = intakeInfo.getLeft();
-        IAdapterFactory ingestionAdaptorFactory = intakeInfo.getRight();
+        ITypedAdapterFactory ingestionAdaptorFactory = intakeInfo.getRight();
         String[] ingestionLocations = ingestionAdaptorFactory.getPartitionConstraint().getLocations();
         // Add metadata configs
         metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, Boolean.TRUE.toString());
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java
new file mode 100644
index 0000000..d2158ba
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/external_dataset/aws/AwsS3ExternalDatasetTest.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.external_dataset.aws;
+
+import static org.apache.hyracks.util.file.FileUtil.joinPath;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.test.common.TestExecutor;
+import org.apache.asterix.test.runtime.ExecutionTestUtil;
+import org.apache.asterix.test.runtime.LangExecutionUtil;
+import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.hyracks.control.nc.NodeControllerService;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import io.findify.s3mock.S3Mock;
+import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+
+/**
+ * Runs an AWS S3 mock server and tests it as an external dataset
+ */
+@RunWith(Parameterized.class)
+public class AwsS3ExternalDatasetTest {
+
+    private static final Logger LOGGER = LogManager.getLogger();
+
+    protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
+
+    // S3 mock server
+    private static S3Mock s3MockServer;
+
+    // IMPORTANT: The following values must be used in the AWS S3 test case
+    private static S3Client client;
+    private static final String S3_MOCK_SERVER_BUCKET = "playground";
+    private static final String S3_MOCK_SERVER_BUCKET_DEFINITION = "json-data/reviews/"; // data resides here
+    private static final String S3_MOCK_SERVER_BUCKET_CSV_DEFINITION = "csv-data/reviews/"; // data resides here
+    private static final String S3_MOCK_SERVER_BUCKET_TSV_DEFINITION = "tsv-data/reviews/"; // data resides here
+    private static final String S3_MOCK_SERVER_REGION = "us-west-2";
+    private static final int S3_MOCK_SERVER_PORT = 8001;
+    private static final String S3_MOCK_SERVER_HOSTNAME = "http://localhost:" + S3_MOCK_SERVER_PORT;
+    private static final String CSV_DATA_PATH = joinPath("data", "csv");
+    private static final String TSV_DATA_PATH = joinPath("data", "tsv");
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        final TestExecutor testExecutor = new TestExecutor();
+        LangExecutionUtil.setUp(TEST_CONFIG_FILE_NAME, testExecutor);
+        setNcEndpoints(testExecutor);
+        startAwsS3MockServer();
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        LangExecutionUtil.tearDown();
+
+        // Shutting down S3 mock server
+        LOGGER.info("Shutting down S3 mock server and client");
+        if (client != null) {
+            client.close();
+        }
+        if (s3MockServer != null) {
+            s3MockServer.shutdown();
+        }
+        LOGGER.info("S3 mock server and client shut down successfully");
+    }
+
+    @Parameters(name = "SqlppExecutionTest {index}: {0}")
+    public static Collection<Object[]> tests() throws Exception {
+        return LangExecutionUtil.tests("only_external_dataset.xml", "testsuite_external_dataset.xml");
+    }
+
+    protected TestCaseContext tcCtx;
+
+    public AwsS3ExternalDatasetTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @Test
+    public void test() throws Exception {
+        LangExecutionUtil.test(tcCtx);
+    }
+
+    private static void setNcEndpoints(TestExecutor testExecutor) {
+        final NodeControllerService[] ncs = ExecutionTestUtil.integrationUtil.ncs;
+        final Map<String, InetSocketAddress> ncEndPoints = new HashMap<>();
+        final String ip = InetAddress.getLoopbackAddress().getHostAddress();
+        for (NodeControllerService nc : ncs) {
+            final String nodeId = nc.getId();
+            final INcApplicationContext appCtx = (INcApplicationContext) nc.getApplicationContext();
+            int apiPort = appCtx.getExternalProperties().getNcApiPort();
+            ncEndPoints.put(nodeId, InetSocketAddress.createUnresolved(ip, apiPort));
+        }
+        testExecutor.setNcEndPoints(ncEndPoints);
+    }
+
+    /**
+     * Starts the AWS S3 mock server and loads some files for testing
+     */
+    private static void startAwsS3MockServer() {
+        // Starting S3 mock server to be used instead of a real S3 server
+        LOGGER.info("Starting S3 mock server");
+        s3MockServer = new S3Mock.Builder().withPort(S3_MOCK_SERVER_PORT).withInMemoryBackend().build();
+        s3MockServer.start();
+        LOGGER.info("S3 mock server started successfully");
+
+        // Create a client and add some files to the S3 mock server
+        LOGGER.info("Creating S3 client to load initial files to S3 mock server");
+        S3ClientBuilder builder = S3Client.builder();
+        URI endpoint = URI.create(S3_MOCK_SERVER_HOSTNAME); // endpoint pointing to S3 mock server
+        builder.region(Region.of(S3_MOCK_SERVER_REGION)).credentialsProvider(AnonymousCredentialsProvider.create())
+                .endpointOverride(endpoint);
+        client = builder.build();
+        LOGGER.info("Client created successfully");
+
+        // Create the bucket and upload some json files
+        prepareS3Bucket();
+    }
+
+    /**
+     * Creates a bucket and fills it with some files for testing purposes.
+     */
+    private static void prepareS3Bucket() {
+        LOGGER.info("creating bucket " + S3_MOCK_SERVER_BUCKET);
+        client.createBucket(CreateBucketRequest.builder().bucket(S3_MOCK_SERVER_BUCKET).build());
+        LOGGER.info("bucket created successfully");
+
+        LOGGER.info("Adding JSON files to the bucket");
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "0.json").build(),
+                RequestBody.fromString("{\"id\": 1, \"year\": null, \"quarter\": null, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "1.json").build(),
+                RequestBody.fromString("{\"id\": 2, \"year\": null, \"quarter\": null, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/1.json").build(),
+                RequestBody.fromString("{\"id\": 3, \"year\": 2018, \"quarter\": null, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/2.json").build(),
+                RequestBody.fromString("{\"id\": 4, \"year\": 2018, \"quarter\": null, \"review\": \"bad\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/q1/1.json").build(),
+                RequestBody.fromString("{\"id\": 5, \"year\": 2018, \"quarter\": 1, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/q1/2.json").build(),
+                RequestBody.fromString("{\"id\": 6, \"year\": 2018, \"quarter\": 1, \"review\": \"bad\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/q2/1.json").build(),
+                RequestBody.fromString("{\"id\": 7, \"year\": 2018, \"quarter\": 2, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2018/q2/2.json").build(),
+                RequestBody.fromString("{\"id\": 8, \"year\": 2018, \"quarter\": 2, \"review\": \"bad\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/1.json").build(),
+                RequestBody.fromString("{\"id\": 9, \"year\": 2019, \"quarter\": null, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/2.json").build(),
+                RequestBody.fromString("{\"id\": 10, \"year\": 2019, \"quarter\": null, \"review\": \"bad\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/q1/1.json").build(),
+                RequestBody.fromString("{\"id\": 11, \"year\": 2019, \"quarter\": 1, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/q1/2.json").build(),
+                RequestBody.fromString("{\"id\": 12, \"year\": 2019, \"quarter\": 1, \"review\": \"bad\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/q2/1.json").build(),
+                RequestBody.fromString("{\"id\": 13, \"year\": 2019, \"quarter\": 2, \"review\": \"good\"}"));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_DEFINITION + "2019/q2/2.json").build(),
+                RequestBody.fromString("{\"id\": 14, \"year\": 2019, \"quarter\": 2, \"review\": \"bad\"}"));
+
+        LOGGER.info("Adding CSV files to the bucket");
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_CSV_DEFINITION + "01.csv").build(),
+                RequestBody.fromFile(Paths.get(CSV_DATA_PATH, "01.csv")));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_CSV_DEFINITION + "2018/01.csv").build(),
+                RequestBody.fromFile(Paths.get(CSV_DATA_PATH, "02.csv")));
+
+        LOGGER.info("Adding TSV files to the bucket");
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_TSV_DEFINITION + "01.tsv").build(),
+                RequestBody.fromFile(Paths.get(TSV_DATA_PATH, "01.tsv")));
+        client.putObject(
+                PutObjectRequest.builder().bucket(S3_MOCK_SERVER_BUCKET)
+                        .key(S3_MOCK_SERVER_BUCKET_TSV_DEFINITION + "2018/01.tsv").build(),
+                RequestBody.fromFile(Paths.get(TSV_DATA_PATH, "02.tsv")));
+        LOGGER.info("Files added successfully");
+    }
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/only_external_dataset.xml b/asterixdb/asterix-app/src/test/resources/runtimets/only_external_dataset.xml
new file mode 100644
index 0000000..334dd52
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/only_external_dataset.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries_sqlpp" QueryFileExtension=".sqlpp">
+  <test-group name="failed">
+  </test-group>
+</test-suite>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
index ff0badd..03b82fe 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
@@ -21,8 +21,9 @@
 * Expected Res : Failure
 * Date         : Feb 7th 2014
 */
+-- param max-warnings:json=1000
 
 select element array_avg((
     select element x
-    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
+    from  [float('2.0'),'hello world',10,date('2013-01-01')] as x
 ));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.2.update.sqlpp
index bd244d0..88d820c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':double(473847.0)};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32)};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':double(38473827484738239.0)};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double(678900.0)};
+insert into Test
+select element {'id':4,'gid':1,'val':double(4.32),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double/serial_avg_double.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
index bd244d0..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
index bd244d0..1cff84e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':double(100.0)};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32)};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':"str"};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double(200.0)};
+insert into Test
+select element {'id':4,'gid':1,'val':double(4.32),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.2.update.sqlpp
similarity index 99%
rename from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
rename to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.2.update.sqlpp
index bd244d0..042f3ce 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.2.update.sqlpp
@@ -16,4 +16,3 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_empty/serial_avg_empty.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.1.ddl.sqlpp
similarity index 80%
rename from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
rename to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.1.ddl.sqlpp
index 5dc84e2..ddac6a0 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : float
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.2.update.sqlpp
index bd244d0..40641b5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':float('4.32'),'valplus':float('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':float('5.32')};
+insert into Test
+select element {'id':2,'gid':1,'val':float('6.32'),'valplus':float('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':float('4.32'),'valplus':float('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':float('4.32'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float/serial_avg_float.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
index 5dc84e2..ddac6a0 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : float
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
index bd244d0..40641b5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':float('4.32'),'valplus':float('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':float('5.32')};
+insert into Test
+select element {'id':2,'gid':1,'val':float('6.32'),'valplus':float('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':float('4.32'),'valplus':float('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':float('4.32'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
index bd244d0..0c0bfee 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : smallint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.2.update.sqlpp
index bd244d0..6210af4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':smallint('4'),'valplus':smallint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':smallint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':smallint('6'),'valplus':smallint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':smallint('4'),'valplus':smallint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':smallint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16/serial_avg_int16.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
index bd244d0..0c0bfee 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : smallint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
index bd244d0..6210af4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':smallint('4'),'valplus':smallint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':smallint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':smallint('6'),'valplus':smallint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':smallint('4'),'valplus':smallint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':smallint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
index bd244d0..07312bf 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : integer
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.2.update.sqlpp
index bd244d0..a248a0f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':integer('4'),'valplus':integer('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':integer('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':integer('6'),'valplus':integer('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':integer('4'),'valplus':integer('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':integer('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32/serial_avg_int32.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
index bd244d0..07312bf 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : integer
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
index bd244d0..a248a0f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':integer('4'),'valplus':integer('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':integer('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':integer('6'),'valplus':integer('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':integer('4'),'valplus':integer('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':integer('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
index 5dc84e2..5e9972c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : bigint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.2.update.sqlpp
index bd244d0..b460af2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':bigint('4'),'valplus':bigint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':bigint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':bigint('6'),'valplus':bigint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':bigint('4'),'valplus':bigint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':bigint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64/serial_avg_int64.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
index bd244d0..5e9972c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : bigint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
index bd244d0..b460af2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':bigint('4'),'valplus':bigint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':bigint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':bigint('6'),'valplus':bigint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':bigint('4'),'valplus':bigint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':bigint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
index bd244d0..c45dbfe 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : tinyint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.2.update.sqlpp
index bd244d0..eb41173 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':tinyint('100'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':1,'gid':1,'val':tinyint('100')};
+insert into Test
+select element {'id':2,'gid':1,'val':tinyint('90'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':3,'gid':1,'val':tinyint('40'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':4,'gid':1,'val':tinyint('40'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.3.query.sqlpp
similarity index 89%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.3.query.sqlpp
index bd244d0..c141c2b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8/serial_avg_int8.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.val) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
index bd244d0..c45dbfe 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : tinyint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
index bd244d0..eb41173 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':tinyint('100'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':1,'gid':1,'val':tinyint('100')};
+insert into Test
+select element {'id':2,'gid':1,'val':tinyint('90'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':3,'gid':1,'val':tinyint('40'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':4,'gid':1,'val':tinyint('40'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
similarity index 88%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
index bd244d0..acbf607 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, avg(t.valplus) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
similarity index 69%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
index bd244d0..8c0b731 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
@@ -17,3 +17,13 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':float('2.0')};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32)};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':'hello world'};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double('3.0')};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
similarity index 70%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
index bd244d0..dcb6501 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
@@ -16,4 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+ /*
+ * Description  : Run serial-avg over an ordered list with mixed types
+ * Expected Res : Failure
+ * Date         : March 5th 2018
+ */
 
+-- param max-warnings:json=1000
+
+use test;
+
+select gid, array_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid 
+group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.1.ddl.sqlpp
deleted file mode 100644
index 095f14e..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
-* Description  : Run avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Jun 2nd 2013
-*/
-
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.2.update.sqlpp
deleted file mode 100644
index bd244d0..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.2.update.sqlpp
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
index d07bdc8..5e6596c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
@@ -21,6 +21,7 @@
 * Expected Res : Failure
 * Date         : Jun 2nd 2013
 */
+-- param max-warnings:json=1000
 
 select element strict_avg((
     select element x
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.2.update.sqlpp
index bd244d0..88d820c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':double(473847.0)};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32)};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':double(38473827484738239.0)};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double(678900.0)};
+insert into Test
+select element {'id':4,'gid':1,'val':double(4.32),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double/serial_avg_double.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
index bd244d0..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
index bd244d0..88d820c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':double(473847.0)};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32)};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':double(38473827484738239.0)};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double(678900.0)};
+insert into Test
+select element {'id':4,'gid':1,'val':double(4.32),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_double_null/serial_avg_double_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.2.update.sqlpp
similarity index 99%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.2.update.sqlpp
index bd244d0..042f3ce 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.2.update.sqlpp
@@ -16,4 +16,3 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_empty/serial_avg_empty.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.1.ddl.sqlpp
index 5dc84e2..ddac6a0 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : float
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.2.update.sqlpp
index bd244d0..40641b5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':float('4.32'),'valplus':float('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':float('5.32')};
+insert into Test
+select element {'id':2,'gid':1,'val':float('6.32'),'valplus':float('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':float('4.32'),'valplus':float('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':float('4.32'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float/serial_avg_float.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
index 5dc84e2..ddac6a0 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : float
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
index bd244d0..40641b5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':float('4.32'),'valplus':float('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':float('5.32')};
+insert into Test
+select element {'id':2,'gid':1,'val':float('6.32'),'valplus':float('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':float('4.32'),'valplus':float('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':float('4.32'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_float_null/serial_avg_float_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
index bd244d0..0c0bfee 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : smallint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.2.update.sqlpp
index bd244d0..6210af4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':smallint('4'),'valplus':smallint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':smallint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':smallint('6'),'valplus':smallint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':smallint('4'),'valplus':smallint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':smallint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16/serial_avg_int16.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
index bd244d0..0c0bfee 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : smallint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
index bd244d0..6210af4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':smallint('4'),'valplus':smallint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':smallint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':smallint('6'),'valplus':smallint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':smallint('4'),'valplus':smallint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':smallint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int16_null/serial_avg_int16_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
index bd244d0..07312bf 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : integer
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.2.update.sqlpp
index bd244d0..a248a0f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':integer('4'),'valplus':integer('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':integer('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':integer('6'),'valplus':integer('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':integer('4'),'valplus':integer('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':integer('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32/serial_avg_int32.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
index bd244d0..07312bf 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : integer
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
index bd244d0..a248a0f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':integer('4'),'valplus':integer('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':integer('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':integer('6'),'valplus':integer('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':integer('4'),'valplus':integer('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':integer('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int32_null/serial_avg_int32_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
index 5dc84e2..5e9972c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : bigint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.2.update.sqlpp
index bd244d0..b460af2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':bigint('4'),'valplus':bigint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':bigint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':bigint('6'),'valplus':bigint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':bigint('4'),'valplus':bigint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':bigint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64/serial_avg_int64.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
index bd244d0..5e9972c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : bigint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
similarity index 64%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
index bd244d0..b460af2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':bigint('4'),'valplus':bigint('1')};
+insert into Test
+select element {'id':1,'gid':1,'val':bigint('5')};
+insert into Test
+select element {'id':2,'gid':1,'val':bigint('6'),'valplus':bigint('2')};
+insert into Test
+select element {'id':3,'gid':1,'val':bigint('4'),'valplus':bigint('3')};
+insert into Test
+select element {'id':4,'gid':1,'val':bigint('4'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int64_null/serial_avg_int64_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
index bd244d0..c45dbfe 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : tinyint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.2.update.sqlpp
index bd244d0..eb41173 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':tinyint('100'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':1,'gid':1,'val':tinyint('100')};
+insert into Test
+select element {'id':2,'gid':1,'val':tinyint('90'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':3,'gid':1,'val':tinyint('40'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':4,'gid':1,'val':tinyint('40'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.3.query.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.3.query.sqlpp
index bd244d0..09c3106 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8/serial_avg_int8.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.val from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(val as val);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
index bd244d0..c45dbfe 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.ddl.sqlpp
@@ -17,3 +17,17 @@
  * under the License.
  */
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : tinyint
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
index bd244d0..eb41173 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.2.update.sqlpp
@@ -17,3 +17,15 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':tinyint('100'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':1,'gid':1,'val':tinyint('100')};
+insert into Test
+select element {'id':2,'gid':1,'val':tinyint('90'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':3,'gid':1,'val':tinyint('40'),'valplus':tinyint('100')};
+insert into Test
+select element {'id':4,'gid':1,'val':tinyint('40'),'valplus':null};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
index bd244d0..35ff90f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_int8_null/serial_avg_int8_null.3.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
index 5dc84e2..da0bd7b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.1.ddl.sqlpp
@@ -16,9 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-/*
-* Description  : Run array_avg over an ordered list with mixed types
-* Expected Res : Failure
-* Date         : Feb 7th 2014
-*/
 
+drop  dataverse test if exists;
+create  dataverse test;
+
+use test;
+
+
+create type test.TestType as
+{
+  id : bigint,
+  gid : bigint,
+  val : double
+};
+
+create  dataset Test(TestType) primary key id;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
similarity index 67%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
index bd244d0..02788ca 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.2.update.sqlpp
@@ -17,3 +17,13 @@
  * under the License.
  */
 
+use test;
+
+insert into Test
+select element {'id':0,'gid':1,'val':double(4.32),'valplus':float('2.0')};
+insert into Test
+select element {'id':1,'gid':1,'val':double(5.32), 'valplus':int32("15")};
+insert into Test
+select element {'id':2,'gid':1,'val':double(6.32),'valplus':'hello world'};
+insert into Test
+select element {'id':3,'gid':1,'val':double(4.32),'valplus':double('3.0')};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
similarity index 70%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
index bd244d0..e48d5b8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/serial_avg_mixed/serial_avg_mixed.3.query.sqlpp
@@ -16,4 +16,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+/*
+ * Description  : Run serial-avg over an ordered list with mixed types
+ * Expected Res : Failure
+ * Date         : March 5th 2018
+ */
 
+-- param max-warnings:json=1000
+
+use test;
+
+select gid, strict_avg((select value g.valplus from g)) as avg
+from Test as t
+/* +hash */
+group by t.gid as gid group as g(valplus as valplus);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.1.ddl.sqlpp
new file mode 100644
index 0000000..5728e78
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.1.ddl.sqlpp
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE t1 AS {f1: string, f2: string, f3: string, f4: string, f5: string};
+CREATE TYPE t2 AS {f1: string, f2: string, f3: string};
+CREATE TYPE t3 AS {f1: int?, f2: boolean, f3: string?};
+
+CREATE EXTERNAL DATASET ds1(t1) USING localfs(("path"="asterix_nc1://data/csv/sample_09.csv"), ("format"="csv"));
+CREATE EXTERNAL DATASET ds2(t2) USING localfs(("path"="asterix_nc1://data/csv/sample_10.csv"), ("format"="csv"));
+CREATE EXTERNAL DATASET ds3(t1) USING localfs(("path"="asterix_nc1://data/csv/sample_11.csv"), ("format"="csv"));
+CREATE EXTERNAL DATASET ds4(t3) USING localfs(("path"="asterix_nc1://data/csv/sample_12.csv"), ("format"="csv"));
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.2.query.sqlpp
similarity index 94%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.2.query.sqlpp
index bd244d0..d870372 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.2.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM ds1 v SELECT VALUE v;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.3.query.sqlpp
similarity index 94%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.3.query.sqlpp
index bd244d0..64a2f8a 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.3.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM ds2 v SELECT VALUE v;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.4.query.sqlpp
similarity index 94%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.4.query.sqlpp
index bd244d0..313198c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.4.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM ds3 v SELECT VALUE v;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.5.query.sqlpp
similarity index 94%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.5.query.sqlpp
index bd244d0..065de4e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.5.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM ds4 v SELECT VALUE v;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.6.ddl.sqlpp
similarity index 96%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.6.ddl.sqlpp
index bd244d0..86a1b59 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/csv-parser-001/csv-parser-001.6.ddl.sqlpp
@@ -17,3 +17,4 @@
  * under the License.
  */
 
+DROP DATAVERSE test;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.1.ddl.sqlpp
similarity index 74%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.1.ddl.sqlpp
index bd244d0..c0faf16 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.1.ddl.sqlpp
@@ -17,3 +17,11 @@
  * under the License.
  */
 
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+CREATE TYPE t1 AS {f1: int, f2: int, f3: string, f4: boolean, f5: bigint, f6: double};
+
+CREATE EXTERNAL DATASET ds1(t1) USING localfs(("path"="asterix_nc1://data/tsv/sample_01.tsv"), ("format"="tsv"))
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.2.query.sqlpp
similarity index 94%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.2.query.sqlpp
index bd244d0..d870372 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.2.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM ds1 v SELECT VALUE v;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.3.ddl.sqlpp
similarity index 96%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.3.ddl.sqlpp
index bd244d0..86a1b59 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/csv-tsv-parser/tsv-parser-001/tsv-parser-002.3.ddl.sqlpp
@@ -17,3 +17,4 @@
  * under the License.
  */
 
+DROP DATAVERSE test;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.1.ddl.sqlpp
new file mode 100644
index 0000000..6230d4b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.1.ddl.sqlpp
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+/* Metadata test function */
+
+CREATE FUNCTION listMetadata() {
+  SELECT "Dataset" AS en, d.DatasetName, d.DatatypeDataverseName, d.DatatypeName,
+    d.MetatypeDataverseName, d.MetatypeName
+  FROM Metadata.`Dataset` d
+  WHERE d.DataverseName = "test"
+  UNION ALL
+  SELECT "Datatype" AS en, dt.DatatypeName, dt.Derived
+  FROM Metadata.`Datatype` dt
+  WHERE dt.DataverseName = "test"
+  ORDER BY en, DatasetName, DatatypeName
+};
+
+/* Internal datasets */
+
+CREATE DATASET A_Customers_Default_Closed(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+) PRIMARY KEY c_custkey ;
+
+CREATE DATASET A_Customers_Closed(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+) CLOSED TYPE PRIMARY KEY c_custkey;
+
+CREATE DATASET A_Customers_Open(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+) OPEN TYPE PRIMARY KEY c_custkey;
+
+/* External datasets */
+
+CREATE EXTERNAL DATASET B_Orders_Default_Closed(
+    o_orderkey integer not null,
+    o_custkey integer not null,
+    o_orderstatus string not null,
+    o_totalprice double not null,
+    o_orderdate string not null,
+    o_orderpriority string not null,
+    o_clerk string not null,
+    o_shippriority integer not null,
+    o_comment string
+)
+USING `localfs`
+((`path`=`asterix_nc1://data/tpch0.001/orders.tbl`),
+(`input-format`=`text-input-format`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+CREATE EXTERNAL DATASET B_Orders_Closed(
+    o_orderkey integer not null,
+    o_custkey integer not null,
+    o_orderstatus string not null,
+    o_totalprice double not null,
+    o_orderdate string not null,
+    o_orderpriority string not null,
+    o_clerk string not null,
+    o_shippriority integer not null,
+    o_comment string
+) CLOSED TYPE
+USING `localfs`
+((`path`=`asterix_nc1://data/tpch0.001/orders.tbl`),
+(`input-format`=`text-input-format`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+CREATE EXTERNAL DATASET B_Orders_Open(
+    o_orderkey integer not null,
+    o_custkey integer not null,
+    o_orderstatus string not null,
+    o_totalprice double not null,
+    o_orderdate string not null,
+    o_orderpriority string not null,
+    o_clerk string not null,
+    o_shippriority integer not null,
+    o_comment string
+) OPEN TYPE
+USING `localfs`
+((`path`=`asterix_nc1://data/tpch0.001/orders.tbl`),
+(`input-format`=`text-input-format`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+/* Internal datasets with inline META type */
+
+CREATE DATASET C_Customers_Meta_Default_Closed(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+)
+WITH META(c_x integer not null, c_y integer)
+PRIMARY KEY c_custkey ;
+
+CREATE DATASET C_Customers_Meta_Closed(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+) CLOSED TYPE
+WITH META(
+  c_x integer not null,
+  c_y integer
+) CLOSED TYPE
+PRIMARY KEY c_custkey;
+
+CREATE DATASET C_Customers_Meta_Open(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string,
+  c_comment string
+) OPEN TYPE
+WITH META(
+  c_x integer not null,
+  c_y integer
+) OPEN TYPE
+PRIMARY KEY c_custkey;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.query.sqlpp
similarity index 96%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.query.sqlpp
index bd244d0..52a6324 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+listMetadata();
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.3.ddl.sqlpp
similarity index 70%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.3.ddl.sqlpp
index bd244d0..8a08888 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.3.ddl.sqlpp
@@ -17,3 +17,14 @@
  * under the License.
  */
 
+USE test;
+
+DROP DATASET A_Customers_Default_Closed;
+DROP DATASET A_Customers_Closed;
+DROP DATASET A_Customers_Open;
+DROP DATASET B_Orders_Default_Closed;
+DROP DATASET B_Orders_Closed;
+DROP DATASET B_Orders_Open;
+DROP DATASET C_Customers_Meta_Default_Closed;
+DROP DATASET C_Customers_Meta_Closed;
+DROP DATASET C_Customers_Meta_Open;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.query.sqlpp
similarity index 87%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.query.sqlpp
index bd244d0..ad16400 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.query.sqlpp
@@ -17,3 +17,10 @@
  * under the License.
  */
 
+/*
+ * Test that inline types are deleted when dataset is dropped
+ */
+
+USE test;
+
+array_count(listMetadata());
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.1.ddl.sqlpp
similarity index 70%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.1.ddl.sqlpp
index bd244d0..d10d096 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.1.ddl.sqlpp
@@ -17,3 +17,20 @@
  * under the License.
  */
 
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+
+USE test;
+
+/* Prepare: create datasets with inline type */
+
+CREATE DATASET Cust1(
+  c_custkey integer not null,
+  c_name string not null
+) PRIMARY KEY c_custkey;
+
+CREATE DATASET Cust2(
+  c_custkey integer not null,
+  c_name string not null,
+  c_phone string
+) PRIMARY KEY c_custkey;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.query.sqlpp
similarity index 82%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.query.sqlpp
index bd244d0..00cad73 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+SELECT count(*) as cnt
+FROM Metadata.`Datatype` dt
+WHERE dt.DataverseName = "test"
+ AND starts_with(dt.DatatypeName, "$d$t$i$Cust")
+ AND dt.Derived.IsAnonymous
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.3.ddl.sqlpp
similarity index 80%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.3.ddl.sqlpp
index bd244d0..81572f6 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.3.ddl.sqlpp
@@ -17,3 +17,12 @@
  * under the License.
  */
 
+/* Create dataset that attempts to use first dataset's type
+   as its item type -> Expect error: unknown type */
+
+USE test;
+
+CREATE DATASET Cust1X(
+  `$d$t$i$Cust1`
+)
+PRIMARY KEY c_custkey;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.4.ddl.sqlpp
similarity index 75%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.4.ddl.sqlpp
index bd244d0..2ad4888 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.4.ddl.sqlpp
@@ -16,4 +16,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+/*
+ * Create dataset that attempts to use first dataset's type
+ * as its meta item type -> Expect error: unknown type
+ */
 
+USE test;
+
+CREATE DATASET Cust2X(
+  c_custkey integer not null,
+  c_name string not null
+)
+WITH META(`$d$t$i$Cust2`)
+PRIMARY KEY c_custkey;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.000.ddl.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.000.ddl.sqlpp
index bd244d0..9c6a994 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.000.ddl.sqlpp
@@ -17,3 +17,22 @@
  * under the License.
  */
 
+drop dataverse test if exists;
+create dataverse test;
+use test;
+
+drop type test if exists;
+create type test as open {
+};
+
+drop dataset test if exists;
+create external dataset test(test) using S3 (
+("accessKey"="dummyAccessKey"),
+("secretKey"="dummySecretKey"),
+("region"="us-west-2"),
+("serviceEndpoint"="http://localhost:8001"),
+("container"="playground"),
+("definition"="json-data/reviews"),
+("format"="json")
+);
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.001.query.sqlpp
similarity index 92%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.001.query.sqlpp
index bd244d0..2dd9cc5 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.001.query.sqlpp
@@ -17,3 +17,9 @@
  * under the License.
  */
 
+use test;
+
+from test
+select value test
+order by id asc;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.002.ddl.sqlpp
similarity index 95%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.002.ddl.sqlpp
index bd244d0..548e632 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/000/external_dataset.002.ddl.sqlpp
@@ -17,3 +17,4 @@
  * under the License.
  */
 
+drop dataverse test if exists;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.000.ddl.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.000.ddl.sqlpp
index bd244d0..b906039 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.000.ddl.sqlpp
@@ -17,3 +17,20 @@
  * under the License.
  */
 
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+DROP TYPE test IF EXISTS;
+CREATE TYPE test AS {id: int, year: int?, review: string, details: string?};
+
+DROP DATASET test IF EXISTS;
+CREATE EXTERNAL DATASET test(test) USING S3 (
+("accessKey"="dummyAccessKey"),
+("secretKey"="dummySecretKey"),
+("region"="us-west-2"),
+("serviceEndpoint"="http://localhost:8001"),
+("container"="playground"),
+("definition"="csv-data/reviews"),
+("format"="csv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.002.query.sqlpp
similarity index 92%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.002.query.sqlpp
index bd244d0..6e31eb3 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.002.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM test SELECT VALUE test ORDER BY id ASC;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.003.ddl.sqlpp
similarity index 95%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.003.ddl.sqlpp
index bd244d0..0ff713d 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/001/query-dataset.003.ddl.sqlpp
@@ -17,3 +17,4 @@
  * under the License.
  */
 
+DROP DATASET test IF EXISTS;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.000.ddl.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.000.ddl.sqlpp
index bd244d0..d385bee 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.000.ddl.sqlpp
@@ -17,3 +17,20 @@
  * under the License.
  */
 
+DROP DATAVERSE test IF EXISTS;
+CREATE DATAVERSE test;
+USE test;
+
+DROP TYPE test IF EXISTS;
+CREATE TYPE test AS {id: int, year: int?, review: string, details: string?};
+
+DROP DATASET test IF EXISTS;
+CREATE EXTERNAL DATASET test(test) USING S3 (
+("accessKey"="dummyAccessKey"),
+("secretKey"="dummySecretKey"),
+("region"="us-west-2"),
+("serviceEndpoint"="http://localhost:8001"),
+("container"="playground"),
+("definition"="tsv-data/reviews"),
+("format"="tsv")
+);
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.002.query.sqlpp
similarity index 92%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.002.query.sqlpp
index bd244d0..6e31eb3 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.002.query.sqlpp
@@ -17,3 +17,6 @@
  * under the License.
  */
 
+USE test;
+
+FROM test SELECT VALUE test ORDER BY id ASC;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.003.ddl.sqlpp
similarity index 95%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.003.ddl.sqlpp
index bd244d0..0ff713d 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-dataset/aws/s3/002/query-dataset.003.ddl.sqlpp
@@ -17,3 +17,4 @@
  * under the License.
  */
 
+DROP DATASET test IF EXISTS;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/avg_mixed/avg_mixed.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/avg_mixed/avg_mixed.1.adm
index e3b97f5..e0ea36f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/avg_mixed/avg_mixed.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/avg_mixed/avg_mixed.1.adm
@@ -1 +1 @@
-[  ]
+6.0
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double/serial_avg_double.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double/serial_avg_double.1.adm
new file mode 100644
index 0000000..7e08e77
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double/serial_avg_double.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.92 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.adm
new file mode 100644
index 0000000..877de08
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_double_null/serial_avg_double_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 150.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.1.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_empty/serial_sum_empty.1.adm
similarity index 100%
rename from asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.1.ast
rename to asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_empty/serial_sum_empty.1.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float/serial_avg_float.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float/serial_avg_float.1.adm
new file mode 100644
index 0000000..7e08e77
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float/serial_avg_float.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.92 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.adm
new file mode 100644
index 0000000..47ebaaa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_float_null/serial_avg_float_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 2.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16/serial_avg_int16.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16/serial_avg_int16.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16/serial_avg_int16.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.adm
new file mode 100644
index 0000000..47ebaaa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int16_null/serial_avg_int16_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 2.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32/serial_avg_int32.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32/serial_avg_int32.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32/serial_avg_int32.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.adm
new file mode 100644
index 0000000..47ebaaa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int32_null/serial_avg_int32_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 2.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64/serial_avg_int64.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64/serial_avg_int64.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64/serial_avg_int64.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.adm
new file mode 100644
index 0000000..47ebaaa
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int64_null/serial_avg_int64_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 2.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8/serial_avg_int8.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8/serial_avg_int8.1.adm
new file mode 100644
index 0000000..8e23c3e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8/serial_avg_int8.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 74.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.adm
new file mode 100644
index 0000000..4c81aef
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_int8_null/serial_avg_int8_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 100.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.adm
new file mode 100644
index 0000000..9d306b7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate-sql/serial_avg_mixed/serial_avg_mixed.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 2.5 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/avg_mixed/avg_mixed.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/avg_mixed/avg_mixed.1.adm
index e3b97f5..19765bd 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/avg_mixed/avg_mixed.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/avg_mixed/avg_mixed.1.adm
@@ -1 +1 @@
-[  ]
+null
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double/serial_avg_double.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double/serial_avg_double.1.adm
new file mode 100644
index 0000000..7e08e77
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double/serial_avg_double.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.92 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double_null/serial_avg_double_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double_null/serial_avg_double_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_double_null/serial_avg_double_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.1.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_empty/serial_sum_empty.1.adm
similarity index 100%
copy from asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.1.ast
copy to asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_empty/serial_sum_empty.1.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float/serial_avg_float.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float/serial_avg_float.1.adm
new file mode 100644
index 0000000..7e08e77
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float/serial_avg_float.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.92 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float_null/serial_avg_float_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float_null/serial_avg_float_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_float_null/serial_avg_float_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16/serial_avg_int16.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16/serial_avg_int16.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16/serial_avg_int16.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int16_null/serial_avg_int16_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32/serial_avg_int32.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32/serial_avg_int32.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32/serial_avg_int32.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int32_null/serial_avg_int32_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64/serial_avg_int64.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64/serial_avg_int64.1.adm
new file mode 100644
index 0000000..821af2f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64/serial_avg_int64.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 4.6 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int64_null/serial_avg_int64_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8/serial_avg_int8.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8/serial_avg_int8.1.adm
new file mode 100644
index 0000000..8e23c3e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8/serial_avg_int8.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": 74.0 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_int8_null/serial_avg_int8_null.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_mixed/serial_avg_mixed.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_mixed/serial_avg_mixed.1.adm
new file mode 100644
index 0000000..40a0b73
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/aggregate/serial_avg_mixed/serial_avg_mixed.1.adm
@@ -0,0 +1 @@
+{ "gid": 1, "avg": null }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.2.adm
new file mode 100644
index 0000000..5c84fb8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.2.adm
@@ -0,0 +1,15 @@
+{ "f1": "a", "f2": "b", "f3": "c", "f4": "d", "f5": "e" }
+{ "f1": "0", "f2": ",     boo", "f3": " 1", "f4": "2", "f5": "3" }
+{ "f1": "1", "f2": "", "f3": "", "f4": "❤", "f5": "" }
+{ "f1": "2", "f2": "3", "f3": "4", "f4": "\\n", "f5": "" }
+{ "f1": "3", "f2": "quoted \"f\" field", "f3": "", "f4": "", "f5": "" }
+{ "f1": "4", "f2": "4", "f3": "", "f4": "", "f5": "" }
+{ "f1": "5", "f2": "{\"vehicle\": \"car\", \"location\": [2.0, 0.1]}", "f3": "", "f4": "", "f5": "" }
+{ "f1": "6", "f2": "2", "f3": "3", "f4": "", "f5": "" }
+{ "f1": "7", "f2": "8", "f3": "9", "f4": "", "f5": "" }
+{ "f1": "8", "f2": "2", "f3": "3", "f4": "", "f5": "" }
+{ "f1": "9", "f2": "8", "f3": "9", "f4": "", "f5": "" }
+{ "f1": "10", "f2": "field\n\"f\"\nwith multiple lines", "f3": "", "f4": "", "f5": "" }
+{ "f1": "11", "f2": "4", "f3": "", "f4": "", "f5": "" }
+{ "f1": "12", "f2": "5", "f3": "ʤ", "f4": "", "f5": "" }
+{ "f1": "John", "f2": "Green", "f3": "111 downtown st.", "f4": "city, state", "f5": "99999" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.3.adm
new file mode 100644
index 0000000..80f5fb7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.3.adm
@@ -0,0 +1,32 @@
+{ "f1": "1", "f2": "?/ Text ending with a backslash  / \\", "f3": "2000-09-03 07:12:22" }
+{ "f1": "2", "f2": "non quoted text!yes......", "f3": "2003-08-09 22:34:19" }
+{ "f1": "3", "f2": "Text with more sentences. Another sentence.", "f3": "2003-09-12 05:29:12" }
+{ "f1": "4", "f2": "Quoted text.. yes.", "f3": "2003-09-13 17:21:49" }
+{ "f1": "5", "f2": "Another text", "f3": "2003-01-21 23:31:41" }
+{ "f1": "6", "f2": "Text with' quotes.", "f3": "2003-09-14 20:15:50" }
+{ "f1": "7", "f2": "Text with quote's", "f3": "2003-09-14 18:34:03" }
+{ "f1": "8", "f2": "Text with quotes '", "f3": "2003-01-28 20:32:13" }
+{ "f1": "9", "f2": "Text with quotes \"", "f3": "2003-01-18 11:44:15" }
+{ "f1": "10", "f2": "Text with question marks!?!?", "f3": "2003-09-18 06:25:56" }
+{ "f1": "11", "f2": "\" Text that starts with quotes", "f3": "2003-09-12 00:31:24" }
+{ "f1": "12", "f2": "Text with \\\" backslash and quotes", "f3": "2003-09-13 20:30:06" }
+{ "f1": "13", "f2": "Text with \\\" backslash and quotes\\\"", "f3": "2003-09-14 16:20:36" }
+{ "f1": "14", "f2": "Text that has comma ,", "f3": "2003-09-12 08:21:18" }
+{ "f1": "15", "f2": "Text that has \",\" quoted comma", "f3": "2003-09-12 08:21:18" }
+{ "f1": "16", "f2": ",Text that has ", "f3": "2003-09-12 08:21:18" }
+{ "f1": "17", "f2": ",\",Text that has ", "f3": "2003-09-12 08:21:18" }
+{ "f1": "18", "f2": "Text with commas,inside it., yes", "f3": "2003-09-13 23:42:14" }
+{ "f1": "19", "f2": "Text that has \\n inside ", "f3": "2003-09-12 08:21:18" }
+{ "f1": "20", "f2": "Text that has \\\\\\n inside ", "f3": "2003-09-12 08:21:18" }
+{ "f1": "21", "f2": "text with :)", "f3": "2003-09-05 19:15:34" }
+{ "f1": "22", "f2": "Text that has \\\\\\\" inside \\\\", "f3": "2003-09-12 08:21:18" }
+{ "f1": "23", "f2": "Text that has \\\\\\\" inside \\\\\"", "f3": "2003-09-12 08:21:18" }
+{ "f1": "24", "f2": "\"text that spans multiple\nLines and more\nLines ane more and more\nLines ...\nAnd yet more lines\nAnd more\"", "f3": "2011-09-19 01:09:09" }
+{ "f1": "25", "f2": "Text \"\nmore lines", "f3": "2011-09-19 01:09:09" }
+{ "f1": "26", "f2": "\"\n", "f3": "2011-09-19 01:09:09" }
+{ "f1": "27", "f2": "Text", "f3": "" }
+{ "f1": "28", "f2": "Text", "f3": "2011-09-19 01:09:09" }
+{ "f1": "29", "f2": "Text\\.", "f3": "2011-09-19 01:09:09" }
+{ "f1": "30", "f2": "Text\\.", "f3": "2011-09-19 01:09:09" }
+{ "f1": "31", "f2": "\\.Text", "f3": "2011-09-19 01:09:09" }
+{ "f1": "32", "f2": "\\.Text", "f3": "2011-09-19 01:09:09" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.4.adm
new file mode 100644
index 0000000..5c61b4a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.4.adm
@@ -0,0 +1,4 @@
+{ "f1": "1", "f2": ",\", b", "f3": " 3", "f4": "4", "f5": "5" }
+{ "f1": ",\", b", "f2": "4", "f3": " 3", "f4": "4", "f5": "5" }
+{ "f1": "", "f2": "", "f3": "", "f4": "", "f5": "" }
+{ "f1": "dd", "f2": "", "f3": "", "f4": "", "f5": "" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.5.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.5.adm
new file mode 100644
index 0000000..4b80e26
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/csv-parser-001/csv-parser-001.5.adm
@@ -0,0 +1,13 @@
+{ "f1": 1, "f2": true, "f3": "text" }
+{ "f1": 2, "f2": false, "f3": "text" }
+{ "f1": 3, "f2": true, "f3": "text" }
+{ "f1": 4, "f2": true, "f3": null }
+{ "f1": 5, "f2": false, "f3": null }
+{ "f1": 6, "f2": true, "f3": "text\"\nmore lines" }
+{ "f1": 7, "f2": false, "f3": "\"\n" }
+{ "f1": 8, "f2": true, "f3": null }
+{ "f1": 9, "f2": false, "f3": "text\"" }
+{ "f1": 10, "f2": false, "f3": "text\\." }
+{ "f1": 11, "f2": true, "f3": "text\\." }
+{ "f1": null, "f2": false, "f3": "\\.text" }
+{ "f1": 13, "f2": true, "f3": "\\.text" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/tsv-parser-001/tsv-parser-001.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/tsv-parser-001/tsv-parser-001.2.adm
new file mode 100644
index 0000000..fbe287b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/csv-tsv-parser/tsv-parser-001/tsv-parser-001.2.adm
@@ -0,0 +1,28 @@
+{ "f1": 11, "f2": 55, "f3": "text field wih , charrrrrrrrrrr", "f4": true, "f5": 90, "f6": 0.666666667 }
+{ "f1": 12, "f2": 55, "f3": "text field with \" charrrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 14, "f2": 55, "f3": "text field with ' charrrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 15, "f2": 55, "f3": "text field with \\ charrrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 16, "f2": 55, "f3": "text field wih \\, char         ", "f4": true, "f5": 90, "f6": 0.666666667 }
+{ "f1": 17, "f2": 55, "f3": "text field with \\\" charrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 18, "f2": 55, "f3": "text field with \\' charrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 19, "f2": 55, "f3": "text field with \\\\ charrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 20, "f2": 55, "f3": "text field ending with  charr ,", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 21, "f2": 55, "f3": "text field ending with  charr \"", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 22, "f2": 55, "f3": "text field ending with  charr '", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 23, "f2": 55, "f3": "text field ending with  charr \\", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 24, "f2": 55, "f3": "text field ending with charr \\,", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 25, "f2": 55, "f3": "text field ending with charr \\\"", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 26, "f2": 55, "f3": "text field ending with charr \\'", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 27, "f2": 55, "f3": "text field ending with charr \\\\", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 28, "f2": 55, "f3": ",text field starting with charr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 29, "f2": 55, "f3": "\"text field starting with charr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 30, "f2": 55, "f3": "'text field starting with charr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 31, "f2": 55, "f3": "\\text field starting with charr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 32, "f2": 55, "f3": "\\,text field starting with char", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 33, "f2": 55, "f3": "\\\"text field starting with char", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 34, "f2": 55, "f3": "\\'text field starting with char", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 35, "f2": 55, "f3": "\\\\text field starting with char", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 36, "f2": 55, "f3": "\"text field inside   with char\"", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 37, "f2": 55, "f3": "  text field with charrrrrrrrr ", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 38, "f2": 55, "f3": "text field with \"\" charrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
+{ "f1": 39, "f2": 55, "f3": "text field \"with\" charrrrrrrrrr", "f4": false, "f5": 90, "f6": 0.666666667 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.adm
new file mode 100644
index 0000000..8a539c8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.2.adm
@@ -0,0 +1,21 @@
+{ "en": "Dataset", "DatatypeName": "$d$t$i$A_Customers_Closed", "DatasetName": "A_Customers_Closed", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$A_Customers_Default_Closed", "DatasetName": "A_Customers_Default_Closed", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$A_Customers_Open", "DatasetName": "A_Customers_Open", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$B_Orders_Closed", "DatasetName": "B_Orders_Closed", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$B_Orders_Default_Closed", "DatasetName": "B_Orders_Default_Closed", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$B_Orders_Open", "DatasetName": "B_Orders_Open", "DatatypeDataverseName": "test" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$C_Customers_Meta_Closed", "DatasetName": "C_Customers_Meta_Closed", "DatatypeDataverseName": "test", "MetatypeDataverseName": "test", "MetatypeName": "$d$t$m$C_Customers_Meta_Closed" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$C_Customers_Meta_Default_Closed", "DatasetName": "C_Customers_Meta_Default_Closed", "DatatypeDataverseName": "test", "MetatypeDataverseName": "test", "MetatypeName": "$d$t$m$C_Customers_Meta_Default_Closed" }
+{ "en": "Dataset", "DatatypeName": "$d$t$i$C_Customers_Meta_Open", "DatasetName": "C_Customers_Meta_Open", "DatatypeDataverseName": "test", "MetatypeDataverseName": "test", "MetatypeName": "$d$t$m$C_Customers_Meta_Open" }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$A_Customers_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$A_Customers_Default_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$A_Customers_Open", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$B_Orders_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "o_orderkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_orderstatus", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_totalprice", "FieldType": "double", "IsNullable": false }, { "FieldName": "o_orderdate", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_orderpriority", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_clerk", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_shippriority", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$B_Orders_Default_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "o_orderkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_orderstatus", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_totalprice", "FieldType": "double", "IsNullable": false }, { "FieldName": "o_orderdate", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_orderpriority", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_clerk", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_shippriority", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$B_Orders_Open", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "o_orderkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_orderstatus", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_totalprice", "FieldType": "double", "IsNullable": false }, { "FieldName": "o_orderdate", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_orderpriority", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_clerk", "FieldType": "string", "IsNullable": false }, { "FieldName": "o_shippriority", "FieldType": "int32", "IsNullable": false }, { "FieldName": "o_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$C_Customers_Meta_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$C_Customers_Meta_Default_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$i$C_Customers_Meta_Open", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "c_custkey", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_name", "FieldType": "string", "IsNullable": false }, { "FieldName": "c_phone", "FieldType": "string", "IsNullable": true }, { "FieldName": "c_comment", "FieldType": "string", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$m$C_Customers_Meta_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_x", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_y", "FieldType": "int32", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$m$C_Customers_Meta_Default_Closed", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "c_x", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_y", "FieldType": "int32", "IsNullable": true } ] } } }
+{ "en": "Datatype", "DatatypeName": "$d$t$m$C_Customers_Meta_Open", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "c_x", "FieldType": "int32", "IsNullable": false }, { "FieldName": "c_y", "FieldType": "int32", "IsNullable": true } ] } } }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.adm
new file mode 100644
index 0000000..c227083
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-1/create-dataset-inline-type-1.4.adm
@@ -0,0 +1 @@
+0
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.adm
new file mode 100644
index 0000000..3591912
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/ddl/create-dataset-inline-type-2/create-dataset-inline-type-2.2.adm
@@ -0,0 +1 @@
+{ "cnt": 2 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/000/external_dataset.001.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/000/external_dataset.001.adm
new file mode 100644
index 0000000..a7ce908
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/000/external_dataset.001.adm
@@ -0,0 +1,14 @@
+{ "id": 1, "year": null, "quarter": null, "review": "good" }
+{ "id": 2, "year": null, "quarter": null, "review": "good" }
+{ "id": 3, "year": 2018, "quarter": null, "review": "good" }
+{ "id": 4, "year": 2018, "quarter": null, "review": "bad" }
+{ "id": 5, "year": 2018, "quarter": 1, "review": "good" }
+{ "id": 6, "year": 2018, "quarter": 1, "review": "bad" }
+{ "id": 7, "year": 2018, "quarter": 2, "review": "good" }
+{ "id": 8, "year": 2018, "quarter": 2, "review": "bad" }
+{ "id": 9, "year": 2019, "quarter": null, "review": "good" }
+{ "id": 10, "year": 2019, "quarter": null, "review": "bad" }
+{ "id": 11, "year": 2019, "quarter": 1, "review": "good" }
+{ "id": 12, "year": 2019, "quarter": 1, "review": "bad" }
+{ "id": 13, "year": 2019, "quarter": 2, "review": "good" }
+{ "id": 14, "year": 2019, "quarter": 2, "review": "bad" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/001/external_dataset.001.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/001/external_dataset.001.adm
new file mode 100644
index 0000000..93d1b57
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/001/external_dataset.001.adm
@@ -0,0 +1,6 @@
+{ "id": 1, "year": null, "review": "good", "details": "recommend" }
+{ "id": 2, "year": null, "review": "bad", "details": "not recommend" }
+{ "id": 3, "year": null, "review": "good", "details": null }
+{ "id": 4, "year": 2018, "review": "good", "details": "recommend" }
+{ "id": 5, "year": 2018, "review": "", "details": "not recommend" }
+{ "id": 6, "year": 2018, "review": "good", "details": null }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/002/external_dataset.001.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/002/external_dataset.001.adm
new file mode 100644
index 0000000..1954b05
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-dataset/aws/s3/002/external_dataset.001.adm
@@ -0,0 +1,6 @@
+{ "id": 1, "year": null, "review": "\"good\"", "details": "\"recommend\"" }
+{ "id": 2, "year": null, "review": "\"bad\"", "details": "\"not recommend\"" }
+{ "id": 3, "year": null, "review": "\"good\"", "details": "\"recommend\"" }
+{ "id": 4, "year": 2018, "review": "\"good\"", "details": "\"recommend\"" }
+{ "id": 5, "year": 2018, "review": "", "details": "\"not recommend\"" }
+{ "id": 6, "year": 2018, "review": "\"good\"", "details": "\"recommend\"" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
index 85cd967..3ab469e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "FeedName": "TweetFeed", "AdapterConfiguration": {{ { "Name": "path", "Value": "asterix_nc1://data/twitter/obamatweets.adm" }, { "Name": "feed", "Value": "TweetFeed" }, { "Name": "adapter-name", "Value": "localfs" }, { "Name": "is-feed", "Value": "true" }, { "Name": "parser", "Value": "adm" }, { "Name": "reader", "Value": "localfs" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" }, { "Name": "type-name", "Value": "TweetType" }, { "Name": "dataverse", "Value": "feeds" } }}, "Timestamp": "Thu Dec 07 19:22:41 PST 2017" }
+{ "DataverseName": "feeds", "FeedName": "TweetFeed", "AdapterConfiguration": {{ { "Name": "path", "Value": "asterix_nc1://data/twitter/obamatweets.adm" }, { "Name": "feed", "Value": "TweetFeed" }, { "Name": "adapter-name", "Value": "localfs" }, { "Name": "is-feed", "Value": "true" }, { "Name": "parser", "Value": "adm" }, { "Name": "reader", "Value": "localfs" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" }, { "Name": "type-name", "Value": "TweetType" }, { "Name": "dataverse", "Value": "feeds" } }}, "Timestamp": "Tue Mar 31 10:30:06 PDT 2020" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ast
deleted file mode 100644
index e69de29..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ast
+++ /dev/null
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.ast
deleted file mode 100644
index e69de29..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.ast
+++ /dev/null
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.ast
index 034df11..7fac994 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.ast
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.ast
@@ -10,7 +10,7 @@
           LiteralExpr [STRING] [2.0]
         ]
         LiteralExpr [STRING] [hello world]
-        LiteralExpr [LONG] [93847382783847382]
+        LiteralExpr [LONG] [10]
         FunctionCall null.date@1[
           LiteralExpr [STRING] [2013-01-01]
         ]
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.2.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.2.ast
deleted file mode 100644
index e69de29..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/aggregate/avg_mixed/avg_mixed.2.ast
+++ /dev/null
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/cross-dataverse/cross-dv01/cross-dv01.1.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/cross-dataverse/cross-dv01/cross-dv01.1.ast
index c3a4877..ffe5bc3 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/cross-dataverse/cross-dv01/cross-dv01.1.ast
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/cross-dataverse/cross-dv01/cross-dv01.1.ast
@@ -17,7 +17,7 @@
     dept : string
   }
 ]
-DatasetDecl ugdstd(stdType) partitioned by [[id]]
-DatasetDecl gdstd(stdType) partitioned by [[id]]
+DatasetDecl ugdstd(student.stdType) partitioned by [[id]]
+DatasetDecl gdstd(student.stdType) partitioned by [[id]]
 DatasetDecl prof(tchrType) partitioned by [[id]]
-DatasetDecl pstdoc(tchrType) partitioned by [[id]]
+DatasetDecl pstdoc(tchrType) partitioned by [[id]]
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset.xml
new file mode 100644
index 0000000..9948209
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_external_dataset.xml
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries_sqlpp">
+  <test-group name="external-dataset">
+    <test-case FilePath="external-dataset">
+      <compilation-unit name="aws/s3/000">
+        <output-dir compare="Text">aws/s3/000</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset">
+      <compilation-unit name="aws/s3/001">
+        <output-dir compare="Text">aws/s3/001</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="external-dataset">
+      <compilation-unit name="aws/s3/002">
+        <output-dir compare="Text">aws/s3/002</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
+</test-suite>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index cc2cd24..a578690 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -303,10 +303,16 @@
         <output-dir compare="Text">agg_number_rec</output-dir>
       </compilation-unit>
     </test-case>
-    <test-case FilePath="aggregate">
+    <test-case FilePath="aggregate" check-warnings="true">
       <compilation-unit name="avg_mixed">
         <output-dir compare="Text">avg_mixed</output-dir>
-        <expected-error>Type incompatibility: function agg-avg gets incompatible input values: string and float</expected-error>
+        <expected-warn>Unsupported type: agg-avg cannot process input type string (in line 26, at column 16)</expected-warn>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate" check-warnings="true">
+      <compilation-unit name="serial_avg_mixed">
+        <output-dir compare="Text">serial_avg_mixed</output-dir>
+        <expected-warn>Unsupported type: agg-avg cannot process input type string (in line 29, at column 39)</expected-warn>
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate">
@@ -789,6 +795,71 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_empty">
+        <output-dir compare="Text">serial_avg_empty</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int8">
+        <output-dir compare="Text">serial_avg_int8</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int8_null">
+        <output-dir compare="Text">serial_avg_int8_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int16">
+        <output-dir compare="Text">serial_avg_int16</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int16_null">
+        <output-dir compare="Text">serial_avg_int16_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int32">
+        <output-dir compare="Text">serial_avg_int32</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int32_null">
+        <output-dir compare="Text">serial_avg_int32_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int64">
+        <output-dir compare="Text">serial_avg_int64</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_int64_null">
+        <output-dir compare="Text">serial_avg_int64_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_float">
+        <output-dir compare="Text">serial_avg_float</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_float_null">
+        <output-dir compare="Text">serial_avg_float_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_double">
+        <output-dir compare="Text">serial_avg_double</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
+      <compilation-unit name="serial_avg_double_null">
+        <output-dir compare="Text">serial_avg_double_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate">
       <compilation-unit name="serial_kurtosis_double">
         <output-dir compare="Text">serial_kurtosis_double</output-dir>
       </compilation-unit>
@@ -1497,10 +1568,16 @@
         <output-dir compare="Text">agg_number_rec</output-dir>
       </compilation-unit>
     </test-case>
-    <test-case FilePath="aggregate-sql">
+    <test-case FilePath="aggregate-sql" check-warnings="true">
       <compilation-unit name="avg_mixed">
         <output-dir compare="Text">avg_mixed</output-dir>
-        <expected-error>Type incompatibility: function agg-avg gets incompatible input values: string and float</expected-error>
+        <expected-warn>Unsupported type: agg-avg cannot process input type string (in line 26, at column 16)</expected-warn>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql" check-warnings="true">
+      <compilation-unit name="serial_avg_mixed">
+        <output-dir compare="Text">serial_avg_mixed</output-dir>
+        <expected-warn>Unsupported type: agg-avg cannot process input type string (in line 29, at column 38)</expected-warn>
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate-sql">
@@ -1981,6 +2058,71 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_empty">
+        <output-dir compare="Text">serial_avg_empty</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int8">
+        <output-dir compare="Text">serial_avg_int8</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int8_null">
+        <output-dir compare="Text">serial_avg_int8_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int16">
+        <output-dir compare="Text">serial_avg_int16</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int16_null">
+        <output-dir compare="Text">serial_avg_int16_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int32">
+        <output-dir compare="Text">serial_avg_int32</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int32_null">
+        <output-dir compare="Text">serial_avg_int32_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int64">
+        <output-dir compare="Text">serial_avg_int64</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_int64_null">
+        <output-dir compare="Text">serial_avg_int64_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_float">
+        <output-dir compare="Text">serial_avg_float</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_float_null">
+        <output-dir compare="Text">serial_avg_float_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_double">
+        <output-dir compare="Text">serial_avg_double</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
+      <compilation-unit name="serial_avg_double_null">
+        <output-dir compare="Text">serial_avg_double_null</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="aggregate-sql">
       <compilation-unit name="serial_kurtosis_double">
         <output-dir compare="Text">serial_kurtosis_double</output-dir>
       </compilation-unit>
@@ -3718,6 +3860,18 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="ddl">
+      <compilation-unit name="create-dataset-inline-type-1">
+        <output-dir compare="Text">create-dataset-inline-type-1</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="ddl">
+      <compilation-unit name="create-dataset-inline-type-2">
+        <output-dir compare="Text">create-dataset-inline-type-2</output-dir>
+        <expected-error>ASX1082: Cannot find datatype with name test.$d$t$i$Cust1</expected-error>
+        <expected-error>ASX1082: Cannot find datatype with name test.$d$t$i$Cust2</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="ddl">
       <compilation-unit name="drop-primary-index">
         <output-dir compare="Text">drop-primary-index</output-dir>
         <expected-error>Cannot drop index "ds". Drop dataset "ds" to remove this index</expected-error>
@@ -12095,6 +12249,18 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="csv-tsv-parser">
+    <test-case FilePath="csv-tsv-parser">
+      <compilation-unit name="csv-parser-001">
+        <output-dir compare="Text">csv-parser-001</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="csv-tsv-parser">
+      <compilation-unit name="tsv-parser-001">
+        <output-dir compare="Text">tsv-parser-001</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
   <test-group name="binary">
     <test-case FilePath="binary">
       <compilation-unit name="parse">
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/ICoordinationService.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/ICoordinationService.java
index 2019da9..5d2ef07 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/ICoordinationService.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/ICoordinationService.java
@@ -35,6 +35,18 @@
     void put(String key, byte[] value) throws HyracksDataException;
 
     /**
+     * Adds or updates the property named {@code key} with the given
+     * {@code value}, and marks values associated with this key as
+     * sensitive (e.g., they must not be logged). Once a key is
+     * considered sensitive, it remains sensitive until deleted.
+     *
+     * @param key
+     * @param value
+     * @throws HyracksDataException
+     */
+    void putSensitive(String key, byte[] value) throws HyracksDataException;
+
+    /**
      * Gets the value of the property with name {@code key} if exists.
      *
      * @param key
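As an aside (not part of the patch), a minimal sketch of how a caller might use the new method; the helper class and the key names here are hypothetical, only put/putSensitive come from the interface above:

import java.nio.charset.StandardCharsets;

import org.apache.asterix.common.api.ICoordinationService;
import org.apache.hyracks.api.exceptions.HyracksDataException;

// Hypothetical helper for illustration only; the property names are made up.
final class SensitivePropertySketch {
    static void store(ICoordinationService coord, String region, String secretKey) throws HyracksDataException {
        // regular property: its value may show up in logs
        coord.put("s3.region", region.getBytes(StandardCharsets.UTF_8));
        // sensitive property: not logged, and stays sensitive until the key is deleted
        coord.putSensitive("s3.secretKey", secretKey.getBytes(StandardCharsets.UTF_8));
    }
}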
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
index 3389962..5fc1bb7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.common.dataflow;
 
 import org.apache.asterix.common.api.IApplicationContext;
+import org.apache.asterix.common.api.ICoordinationService;
 import org.apache.asterix.common.api.IMetadataLockManager;
 import org.apache.asterix.common.api.INodeJobTracker;
 import org.apache.asterix.common.api.IRequestTracker;
@@ -26,6 +27,7 @@
 import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
 import org.apache.asterix.common.config.ExtensionProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.external.IAdapterFactoryService;
 import org.apache.asterix.common.metadata.IMetadataBootstrap;
 import org.apache.asterix.common.replication.INcLifecycleCoordinator;
 import org.apache.asterix.common.storage.ICompressionManager;
@@ -127,4 +129,18 @@
      * @return the request tracker.
      */
     IRequestTracker getRequestTracker();
+
+    /**
+     * Gets the coordination service
+     *
+     * @return the coordination service
+     */
+    ICoordinationService getCoordinationService();
+
+    /**
+     * Gets the adapter factory service
+     *
+     * @return the adapter factory service
+     */
+    IAdapterFactoryService getAdapterFactoryService();
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactory.java
similarity index 82%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactory.java
index 40bc7d8..e2e7e3e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactory.java
@@ -16,12 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.api;
+package org.apache.asterix.common.external;
 
 import java.io.Serializable;
 import java.util.Map;
 
-import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.application.IServiceContext;
@@ -60,8 +59,8 @@
     /**
      * Creates an instance of IDatasourceAdapter.
      *
-     * @param HyracksTaskContext
-     * @param partition
+     * @param ctx HyracksTaskContext
+     * @param partition partition number
      * @return An instance of IDatasourceAdapter.
      * @throws Exception
      */
@@ -77,28 +76,4 @@
      */
     void configure(IServiceContext serviceContext, Map<String, String> configuration)
             throws HyracksDataException, AlgebricksException;
-
-    /**
-     * Set the expected record output type of the adapter
-     *
-     * @param outputType
-     */
-    void setOutputType(ARecordType outputType);
-
-    /**
-     * Set the expected meta output type of the adapter
-     *
-     * @param metaType
-     */
-    void setMetaType(ARecordType metaType);
-
-    /**
-     * @return the adapter record output type
-     */
-    ARecordType getOutputType();
-
-    /**
-     * @return the adapter meta output type
-     */
-    ARecordType getMetaType();
 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactoryService.java
similarity index 75%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactoryService.java
index bd244d0..55e25b7 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IAdapterFactoryService.java
@@ -16,4 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.asterix.common.external;
 
+@FunctionalInterface
+public interface IAdapterFactoryService {
+
+    /**
+     * Creates and returns an adapter factory
+     *
+     * @return adapter factory
+     */
+    IAdapterFactory createAdapterFactory();
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IDataSourceAdapter.java
similarity index 97%
rename from asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IDataSourceAdapter.java
index 472cdae..18f59f2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/external/IDataSourceAdapter.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.asterix.external.api;
+package org.apache.asterix.common.external;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index 605fbe5..30e7770 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -435,5 +435,17 @@
       <groupId>io.netty</groupId>
       <artifactId>netty-all</artifactId>
     </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>s3</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>regions</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>auth</artifactId>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/AdapterFactoryService.java
similarity index 62%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
copy to asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/AdapterFactoryService.java
index bd244d0..aaf2002 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.2.update.sqlpp
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/AdapterFactoryService.java
@@ -16,4 +16,20 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.asterix.external.adapter.factory;
 
+import org.apache.asterix.common.external.IAdapterFactoryService;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
+
+public class AdapterFactoryService implements IAdapterFactoryService {
+
+    /**
+     * Creates and returns an adapter factory
+     *
+     * @return adapter factory
+     */
+    @Override
+    public ITypedAdapterFactory createAdapterFactory() {
+        return new GenericAdapterFactory();
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index fc59f68..078316c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -25,14 +25,15 @@
 import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.api.INcApplicationContext;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.common.library.ILibraryManager;
-import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IDataParserFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 import org.apache.asterix.external.api.IIndexingAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.dataset.adapter.GenericAdapter;
@@ -59,7 +60,7 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
-public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterFactory {
+public class GenericAdapterFactory implements IIndexingAdapterFactory, ITypedAdapterFactory {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = LogManager.getLogger();
@@ -122,7 +123,7 @@
     private void restoreExternalObjects(IServiceContext serviceContext, ILibraryManager libraryManager)
             throws HyracksDataException, AlgebricksException {
         if (dataSourceFactory == null) {
-            dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
+            dataSourceFactory = createExternalDataSourceFactory(configuration, libraryManager);
             // create and configure parser factory
             if (dataSourceFactory.isIndexible() && (files != null)) {
                 ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
@@ -131,7 +132,7 @@
         }
         if (dataParserFactory == null) {
             // create and configure parser factory
-            dataParserFactory = ParserFactoryProvider.getDataParserFactory(libraryManager, configuration);
+            dataParserFactory = createDataParserFactory(configuration, libraryManager);
             dataParserFactory.setRecordType(recordType);
             dataParserFactory.setMetaType(metaType);
             dataParserFactory.configure(configuration);
@@ -144,14 +145,13 @@
         this.configuration = configuration;
         IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
         ExternalDataUtils.validateDataSourceParameters(configuration);
-        dataSourceFactory =
-                DatasourceFactoryProvider.getExternalDataSourceFactory(appCtx.getLibraryManager(), configuration);
+        dataSourceFactory = createExternalDataSourceFactory(configuration, appCtx.getLibraryManager());
         if (dataSourceFactory.isIndexible() && (files != null)) {
             ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
         }
         dataSourceFactory.configure(serviceContext, configuration);
         ExternalDataUtils.validateDataParserParameters(configuration);
-        dataParserFactory = ParserFactoryProvider.getDataParserFactory(appCtx.getLibraryManager(), configuration);
+        dataParserFactory = createDataParserFactory(configuration, appCtx.getLibraryManager());
         dataParserFactory.setRecordType(recordType);
         dataParserFactory.setMetaType(metaType);
         dataParserFactory.configure(configuration);
@@ -222,4 +222,14 @@
         dataParserFactory.configure(Collections.emptyMap());
         configuration = Collections.emptyMap();
     }
+
+    protected IExternalDataSourceFactory createExternalDataSourceFactory(Map<String, String> configuration,
+            ILibraryManager libraryManager) throws HyracksDataException, AsterixException {
+        return DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
+    }
+
+    protected IDataParserFactory createDataParserFactory(Map<String, String> configuration,
+            ILibraryManager libraryManager) throws AsterixException {
+        return ParserFactoryProvider.getDataParserFactory(libraryManager, configuration);
+    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingAdapterFactory.java
index 37cc1cf..8d42046 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingAdapterFactory.java
@@ -22,6 +22,6 @@
 
 import org.apache.asterix.external.indexing.ExternalFile;
 
-public interface IIndexingAdapterFactory extends IAdapterFactory {
+public interface IIndexingAdapterFactory extends ITypedAdapterFactory {
     public void setSnapshot(List<ExternalFile> files, boolean indexingOp);
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITypedAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITypedAdapterFactory.java
new file mode 100644
index 0000000..13e3b34
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITypedAdapterFactory.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import org.apache.asterix.common.external.IAdapterFactory;
+import org.apache.asterix.om.types.ARecordType;
+
+/**
+ * An adapter factory that is aware of its adapter's output types. Extends
+ * {@link IAdapterFactory} with methods for setting and getting the expected
+ * record output type and meta type.
+ */
+public interface ITypedAdapterFactory extends IAdapterFactory {
+
+    /**
+     * Set the expected record output type of the adapter
+     *
+     * @param outputType
+     */
+    void setOutputType(ARecordType outputType);
+
+    /**
+     * Set the expected meta output type of the adapter
+     *
+     * @param metaType
+     */
+    void setMetaType(ARecordType metaType);
+
+    /**
+     * @return the adapter record output type
+     */
+    ARecordType getOutputType();
+
+    /**
+     * @return the adapter meta output type
+     */
+    ARecordType getMetaType();
+}
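For orientation only, a sketch of how the split contract might be wired together, assuming the createAdapter(ctx, partition) method documented in IAdapterFactory above; the class and method names in this snippet are illustrative, not part of the patch:

import java.util.Map;

import org.apache.asterix.common.external.IDataSourceAdapter;
import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
import org.apache.asterix.external.api.ITypedAdapterFactory;
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;

// Illustrative wiring only: the type-aware methods now live on ITypedAdapterFactory,
// while the relocated IAdapterFactory in asterix-common stays type-agnostic.
final class AdapterWiringSketch {
    static IDataSourceAdapter wire(IServiceContext svcCtx, IHyracksTaskContext taskCtx,
            Map<String, String> conf, ARecordType recordType, int partition) throws Exception {
        ITypedAdapterFactory factory = new GenericAdapterFactory();
        factory.setOutputType(recordType);   // typed half of the contract
        factory.configure(svcCtx, conf);     // untyped half shared with IAdapterFactory
        return factory.createAdapter(taskCtx, partition);
    }
}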
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
index 2a92d40..0ab59fe 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
@@ -21,7 +21,7 @@
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
index 916fe0a..0904384 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
@@ -18,8 +18,8 @@
  */
 package org.apache.asterix.external.dataset.adapter;
 
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.external.api.IDataFlowController;
-import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
index 8255ebb..5c8f219 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
@@ -41,7 +41,7 @@
         this.cursor = new FieldCursorForDelimitedDataParser(null, delimiter, ExternalDataConstants.QUOTE);
         this.record = new CharArrayRecord();
         this.valueIndex = valueIndex;
-        this.recordWithMetadata = new RecordWithMetadataAndPK<char[]>(record, metaType.getFieldTypes(), recordType,
+        this.recordWithMetadata = new RecordWithMetadataAndPK<>(record, metaType.getFieldTypes(), recordType,
                 keyIndicator, keyIndexes, keyTypes);
     }
 
@@ -53,16 +53,15 @@
         int i = 0;
         int j = 0;
         while (cursor.nextField()) {
-            if (cursor.isDoubleQuoteIncludedInThisField) {
-                cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
-                cursor.fEnd -= cursor.doubleQuoteCount;
-                cursor.isDoubleQuoteIncludedInThisField = false;
+            if (cursor.fieldHasDoubleQuote()) {
+                cursor.eliminateDoubleQuote();
             }
             if (i == valueIndex) {
-                record.setValue(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
+                record.setValue(cursor.getBuffer(), cursor.getFieldStart(), cursor.getFieldLength());
                 record.endRecord();
             } else {
-                recordWithMetadata.setRawMetadata(j, cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
+                recordWithMetadata.setRawMetadata(j, cursor.getBuffer(), cursor.getFieldStart(),
+                        cursor.getFieldLength());
                 j++;
             }
             i++;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java
new file mode 100644
index 0000000..cfa1f6a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStream.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.aws;
+
+import static org.apache.asterix.external.util.ExternalDataConstants.AwsS3Constants;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.external.api.AsterixInputStream;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.util.CleanupUtils;
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.GetObjectRequest;
+
+public class AwsS3InputStream extends AsterixInputStream {
+
+    // Configuration
+    private final Map<String, String> configuration;
+
+    private final S3Client s3Client;
+
+    // File fields
+    private final List<String> filePaths;
+    private int nextFileIndex = 0;
+
+    // File reading fields
+    private InputStream inputStream;
+
+    public AwsS3InputStream(Map<String, String> configuration, List<String> filePaths) {
+        this.configuration = configuration;
+        this.filePaths = filePaths;
+
+        this.s3Client = buildAwsS3Client(configuration);
+    }
+
+    @Override
+    public int read() throws IOException {
+        throw new HyracksDataException(
+                "read() is not supported with this stream. use read(byte[] b, int off, int len)");
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+        if (inputStream == null) {
+            if (!advance()) {
+                return -1;
+            }
+        }
+
+        int result = inputStream.read(b, off, len);
+
+        // If file reading is done, go to the next file, or finish up if no files are left
+        if (result < 0) {
+            if (advance()) {
+                result = inputStream.read(b, off, len);
+            } else {
+                return -1;
+            }
+        }
+
+        return result;
+    }
+
+    private boolean advance() throws IOException {
+        // No files to read for this partition
+        if (filePaths == null || filePaths.isEmpty()) {
+            return false;
+        }
+
+        // Finished reading all the files
+        if (nextFileIndex == filePaths.size()) {
+            if (inputStream != null) {
+                inputStream.close();
+            }
+            return false;
+        }
+
+        // Close the current stream before going to the next one
+        if (inputStream != null) {
+            inputStream.close();
+        }
+
+        String bucket = configuration.get(AwsS3Constants.CONTAINER_NAME_FIELD_NAME);
+        GetObjectRequest.Builder getObjectBuilder = GetObjectRequest.builder();
+        GetObjectRequest getObjectRequest = getObjectBuilder.bucket(bucket).key(filePaths.get(nextFileIndex)).build();
+        inputStream = s3Client.getObject(getObjectRequest);
+
+        // Current file ready, point to the next file
+        nextFileIndex++;
+        return true;
+    }
+
+    @Override
+    public boolean stop() {
+        return false;
+    }
+
+    @Override
+    public boolean handleException(Throwable th) {
+        return false;
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (inputStream != null) {
+            CleanupUtils.close(inputStream, null);
+        }
+    }
+
+    /**
+     * Prepares and builds the Amazon S3 client with the provided configuration
+     *
+     * @param configuration S3 client configuration
+     *
+     * @return Amazon S3 client
+     */
+    private static S3Client buildAwsS3Client(Map<String, String> configuration) {
+        S3ClientBuilder builder = S3Client.builder();
+
+        // Credentials
+        String accessKey = configuration.get(AwsS3Constants.ACCESS_KEY_FIELD_NAME);
+        String secretKey = configuration.get(AwsS3Constants.SECRET_KEY_FIELD_NAME);
+        AwsBasicCredentials credentials = AwsBasicCredentials.create(accessKey, secretKey);
+        builder.credentialsProvider(StaticCredentialsProvider.create(credentials));
+
+        // Region
+        String region = configuration.get(AwsS3Constants.REGION_FIELD_NAME);
+        builder.region(Region.of(region));
+
+        // Use user's endpoint if provided
+        if (configuration.get(AwsS3Constants.SERVICE_END_POINT_FIELD_NAME) != null) {
+            String endPoint = configuration.get(AwsS3Constants.SERVICE_END_POINT_FIELD_NAME);
+            builder.endpointOverride(URI.create(endPoint));
+        }
+
+        return builder.build();
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStreamFactory.java
new file mode 100644
index 0000000..6b8bb59
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3InputStreamFactory.java
@@ -0,0 +1,253 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.aws;
+
+import static org.apache.asterix.external.util.ExternalDataConstants.AwsS3Constants;
+
+import java.io.Serializable;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.ErrorCode;
+import org.apache.asterix.external.api.AsterixInputStream;
+import org.apache.asterix.external.api.IInputStreamFactory;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.ListObjectsRequest;
+import software.amazon.awssdk.services.s3.model.ListObjectsResponse;
+import software.amazon.awssdk.services.s3.model.S3Object;
+
+public class AwsS3InputStreamFactory implements IInputStreamFactory {
+
+    private static final long serialVersionUID = 1L;
+    private Map<String, String> configuration;
+
+    // Files to read from
+    private List<PartitionWorkLoadBasedOnSize> partitionWorkLoadsBasedOnSize = new ArrayList<>();
+
+    private transient AlgebricksAbsolutePartitionConstraint partitionConstraint;
+
+    @Override
+    public DataSourceType getDataSourceType() {
+        return DataSourceType.STREAM;
+    }
+
+    @Override
+    public boolean isIndexible() {
+        return false;
+    }
+
+    @Override
+    public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition) {
+        return new AwsS3InputStream(configuration, partitionWorkLoadsBasedOnSize.get(partition).getFilePaths());
+    }
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        return partitionConstraint;
+    }
+
+    @Override
+    public void configure(IServiceContext ctx, Map<String, String> configuration) throws AlgebricksException {
+        this.configuration = configuration;
+        ICcApplicationContext ccApplicationContext = (ICcApplicationContext) ctx.getApplicationContext();
+
+        String container = configuration.get(AwsS3Constants.CONTAINER_NAME_FIELD_NAME);
+
+        S3Client s3Client = buildAwsS3Client(configuration);
+
+        // Get all objects in a bucket and extract the paths to files
+        ListObjectsRequest.Builder listObjectsBuilder = ListObjectsRequest.builder().bucket(container);
+        String path = configuration.get(AwsS3Constants.DEFINITION_FIELD_NAME);
+        if (path != null) {
+            listObjectsBuilder.prefix(path + (path.endsWith("/") ? "" : "/"));
+        }
+        ListObjectsResponse listObjectsResponse = s3Client.listObjects(listObjectsBuilder.build());
+        List<S3Object> s3Objects = listObjectsResponse.contents();
+
+        // Exclude the directories and get the files only
+        String fileFormat = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        List<S3Object> fileObjects = getFilesOnly(s3Objects, fileFormat);
+
+        // Partition constraints
+        partitionConstraint = ccApplicationContext.getClusterStateManager().getClusterLocations();
+        int partitionsCount = partitionConstraint.getLocations().length;
+
+        // Distribute work load amongst the partitions
+        distributeWorkLoad(fileObjects, partitionsCount);
+    }
+
+    /**
+     * AWS S3 returns all the objects as paths, not differentiating between folder and files. The path is considered
+     * a file if it does not end up with a "/" which is the separator in a folder structure.
+     *
+     * @param s3Objects List of returned objects
+     *
+     * @return A list of S3 objects that point to files only
+     *
+     * @throws AsterixException AsterixException
+     */
+    private List<S3Object> getFilesOnly(List<S3Object> s3Objects, String fileFormat) throws AsterixException {
+        List<S3Object> filesOnly = new ArrayList<>();
+        String fileExtension = getFileExtension(fileFormat);
+        if (fileExtension == null) {
+            throw AsterixException.create(ErrorCode.PROVIDER_STREAM_RECORD_READER_UNKNOWN_FORMAT, fileFormat);
+        }
+
+        s3Objects.stream().filter(object -> object.key().endsWith(fileExtension)).forEach(filesOnly::add);
+
+        return filesOnly;
+    }
+
+    /**
+     * To efficiently utilize the parallelism, work load will be distributed amongst the partitions based on the file
+     * size.
+     *
+     * Example:
+     * File1 1mb, File2 300kb, File3 300kb, File4 300kb
+     *
+     * Distribution:
+     * Partition1: [File1]
+     * Partition2: [File2, File3, File4]
+     *
+     * @param fileObjects AWS S3 file objects
+     * @param partitionsCount Partitions count
+     */
+    private void distributeWorkLoad(List<S3Object> fileObjects, int partitionsCount) {
+        // Prepare the workloads based on the number of partitions
+        for (int i = 0; i < partitionsCount; i++) {
+            partitionWorkLoadsBasedOnSize.add(new PartitionWorkLoadBasedOnSize());
+        }
+
+        for (S3Object object : fileObjects) {
+            PartitionWorkLoadBasedOnSize smallest = getSmallestWorkLoad();
+            smallest.addFilePath(object.key(), object.size());
+        }
+    }
+
+    /**
+     * Finds the smallest workload and returns it
+     *
+     * @return the smallest workload
+     */
+    private PartitionWorkLoadBasedOnSize getSmallestWorkLoad() {
+        PartitionWorkLoadBasedOnSize smallest = partitionWorkLoadsBasedOnSize.get(0);
+        for (PartitionWorkLoadBasedOnSize partition : partitionWorkLoadsBasedOnSize) {
+            // If the current total size is 0, this partition has no files assigned yet, so pick it directly
+            if (partition.getTotalSize() == 0) {
+                smallest = partition;
+                break;
+            }
+            if (partition.getTotalSize() < smallest.getTotalSize()) {
+                smallest = partition;
+            }
+        }
+
+        return smallest;
+    }
+
+    /**
+     * Prepares and builds the Amazon S3 client with the provided configuration
+     *
+     * @param configuration S3 client configuration
+     *
+     * @return Amazon S3 client
+     */
+    private static S3Client buildAwsS3Client(Map<String, String> configuration) {
+        S3ClientBuilder builder = S3Client.builder();
+
+        // Credentials
+        String accessKey = configuration.get(AwsS3Constants.ACCESS_KEY_FIELD_NAME);
+        String secretKey = configuration.get(AwsS3Constants.SECRET_KEY_FIELD_NAME);
+        AwsBasicCredentials credentials = AwsBasicCredentials.create(accessKey, secretKey);
+        builder.credentialsProvider(StaticCredentialsProvider.create(credentials));
+
+        // Region
+        String region = configuration.get(AwsS3Constants.REGION_FIELD_NAME);
+        builder.region(Region.of(region));
+
+        // Use user's endpoint if provided
+        if (configuration.get(AwsS3Constants.SERVICE_END_POINT_FIELD_NAME) != null) {
+            String endPoint = configuration.get(AwsS3Constants.SERVICE_END_POINT_FIELD_NAME);
+            builder.endpointOverride(URI.create(endPoint));
+        }
+
+        return builder.build();
+    }
+
+    /**
+     * Returns the file extension for the provided file format.
+     *
+     * @param format file format
+     *
+     * @return file extension for the provided file format, null otherwise.
+     */
+    private String getFileExtension(String format) {
+        switch (format.toLowerCase()) {
+            case ExternalDataConstants.FORMAT_JSON_LOWER_CASE:
+                return ".json";
+            case ExternalDataConstants.FORMAT_CSV:
+                return ".csv";
+            case ExternalDataConstants.FORMAT_TSV:
+                return ".tsv";
+            default:
+                return null;
+        }
+    }
+
+    private static class PartitionWorkLoadBasedOnSize implements Serializable {
+        private static final long serialVersionUID = 1L;
+        private List<String> filePaths = new ArrayList<>();
+        private long totalSize = 0;
+
+        PartitionWorkLoadBasedOnSize() {
+        }
+
+        public List<String> getFilePaths() {
+            return filePaths;
+        }
+
+        public void addFilePath(String filePath, long size) {
+            this.filePaths.add(filePath);
+            this.totalSize += size;
+        }
+
+        public long getTotalSize() {
+            return totalSize;
+        }
+
+        @Override
+        public String toString() {
+            return "Files: " + filePaths.size() + ", Total Size: " + totalSize;
+        }
+    }
+}
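For reference (not part of the patch), a sketch of the configuration map this factory consumes; all values below are placeholders, only the constant names come from the code above:

import static org.apache.asterix.external.util.ExternalDataConstants.AwsS3Constants;

import java.util.HashMap;
import java.util.Map;

import org.apache.asterix.external.util.ExternalDataConstants;

// Placeholder values; the keys mirror the constants read by AwsS3InputStreamFactory and AwsS3InputStream.
final class S3ConfigurationSketch {
    static Map<String, String> sample() {
        Map<String, String> conf = new HashMap<>();
        conf.put(AwsS3Constants.CONTAINER_NAME_FIELD_NAME, "my-bucket");   // bucket to list
        conf.put(AwsS3Constants.DEFINITION_FIELD_NAME, "data/csv");        // optional prefix inside the bucket
        conf.put(AwsS3Constants.ACCESS_KEY_FIELD_NAME, "AKIA...");         // credentials
        conf.put(AwsS3Constants.SECRET_KEY_FIELD_NAME, "...");
        conf.put(AwsS3Constants.REGION_FIELD_NAME, "us-west-2");           // passed to Region.of(...)
        conf.put(ExternalDataConstants.KEY_FORMAT, ExternalDataConstants.FORMAT_CSV); // selects ".csv" objects
        return conf;
    }
}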
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3ReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3ReaderFactory.java
new file mode 100644
index 0000000..e78783a
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/aws/AwsS3ReaderFactory.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.aws;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.StreamRecordReaderFactory;
+import org.apache.asterix.external.provider.StreamRecordReaderProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class AwsS3ReaderFactory extends StreamRecordReaderFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final List<String> recordReaderNames =
+            Collections.singletonList(ExternalDataConstants.KEY_ADAPTER_NAME_AWS_S3);
+
+    @Override
+    public List<String> getRecordReaderNames() {
+        return recordReaderNames;
+    }
+
+    @Override
+    public DataSourceType getDataSourceType() {
+        return DataSourceType.RECORDS;
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return char[].class;
+    }
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
+        return streamFactory.getPartitionConstraint();
+    }
+
+    @Override
+    public void configure(IServiceContext ctx, Map<String, String> configuration)
+            throws AlgebricksException, HyracksDataException {
+        this.configuration = configuration;
+
+        // Stream factory
+        streamFactory = new AwsS3InputStreamFactory();
+        streamFactory.configure(ctx, configuration);
+
+        // record reader
+        recordReaderClazz = StreamRecordReaderProvider.getRecordReaderClazz(configuration);
+    }
+
+    @Override
+    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        try {
+            StreamRecordReader streamRecordReader =
+                    (StreamRecordReader) recordReaderClazz.getConstructor().newInstance();
+            streamRecordReader.configure(streamFactory.createInputStream(ctx, partition), configuration);
+            return streamRecordReader;
+        } catch (InstantiationException | IllegalAccessException | InvocationTargetException
+                | NoSuchMethodException e) {
+            throw HyracksDataException.create(e);
+        }
+    }
+}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
index 0b41d4b..be600ed 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
@@ -36,8 +36,9 @@
     protected int newlineLength;
     protected int recordNumber = 0;
     protected boolean nextIsHeader = false;
-    private static final List<String> recordReaderFormats = Collections.unmodifiableList(
-            Arrays.asList(ExternalDataConstants.FORMAT_DELIMITED_TEXT, ExternalDataConstants.FORMAT_CSV));
+    private static final List<String> recordReaderFormats =
+            Collections.unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_DELIMITED_TEXT,
+                    ExternalDataConstants.FORMAT_CSV, ExternalDataConstants.FORMAT_TSV));
     private static final String REQUIRED_CONFIGS = "";
 
     @Override
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
index 4c4128a..1fd328b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
@@ -32,11 +32,10 @@
 public class QuotedLineRecordReader extends LineRecordReader {
 
     private char quote;
-    private boolean prevCharEscape;
-    private boolean inQuote;
+    private char quoteEscape;
     private static final List<String> recordReaderFormats = Collections.unmodifiableList(
             Arrays.asList(ExternalDataConstants.FORMAT_DELIMITED_TEXT, ExternalDataConstants.FORMAT_CSV));
-    private static final String REQUIRED_CONFIGS = "quote";
+    private static final String REQUIRED_CONFIGS = ExternalDataConstants.KEY_QUOTE;
 
     @Override
     public void configure(AsterixInputStream inputStream, Map<String, String> config) throws HyracksDataException {
@@ -47,6 +46,17 @@
                     ExternalDataConstants.PARAMETER_OF_SIZE_ONE, quoteString));
         }
         this.quote = quoteString.charAt(0);
+        String escapeString = config.get(ExternalDataConstants.KEY_QUOTE_ESCAPE);
+        if (escapeString == null) {
+            quoteEscape = ExternalDataConstants.ESCAPE;
+        } else {
+            if (escapeString.length() != 1) {
+                throw new HyracksDataException(
+                        ExceptionUtils.incorrectParameterMessage(ExternalDataConstants.KEY_QUOTE_ESCAPE,
+                                ExternalDataConstants.PARAMETER_OF_SIZE_ONE, escapeString));
+            }
+            quoteEscape = escapeString.charAt(0);
+        }
     }
 
     @Override
@@ -67,31 +77,35 @@
             }
             newlineLength = 0;
             prevCharCR = false;
-            prevCharEscape = false;
+            boolean prevCharEscape = false;
             record.reset();
             int readLength = 0;
-            inQuote = false;
+            boolean inQuote = false;
             do {
                 int startPosn = bufferPosn;
                 if (bufferPosn >= bufferLength) {
                     startPosn = bufferPosn = 0;
                     bufferLength = reader.read(inputBuffer);
                     if (bufferLength <= 0) {
-                        {
-                            if (readLength > 0) {
-                                if (inQuote) {
-                                    throw new IOException("malformed input record ended inside quote");
-                                }
-                                record.endRecord();
-                                recordNumber++;
-                                return true;
+                        if (readLength > 0) {
+                            if (inQuote) {
+                                throw new IOException("malformed input record ended inside quote");
                             }
-                            close();
-                            return false;
+                            record.endRecord();
+                            recordNumber++;
+                            return true;
                         }
+                        close();
+                        return false;
                     }
                 }
+                boolean maybeInQuote = false;
                 for (; bufferPosn < bufferLength; ++bufferPosn) {
+                    if (inputBuffer[bufferPosn] == quote && quoteEscape == quote) {
+                        inQuote |= maybeInQuote;
+                        prevCharEscape |= maybeInQuote;
+                    }
+                    maybeInQuote = false;
                     if (!inQuote) {
                         if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
                             newlineLength = (prevCharCR) ? 2 : 1;
@@ -103,24 +117,25 @@
                             break;
                         }
                         prevCharCR = (inputBuffer[bufferPosn] == ExternalDataConstants.CR);
-                        if (inputBuffer[bufferPosn] == quote) {
-                            if (!prevCharEscape) {
-                                inQuote = true;
-                            }
+                        if (inputBuffer[bufferPosn] == quote && !prevCharEscape) {
+                            // this is an opening quote
+                            inQuote = true;
                         }
                         if (prevCharEscape) {
                             prevCharEscape = false;
                         } else {
-                            prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
+                            // quoteEscape != quote: an opening quote must not also count as an escape
+                            prevCharEscape = inputBuffer[bufferPosn] == quoteEscape && quoteEscape != quote;
                         }
                     } else {
-                        // only look for next quote
-                        if (inputBuffer[bufferPosn] == quote) {
-                            if (!prevCharEscape) {
-                                inQuote = false;
-                            }
+                        // if quote == quoteEscape and current char is quote, then it could be closing or escaping
+                        if (inputBuffer[bufferPosn] == quote && !prevCharEscape) {
+                            // this is most likely a closing quote. the outcome depends on the next char
+                            inQuote = false;
+                            maybeInQuote = true;
                         }
-                        prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
+                        prevCharEscape =
+                                inputBuffer[bufferPosn] == quoteEscape && !prevCharEscape && quoteEscape != quote;
                     }
                 }
                 readLength = bufferPosn - startPosn;
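Separately from the patch, a standalone sketch of the quoting rule the reader now supports: when the escape character equals the quote character, a quote inside a quoted field is an escape only if the next character is also a quote, otherwise it closes the field. This is not the AsterixDB parser, just a self-contained illustration of the same state logic applied to a single line:

import java.util.ArrayList;
import java.util.List;

// Minimal sketch: split one CSV line into fields, honoring a quote char and a
// quoteEscape char that may equal the quote (RFC 4180 style "" escaping).
final class QuoteSplitSketch {
    static List<String> split(String line, char delim, char quote, char quoteEscape) {
        List<String> fields = new ArrayList<>();
        StringBuilder cur = new StringBuilder();
        boolean inQuote = false;
        for (int i = 0; i < line.length(); i++) {
            char c = line.charAt(i);
            if (inQuote) {
                if (c == quote) {
                    if (quoteEscape == quote && i + 1 < line.length() && line.charAt(i + 1) == quote) {
                        cur.append(quote); // doubled quote is an escaped quote
                        i++;               // consume the escaping quote
                    } else {
                        inQuote = false;   // closing quote
                    }
                } else if (c == quoteEscape && i + 1 < line.length() && line.charAt(i + 1) == quote) {
                    cur.append(quote);     // backslash-style escape of a quote
                    i++;
                } else {
                    cur.append(c);
                }
            } else if (c == quote) {
                inQuote = true;            // opening quote
            } else if (c == delim) {
                fields.add(cur.toString());
                cur.setLength(0);
            } else {
                cur.append(c);
            }
        }
        fields.add(cur.toString());
        return fields;
    }

    public static void main(String[] args) {
        // doubled-quote escaping (quote == quoteEscape), as in sample_09.csv
        System.out.println(split("1,\"quoted \"\"f\"\" field\",x", ',', '"', '"'));
        // backslash escaping (quote != quoteEscape), as in sample_10.csv
        System.out.println(split("2,\"Text with \\\" backslash\",y", ',', '"', '\\'));
    }
}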
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalScanOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalScanOperatorDescriptor.java
index 081d49ec..4fd5151 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalScanOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalScanOperatorDescriptor.java
@@ -18,8 +18,8 @@
  */
 package org.apache.asterix.external.operators;
 
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.common.external.IDataSourceAdapter;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
@@ -37,10 +37,10 @@
 
     private static final long serialVersionUID = 1L;
 
-    private IAdapterFactory adapterFactory;
+    private ITypedAdapterFactory adapterFactory;
 
     public ExternalScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc,
-            IAdapterFactory dataSourceAdapterFactory) {
+            ITypedAdapterFactory dataSourceAdapterFactory) {
         super(spec, 0, 1);
         outRecDescs[0] = rDesc;
         this.adapterFactory = dataSourceAdapterFactory;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index 7a0341a..d63e8a8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -25,7 +25,7 @@
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.common.library.ILibraryManager;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.feed.api.IFeed;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.om.types.ARecordType;
@@ -57,7 +57,7 @@
     private final FeedPolicyAccessor policyAccessor;
     private final ARecordType adapterOutputType;
     /** The adaptor factory that is used to create an instance of the feed adaptor **/
-    private IAdapterFactory adaptorFactory;
+    private ITypedAdapterFactory adaptorFactory;
     /** The library that contains the adapter in use. **/
     private String adaptorLibraryName;
     /**
@@ -68,7 +68,7 @@
     /** The configuration parameters associated with the adapter. **/
     private Map<String, String> adaptorConfiguration;
 
-    public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, IAdapterFactory adapterFactory,
+    public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, ITypedAdapterFactory adapterFactory,
             ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor, RecordDescriptor rDesc) {
         super(spec, 0, 1);
         this.feedId = new EntityId(FEED_EXTENSION_NAME, primaryFeed.getDataverseName(), primaryFeed.getFeedName());
@@ -100,15 +100,15 @@
         return new FeedIntakeOperatorNodePushable(ctx, feedId, adaptorFactory, partition, recordDescProvider, this);
     }
 
-    private IAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx) throws HyracksDataException {
-        IAdapterFactory adapterFactory;
+    private ITypedAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx) throws HyracksDataException {
+        ITypedAdapterFactory adapterFactory;
         INcApplicationContext runtimeCtx =
                 (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
         ILibraryManager libraryManager = runtimeCtx.getLibraryManager();
         ClassLoader classLoader = libraryManager.getLibraryClassLoader(feedId.getDataverse(), adaptorLibraryName);
         if (classLoader != null) {
             try {
-                adapterFactory = (IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance());
+                adapterFactory = (ITypedAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance());
                 adapterFactory.setOutputType(adapterOutputType);
                 adapterFactory.configure(ctx.getJobletContext().getServiceContext(), adaptorConfiguration);
             } catch (Exception e) {
@@ -128,11 +128,11 @@
         return feedId;
     }
 
-    public IAdapterFactory getAdaptorFactory() {
+    public ITypedAdapterFactory getAdaptorFactory() {
         return this.adaptorFactory;
     }
 
-    public void setAdaptorFactory(IAdapterFactory factory) {
+    public void setAdaptorFactory(ITypedAdapterFactory factory) {
         this.adaptorFactory = factory;
     }
 
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
index 98f75df..7002a23 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
@@ -23,7 +23,7 @@
 import org.apache.asterix.active.ActiveRuntimeId;
 import org.apache.asterix.active.ActiveSourceOperatorNodePushable;
 import org.apache.asterix.active.EntityId;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.VSizeFrame;
@@ -50,7 +50,7 @@
     private final FeedAdapter adapter;
     private boolean poisoned = false;
 
-    public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, EntityId feedId, IAdapterFactory adapterFactory,
+    public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, EntityId feedId, ITypedAdapterFactory adapterFactory,
             int partition, IRecordDescriptorProvider recordDescProvider,
             FeedIntakeOperatorDescriptor feedIntakeOperatorDescriptor) throws HyracksDataException {
         super(ctx, new ActiveRuntimeId(feedId, FeedIntakeOperatorNodePushable.class.getSimpleName(), partition));
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
index 4e371c8..8facce6 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
@@ -57,9 +57,9 @@
     private ArrayBackedValueStorage[] nameBuffers;
     private boolean areAllNullFields;
 
-    public DelimitedDataParser(IValueParserFactory[] valueParserFactories, char fieldDelimter, char quote,
+    public DelimitedDataParser(IValueParserFactory[] valueParserFactories, char fieldDelimiter, char quote,
             boolean hasHeader, ARecordType recordType, boolean isStreamParser) throws HyracksDataException {
-        this.fieldDelimiter = fieldDelimter;
+        this.fieldDelimiter = fieldDelimiter;
         this.quote = quote;
         this.hasHeader = hasHeader;
         this.recordType = recordType;
@@ -98,7 +98,7 @@
             }
         }
         if (!isStreamParser) {
-            cursor = new FieldCursorForDelimitedDataParser(null, fieldDelimiter, quote);
+            cursor = new FieldCursorForDelimitedDataParser(null, this.fieldDelimiter, quote);
         }
     }
 
@@ -134,25 +134,23 @@
             fieldValueBuffer.reset();
 
             try {
-                if (cursor.fStart == cursor.fEnd && recordType.getFieldTypes()[i].getTypeTag() != ATypeTag.STRING
+                if (cursor.isFieldEmpty() && recordType.getFieldTypes()[i].getTypeTag() != ATypeTag.STRING
                         && recordType.getFieldTypes()[i].getTypeTag() != ATypeTag.NULL) {
                     // if the field is empty and the type is optional, insert
                     // NULL. Note that string type can also process empty field as an
                     // empty string
                     if (!NonTaggedFormatUtil.isOptional(recordType.getFieldTypes()[i])) {
-                        throw new RuntimeDataException(ErrorCode.PARSER_DELIMITED_NONOPTIONAL_NULL, cursor.recordCount,
-                                cursor.fieldCount);
+                        throw new RuntimeDataException(ErrorCode.PARSER_DELIMITED_NONOPTIONAL_NULL,
+                                cursor.getRecordCount(), cursor.getFieldCount());
                     }
                     fieldValueBufferOutput.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
                 } else {
                     fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
-                    // Eliminate doule quotes in the field that we are going to parse
-                    if (cursor.isDoubleQuoteIncludedInThisField) {
-                        cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
-                        cursor.fEnd -= cursor.doubleQuoteCount;
-                        cursor.isDoubleQuoteIncludedInThisField = false;
+                    // Eliminate double quotes in the field that we are going to parse
+                    if (cursor.fieldHasDoubleQuote()) {
+                        cursor.eliminateDoubleQuote();
                     }
-                    valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart,
+                    valueParsers[i].parse(cursor.getBuffer(), cursor.getFieldStart(), cursor.getFieldLength(),
                             fieldValueBufferOutput);
                     areAllNullFields = false;
                 }
@@ -165,15 +163,14 @@
                 throw HyracksDataException.create(e);
             }
         }
+        if (valueParsers.length != cursor.getFieldCount()) {
+            throw new HyracksDataException("Record #" + cursor.getRecordCount() + " is missing some fields");
+        }
     }
 
     @Override
     public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws HyracksDataException {
-        try {
-            cursor.nextRecord(record.get(), record.size());
-        } catch (IOException e) {
-            throw HyracksDataException.create(e);
-        }
+        cursor.nextRecord(record.get(), record.size());
         parseRecord();
         if (!areAllNullFields) {
             recBuilder.write(out, true);
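
The parser now reads field boundaries through accessors on the cursor and delegates quote handling to cursor.eliminateDoubleQuote(). Below is a minimal sketch of what that step does conceptually under the standard CSV doubling convention; the helper is hypothetical, and the real cursor operates on its own internal buffer and counters.

    public final class DoubleQuoteSketch {
        public static void main(String[] args) {
            // field content between the enclosing quotes of a CSV field: say ""hi""
            char[] buffer = "say \"\"hi\"\"".toCharArray();
            int newLength = collapseDoubledQuotes(buffer, 0, buffer.length, '"');
            System.out.println(new String(buffer, 0, newLength)); // prints: say "hi"
        }

        // Collapses each doubled quote inside the field to a single quote, in place,
        // and returns the new field length so the value parser sees the unescaped text.
        static int collapseDoubledQuotes(char[] buffer, int start, int length, char quote) {
            int write = start;
            for (int read = start; read < start + length; read++) {
                buffer[write++] = buffer[read];
                if (buffer[read] == quote && read + 1 < start + length && buffer[read + 1] == quote) {
                    read++; // skip the second quote of the escaped pair
                }
            }
            return write - start;
        }
    }
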
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
index f406729..1fee49f 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
@@ -21,10 +21,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 
-import org.apache.asterix.common.exceptions.ErrorCode;
-import org.apache.asterix.common.exceptions.RuntimeDataException;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
@@ -40,7 +37,8 @@
 
     private static final long serialVersionUID = 1L;
     private static final List<String> parserFormats =
-            Collections.unmodifiableList(Arrays.asList("csv", "delimited-text"));
+            Collections.unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_CSV,
+                    ExternalDataConstants.FORMAT_DELIMITED_TEXT, ExternalDataConstants.FORMAT_TSV));
 
     @Override
     public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
@@ -49,8 +47,8 @@
 
     private DelimitedDataParser createParser() throws HyracksDataException {
         IValueParserFactory[] valueParserFactories = ExternalDataUtils.getValueParserFactories(recordType);
-        Character delimiter = DelimitedDataParserFactory.getDelimiter(configuration);
-        char quote = DelimitedDataParserFactory.getQuote(configuration, delimiter);
+        char delimiter = ExternalDataUtils.getDelimiter(configuration);
+        char quote = ExternalDataUtils.getQuote(configuration, delimiter);
         boolean hasHeader = ExternalDataUtils.hasHeader(configuration);
         return new DelimitedDataParser(valueParserFactories, delimiter, quote, hasHeader, recordType,
                 ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.STREAM));
@@ -67,40 +65,6 @@
         return createParser();
     }
 
-    // Get a delimiter from the given configuration
-    public static char getDelimiter(Map<String, String> configuration) throws HyracksDataException {
-        String delimiterValue = configuration.get(ExternalDataConstants.KEY_DELIMITER);
-        if (delimiterValue == null) {
-            delimiterValue = ExternalDataConstants.DEFAULT_DELIMITER;
-        } else if (delimiterValue.length() != 1) {
-            throw new RuntimeDataException(ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_DELIMITER,
-                    delimiterValue);
-        }
-        return delimiterValue.charAt(0);
-    }
-
-    // Get a quote from the given configuration when the delimiter is given
-    // Need to pass delimiter to check whether they share the same character
-    public static char getQuote(Map<String, String> configuration, char delimiter) throws HyracksDataException {
-        String quoteValue = configuration.get(ExternalDataConstants.KEY_QUOTE);
-        if (quoteValue == null) {
-            quoteValue = ExternalDataConstants.DEFAULT_QUOTE;
-        } else if (quoteValue.length() != 1) {
-            throw new RuntimeDataException(ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_QUOTE,
-                    quoteValue);
-        }
-
-        // Since delimiter (char type value) can't be null,
-        // we only check whether delimiter and quote use the same character
-        if (quoteValue.charAt(0) == delimiter) {
-            throw new RuntimeDataException(
-                    ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_QUOTE_DELIMITER_MISMATCH, quoteValue,
-                    delimiter);
-        }
-
-        return quoteValue.charAt(0);
-    }
-
     @Override
     public void setMetaType(ARecordType metaType) {
     }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index 5740143..27ac10e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -21,12 +21,13 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
 import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
-import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IIndexingAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
+import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.application.IServiceContext;
@@ -38,23 +39,30 @@
  */
 public class AdapterFactoryProvider {
 
-    // Adapters
-    public static IAdapterFactory getAdapterFactory(IServiceContext serviceCtx, String adapterName,
+    private AdapterFactoryProvider() {
+    }
+
+    // get adapter factory. this method has the side effect of modifying the configuration as necessary
+    public static ITypedAdapterFactory getAdapterFactory(IServiceContext serviceCtx, String adapterName,
             Map<String, String> configuration, ARecordType itemType, ARecordType metaType)
             throws HyracksDataException, AlgebricksException {
-        ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
-        GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
+        ExternalDataUtils.defaultConfiguration(configuration);
+        ExternalDataUtils.prepare(adapterName, configuration);
+        ICcApplicationContext context = (ICcApplicationContext) serviceCtx.getApplicationContext();
+        ITypedAdapterFactory adapterFactory =
+                (ITypedAdapterFactory) context.getAdapterFactoryService().createAdapterFactory();
         adapterFactory.setOutputType(itemType);
         adapterFactory.setMetaType(metaType);
         adapterFactory.configure(serviceCtx, configuration);
         return adapterFactory;
     }
 
-    // Indexing Adapters
+    // get indexing adapter factory. this method has the side effect of modifying the configuration as necessary
     public static IIndexingAdapterFactory getIndexingAdapterFactory(IServiceContext serviceCtx, String adapterName,
             Map<String, String> configuration, ARecordType itemType, List<ExternalFile> snapshot, boolean indexingOp,
             ARecordType metaType) throws HyracksDataException, AlgebricksException {
-        ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
+        ExternalDataUtils.defaultConfiguration(configuration);
+        ExternalDataUtils.prepare(adapterName, configuration);
         GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
         adapterFactory.setOutputType(itemType);
         adapterFactory.setMetaType(metaType);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index 8024dc4..2a2289c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -53,6 +53,7 @@
 
     public static IExternalDataSourceFactory getExternalDataSourceFactory(ILibraryManager libraryManager,
             Map<String, String> configuration) throws HyracksDataException, AsterixException {
+        // Take a copy of the configuration
         if (ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.RECORDS)) {
             String reader = configuration.get(ExternalDataConstants.KEY_READER);
             return DatasourceFactoryProvider.getRecordReaderFactory(libraryManager, reader, configuration);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
index e222e99..8181262 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
@@ -18,8 +18,6 @@
  */
 package org.apache.asterix.external.util;
 
-import java.util.Map;
-
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IDataParserFactory;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -30,6 +28,9 @@
 
 public class ExternalDataCompatibilityUtils {
 
+    private ExternalDataCompatibilityUtils() {
+    }
+
     public static void validateCompatibility(IExternalDataSourceFactory dataSourceFactory,
             IDataParserFactory dataParserFactory) throws AsterixException {
         if (dataSourceFactory.getDataSourceType() != dataParserFactory.getDataSourceType()) {
@@ -58,16 +59,4 @@
                             + recordParserFactory.getRecordClass());
         }
     }
-
-    public static void prepare(String adapterName, Map<String, String> configuration) {
-        if (!configuration.containsKey(ExternalDataConstants.KEY_READER)) {
-            configuration.put(ExternalDataConstants.KEY_READER, adapterName);
-        }
-        if (!configuration.containsKey(ExternalDataConstants.KEY_PARSER)) {
-            if (configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
-                configuration.put(ExternalDataConstants.KEY_PARSER,
-                        configuration.get(ExternalDataConstants.KEY_FORMAT));
-            }
-        }
-    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 729215e..1378207 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -19,6 +19,10 @@
 package org.apache.asterix.external.util;
 
 public class ExternalDataConstants {
+
+    private ExternalDataConstants() {
+    }
+
     // TODO: Remove unused variables.
     /**
      * Keys
@@ -62,6 +66,7 @@
     public static final String KEY_LOCAL_SOCKET_PATH = "local-socket-path";
     public static final String KEY_FORMAT = "format";
     public static final String KEY_QUOTE = "quote";
+    public static final String KEY_QUOTE_ESCAPE = "quote-escape";
     public static final String KEY_PARSER = "parser";
     public static final String KEY_DATASET_RECORD = "dataset-record";
     public static final String KEY_HIVE_SERDE = "hive-serde";
@@ -114,6 +119,7 @@
     public static final String KEY_ADAPTER_NAME_SOCKET = "socket";
     public static final String KEY_ALIAS_ADAPTER_NAME_SOCKET = "socket_adapter";
     public static final String KEY_ADAPTER_NAME_HTTP = "http_adapter";
+    public static final String KEY_ADAPTER_NAME_AWS_S3 = "S3";
 
     /**
      * HDFS class names
@@ -187,6 +193,8 @@
      */
     public static final String TRUE = "true";
     public static final String FALSE = "false";
+    public static final String TAB_STR = "\t";
+    public static final String NULL_STR = "\0";
 
     /**
      * Constant characters
@@ -227,6 +235,16 @@
     public static final String KEY_READER_FACTORY = "reader-factory";
     public static final String READER_RSS = "rss_feed";
     public static final String FORMAT_CSV = "csv";
+    public static final String FORMAT_TSV = "tsv";
 
     public static final String ERROR_PARSE_RECORD = "Parser failed to parse record";
+
+    public static class AwsS3Constants {
+        public static final String REGION_FIELD_NAME = "region";
+        public static final String ACCESS_KEY_FIELD_NAME = "accessKey";
+        public static final String SECRET_KEY_FIELD_NAME = "secretKey";
+        public static final String CONTAINER_NAME_FIELD_NAME = "container";
+        public static final String DEFINITION_FIELD_NAME = "definition";
+        public static final String SERVICE_END_POINT_FIELD_NAME = "serviceEndpoint";
+    }
 }
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index a418cbf..443aa7e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -34,6 +34,7 @@
 import org.apache.asterix.om.types.AUnionType;
 import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.data.parsers.BooleanParserFactory;
 import org.apache.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
 import org.apache.hyracks.dataflow.common.data.parsers.FloatParserFactory;
 import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -43,48 +44,43 @@
 
 public class ExternalDataUtils {
 
+    private ExternalDataUtils() {
+    }
+
     // Get a delimiter from the given configuration
-    public static char getDelimiter(Map<String, String> configuration) throws AsterixException {
+    public static char getDelimiter(Map<String, String> configuration) throws HyracksDataException {
         String delimiterValue = configuration.get(ExternalDataConstants.KEY_DELIMITER);
         if (delimiterValue == null) {
             delimiterValue = ExternalDataConstants.DEFAULT_DELIMITER;
         } else if (delimiterValue.length() != 1) {
-            throw new AsterixException(
-                    "'" + delimiterValue + "' is not a valid delimiter. The length of a delimiter should be 1.");
+            throw new RuntimeDataException(ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_DELIMITER,
+                    delimiterValue);
         }
         return delimiterValue.charAt(0);
     }
 
     // Get a quote from the given configuration when the delimiter is given
     // Need to pass delimiter to check whether they share the same character
-    public static char getQuote(Map<String, String> configuration, char delimiter) throws AsterixException {
+    public static char getQuote(Map<String, String> configuration, char delimiter) throws HyracksDataException {
         String quoteValue = configuration.get(ExternalDataConstants.KEY_QUOTE);
         if (quoteValue == null) {
             quoteValue = ExternalDataConstants.DEFAULT_QUOTE;
         } else if (quoteValue.length() != 1) {
-            throw new AsterixException("'" + quoteValue + "' is not a valid quote. The length of a quote should be 1.");
+            throw new RuntimeDataException(ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_QUOTE,
+                    quoteValue);
         }
 
         // Since delimiter (char type value) can't be null,
         // we only check whether delimiter and quote use the same character
         if (quoteValue.charAt(0) == delimiter) {
-            throw new AsterixException(
-                    "Quote '" + quoteValue + "' cannot be used with the delimiter '" + delimiter + "'. ");
+            throw new RuntimeDataException(
+                    ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_QUOTE_DELIMITER_MISMATCH, quoteValue,
+                    delimiter);
         }
 
         return quoteValue.charAt(0);
     }
 
-    // Get the header flag
-    public static boolean getHasHeader(Map<String, String> configuration) {
-        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_HEADER));
-    }
-
-    public static void validateParameters(Map<String, String> configuration) throws AsterixException {
-        validateDataSourceParameters(configuration);
-        validateDataParserParameters(configuration);
-    }
-
     public static void validateDataParserParameters(Map<String, String> configuration) throws AsterixException {
         String parser = configuration.get(ExternalDataConstants.KEY_FORMAT);
         if (parser == null) {
@@ -150,15 +146,6 @@
         return parserFormat != null ? parserFormat : configuration.get(ExternalDataConstants.KEY_FORMAT);
     }
 
-    public static void setRecordFormat(Map<String, String> configuration, String format) {
-        if (!configuration.containsKey(ExternalDataConstants.KEY_DATA_PARSER)) {
-            configuration.put(ExternalDataConstants.KEY_DATA_PARSER, format);
-        }
-        if (!configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
-            configuration.put(ExternalDataConstants.KEY_FORMAT, format);
-        }
-    }
-
     private static Map<ATypeTag, IValueParserFactory> valueParserFactoryMap = initializeValueParserFactoryMap();
 
     private static Map<ATypeTag, IValueParserFactory> initializeValueParserFactoryMap() {
@@ -168,6 +155,7 @@
         m.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
         m.put(ATypeTag.BIGINT, LongParserFactory.INSTANCE);
         m.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
+        m.put(ATypeTag.BOOLEAN, BooleanParserFactory.INSTANCE);
         return m;
     }
 
@@ -201,10 +189,6 @@
         return vpf;
     }
 
-    public static String getRecordReaderStreamName(Map<String, String> configuration) {
-        return configuration.get(ExternalDataConstants.KEY_READER_STREAM);
-    }
-
     public static boolean hasHeader(Map<String, String> configuration) {
         String value = configuration.get(ExternalDataConstants.KEY_HEADER);
         if (value != null) {
@@ -281,12 +265,6 @@
         return configuration.get(ExternalDataConstants.KEY_FEED_NAME);
     }
 
-    public static int getQueueSize(Map<String, String> configuration) {
-        return configuration.containsKey(ExternalDataConstants.KEY_QUEUE_SIZE)
-                ? Integer.parseInt(configuration.get(ExternalDataConstants.KEY_QUEUE_SIZE))
-                : ExternalDataConstants.DEFAULT_QUEUE_SIZE;
-    }
-
     public static boolean isRecordWithMeta(Map<String, String> configuration) {
         return configuration.containsKey(ExternalDataConstants.KEY_META_TYPE_NAME);
     }
@@ -339,4 +317,42 @@
         }
         return intIndicators;
     }
+
+    /**
+     * Fills the configuration of the external dataset and its adapter with default values if not provided by the user.
+     *
+     * @param configuration external data configuration
+     */
+    public static void defaultConfiguration(Map<String, String> configuration) {
+        String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        if (format != null) {
+            // Default quote, quote-escape character, and field delimiter for the CSV and TSV formats
+            if (format.equals(ExternalDataConstants.FORMAT_CSV)) {
+                configuration.putIfAbsent(ExternalDataConstants.KEY_DELIMITER, ExternalDataConstants.DEFAULT_DELIMITER);
+                configuration.putIfAbsent(ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.DEFAULT_QUOTE);
+                configuration.putIfAbsent(ExternalDataConstants.KEY_QUOTE_ESCAPE, ExternalDataConstants.DEFAULT_QUOTE);
+            } else if (format.equals(ExternalDataConstants.FORMAT_TSV)) {
+                configuration.putIfAbsent(ExternalDataConstants.KEY_DELIMITER, ExternalDataConstants.TAB_STR);
+                configuration.putIfAbsent(ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.NULL_STR);
+                configuration.putIfAbsent(ExternalDataConstants.KEY_QUOTE_ESCAPE, ExternalDataConstants.NULL_STR);
+            }
+        }
+    }
+
+    /**
+     * Prepares the configuration of the external dataset and its adapter by filling the information required by
+     * adapters and parsers.
+     *
+     * @param adapterName adapter name
+     * @param configuration external data configuration
+     */
+    public static void prepare(String adapterName, Map<String, String> configuration) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_READER)) {
+            configuration.put(ExternalDataConstants.KEY_READER, adapterName);
+        }
+        if (!configuration.containsKey(ExternalDataConstants.KEY_PARSER)
+                && configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
+            configuration.put(ExternalDataConstants.KEY_PARSER, configuration.get(ExternalDataConstants.KEY_FORMAT));
+        }
+    }
 }
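
defaultConfiguration() above fills in only the settings the user left out, keyed off the declared format. A small self-contained sketch of that putIfAbsent behaviour follows; the "," and "\"" literals are assumptions about ExternalDataConstants.DEFAULT_DELIMITER and DEFAULT_QUOTE (their values are not shown in this diff), while "\t" and "\0" match the TAB_STR and NULL_STR constants added above.

    import java.util.HashMap;
    import java.util.Map;

    public final class DefaultConfigSketch {
        public static void main(String[] args) {
            Map<String, String> conf = new HashMap<>();
            conf.put("format", "tsv");
            applyDefaults(conf);
            // the user-supplied format is kept; delimiter, quote and quote-escape get the TSV defaults
            System.out.println(conf);
        }

        // Mirrors defaultConfiguration(): only missing keys are filled, existing ones are left alone.
        static void applyDefaults(Map<String, String> configuration) {
            String format = configuration.get("format");
            if ("csv".equals(format)) {
                configuration.putIfAbsent("delimiter", ","); // assumed DEFAULT_DELIMITER
                configuration.putIfAbsent("quote", "\"");    // assumed DEFAULT_QUOTE
                configuration.putIfAbsent("quote-escape", "\"");
            } else if ("tsv".equals(format)) {
                configuration.putIfAbsent("delimiter", "\t"); // TAB_STR
                configuration.putIfAbsent("quote", "\0");     // NULL_STR
                configuration.putIfAbsent("quote-escape", "\0");
            }
        }
    }
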
diff --git a/asterixdb/asterix-external-data/src/main/resources/META-INF/services/org.apache.asterix.external.api.IRecordReaderFactory b/asterixdb/asterix-external-data/src/main/resources/META-INF/services/org.apache.asterix.external.api.IRecordReaderFactory
index 0d96658..fd3e473 100644
--- a/asterixdb/asterix-external-data/src/main/resources/META-INF/services/org.apache.asterix.external.api.IRecordReaderFactory
+++ b/asterixdb/asterix-external-data/src/main/resources/META-INF/services/org.apache.asterix.external.api.IRecordReaderFactory
@@ -20,3 +20,4 @@
 org.apache.asterix.external.input.HDFSDataSourceFactory
 org.apache.asterix.external.input.record.reader.stream.StreamRecordReaderFactory
 org.apache.asterix.external.input.record.reader.http.HttpServerRecordReaderFactory
+org.apache.asterix.external.input.record.reader.aws.AwsS3ReaderFactory
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 8ee8a57..a947c7e 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -25,9 +25,9 @@
 import org.apache.asterix.common.api.IApplicationContext;
 import org.apache.asterix.common.cluster.ClusterPartition;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.dataflow.TupleForwarder;
 import org.apache.asterix.external.parser.ADMDataParser;
 import org.apache.asterix.om.types.ARecordType;
@@ -41,7 +41,7 @@
 import org.apache.hyracks.dataflow.std.file.ITupleParser;
 import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
 
-public class TestTypedAdapterFactory implements IAdapterFactory {
+public class TestTypedAdapterFactory implements ITypedAdapterFactory {
 
     private static final long serialVersionUID = 1L;
 
diff --git a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
index 4c8820f..9b54562 100644
--- a/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterixdb/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -476,7 +476,7 @@
   DatasetDecl dsetDecl = null;
   boolean autogenerated = false;
   Pair<Integer, List<String>> filterField = null;
-  Pair<Identifier,Identifier> metaTypeComponents = new Pair<Identifier, Identifier>(null, null);
+  Pair<Identifier,Identifier> metaTypeComponents = null;
   RecordConstructor withRecord = null;
 }
 {
@@ -495,10 +495,8 @@
         try{
             dsetDecl = new DatasetDecl(nameComponents.first,
               nameComponents.second,
-              typeComponents.first,
-              typeComponents.second,
-              metaTypeComponents.first,
-              metaTypeComponents.second,
+              new TypeReferenceExpression(typeComponents),
+              null,
               nodeGroupName != null? new Identifier(nodeGroupName): null,
               hints,
               DatasetType.EXTERNAL,
@@ -542,10 +540,8 @@
         try{
           dsetDecl = new DatasetDecl(nameComponents.first,
                                    nameComponents.second,
-                                   typeComponents.first,
-                                   typeComponents.second,
-                                   metaTypeComponents.first,
-                                   metaTypeComponents.second,
+                                   new TypeReferenceExpression(typeComponents),
+                                   metaTypeComponents != null ? new TypeReferenceExpression(metaTypeComponents) : null,
                                    nodeGroupName != null ? new Identifier(nodeGroupName) : null,
                                    hints,
                                    DatasetType.INTERNAL,
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
index 0a17b24..22753d0 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
@@ -25,6 +25,7 @@
 import org.apache.asterix.lang.common.base.AbstractStatement;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.TypeExpression;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.util.ConfigurationUtil;
 import org.apache.asterix.lang.common.util.DatasetDeclParametersUtil;
@@ -36,38 +37,25 @@
 public class DatasetDecl extends AbstractStatement {
     protected final Identifier name;
     protected final Identifier dataverse;
-    protected final Identifier itemTypeDataverse;
-    protected final Identifier itemTypeName;
-    protected final Identifier metaItemTypeDataverse;
-    protected final Identifier metaItemTypeName;
+    protected final TypeExpression itemType;
+    protected final TypeExpression metaItemType;
     protected final Identifier nodegroupName;
     protected final DatasetType datasetType;
     protected final IDatasetDetailsDecl datasetDetailsDecl;
     protected final Map<String, String> hints;
-    private final AdmObjectNode withObjectNode;
+    private AdmObjectNode withObjectNode;
     protected final boolean ifNotExists;
 
-    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeDataverse, Identifier itemTypeName,
-            Identifier metaItemTypeDataverse, Identifier metaItemTypeName, Identifier nodeGroupName,
-            Map<String, String> hints, DatasetType datasetType, IDatasetDetailsDecl idd, RecordConstructor withRecord,
-            boolean ifNotExists) throws CompilationException {
+    public DatasetDecl(Identifier dataverse, Identifier name, TypeExpression itemType, TypeExpression metaItemType,
+            Identifier nodeGroupName, Map<String, String> hints, DatasetType datasetType, IDatasetDetailsDecl idd,
+            RecordConstructor withRecord, boolean ifNotExists) throws CompilationException {
         this.dataverse = dataverse;
         this.name = name;
-        this.itemTypeName = itemTypeName;
-        if (itemTypeDataverse.getValue() == null) {
-            this.itemTypeDataverse = dataverse;
-        } else {
-            this.itemTypeDataverse = itemTypeDataverse;
-        }
-        this.metaItemTypeName = metaItemTypeName;
-        if (metaItemTypeDataverse == null || metaItemTypeDataverse.getValue() == null) {
-            this.metaItemTypeDataverse = dataverse;
-        } else {
-            this.metaItemTypeDataverse = metaItemTypeDataverse;
-        }
+        this.itemType = itemType;
+        this.metaItemType = metaItemType;
         this.nodegroupName = nodeGroupName;
         this.hints = hints;
-        this.withObjectNode = DatasetDeclParametersUtil.validateAndGetWithObjectNode(withRecord);
+        this.withObjectNode = DatasetDeclParametersUtil.validateAndGetWithObjectNode(withRecord, datasetType);
         this.ifNotExists = ifNotExists;
         this.datasetType = datasetType;
         this.datasetDetailsDecl = idd;
@@ -85,40 +73,12 @@
         return name;
     }
 
-    public Identifier getItemTypeName() {
-        return itemTypeName;
+    public TypeExpression getItemType() {
+        return itemType;
     }
 
-    public Identifier getItemTypeDataverse() {
-        return itemTypeDataverse;
-    }
-
-    public String getQualifiedTypeName() {
-        if (itemTypeDataverse == dataverse) {
-            return itemTypeName.getValue();
-        } else {
-            return itemTypeDataverse.getValue() + "." + itemTypeName.getValue();
-        }
-    }
-
-    public Identifier getMetaName() {
-        return name;
-    }
-
-    public Identifier getMetaItemTypeName() {
-        return metaItemTypeName == null ? new Identifier() : metaItemTypeName;
-    }
-
-    public Identifier getMetaItemTypeDataverse() {
-        return metaItemTypeDataverse == null ? new Identifier() : metaItemTypeDataverse;
-    }
-
-    public String getQualifiedMetaTypeName() {
-        if (metaItemTypeDataverse == dataverse) {
-            return metaItemTypeName.getValue();
-        } else {
-            return metaItemTypeDataverse.getValue() + "." + metaItemTypeName.getValue();
-        }
+    public TypeExpression getMetaItemType() {
+        return metaItemType;
     }
 
     public Identifier getNodegroupName() {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/DatasetDeclParametersUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/DatasetDeclParametersUtil.java
index a26a638..52285d9 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/DatasetDeclParametersUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/DatasetDeclParametersUtil.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.lang.common.util;
 
+import org.apache.asterix.common.config.DatasetConfig;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.lang.common.expression.RecordConstructor;
 import org.apache.asterix.object.base.AdmObjectNode;
@@ -60,14 +61,21 @@
     private DatasetDeclParametersUtil() {
     }
 
-    public static AdmObjectNode validateAndGetWithObjectNode(RecordConstructor withRecord) throws CompilationException {
+    public static AdmObjectNode validateAndGetWithObjectNode(RecordConstructor withRecord,
+            DatasetConfig.DatasetType datasetType) throws CompilationException {
         if (withRecord == null) {
             return EMPTY_WITH_OBJECT;
         }
-        final ConfigurationTypeValidator validator = new ConfigurationTypeValidator();
-        final AdmObjectNode node = ExpressionUtils.toNode(withRecord);
-        validator.validateType(WITH_OBJECT_TYPE, node);
-        return node;
+
+        // Handle based on dataset type
+        if (datasetType == DatasetConfig.DatasetType.INTERNAL) {
+            final ConfigurationTypeValidator validator = new ConfigurationTypeValidator();
+            final AdmObjectNode node = ExpressionUtils.toNode(withRecord);
+            validator.validateType(WITH_OBJECT_TYPE, node);
+            return node;
+        } else {
+            return ExpressionUtils.toNode(withRecord);
+        }
     }
 
     private static ARecordType getWithObjectType() {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
index 6b734dd..c123348 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
@@ -457,16 +457,18 @@
     public Void visit(DatasetDecl dd, Integer step) throws CompilationException {
         if (dd.getDatasetType() == DatasetType.INTERNAL) {
             out.print(skip(step) + "create " + datasetSymbol + generateFullName(dd.getDataverse(), dd.getName())
-                    + generateIfNotExists(dd.getIfNotExists()) + "(" + dd.getQualifiedTypeName() + ")"
-                    + " primary key ");
+                    + generateIfNotExists(dd.getIfNotExists()) + "(");
+            dd.getItemType().accept(this, step + 2);
+            out.print(skip(step) + ") primary key ");
             printDelimitedKeys(((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs(), ",");
             if (((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated()) {
                 out.print(" autogenerated ");
             }
         } else if (dd.getDatasetType() == DatasetType.EXTERNAL) {
-            out.print(
-                    skip(step) + "create external " + datasetSymbol + generateFullName(dd.getDataverse(), dd.getName())
-                            + "(" + dd.getQualifiedTypeName() + ")" + generateIfNotExists(dd.getIfNotExists()));
+            out.print(skip(step) + "create external " + datasetSymbol
+                    + generateFullName(dd.getDataverse(), dd.getName()) + "(");
+            dd.getItemType().accept(this, step + 2);
+            out.print(skip(step) + ")" + generateIfNotExists(dd.getIfNotExists()));
             ExternalDetailsDecl externalDetails = (ExternalDetailsDecl) dd.getDatasetDetailsDecl();
             out.print(" using " + revertStringToQuoted(externalDetails.getAdapter()));
             printConfiguration(externalDetails.getProperties());
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
index ff55880..5b8c9c0 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/QueryPrintVisitor.java
@@ -388,15 +388,18 @@
     @Override
     public Void visit(DatasetDecl dd, Integer step) throws CompilationException {
         if (dd.getDatasetType() == DatasetType.INTERNAL) {
-            String line = skip(step) + "DatasetDecl " + dd.getName() + "(" + dd.getItemTypeName() + ")"
-                    + " partitioned by " + ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs();
+            out.print(skip(step) + "DatasetDecl " + dd.getName() + "(");
+            dd.getItemType().accept(this, step + 2);
+            out.print(skip(step) + ") partitioned by "
+                    + ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getPartitioningExprs());
             if (((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated()) {
-                line += " [autogenerated]";
+                out.print(" [autogenerated]");
             }
-            out.println(line);
+            out.println();
         } else if (dd.getDatasetType() == DatasetType.EXTERNAL) {
-            out.println(skip(step) + "DatasetDecl " + dd.getName() + "(" + dd.getItemTypeName() + ")"
-                    + "is an external dataset");
+            out.print(skip(step) + "DatasetDecl " + dd.getName() + "(");
+            dd.getItemType().accept(this, step + 2);
+            out.println(skip(step) + ") is an external dataset");
         }
         return null;
     }
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index e0a4341..5aa45bc 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -335,15 +335,15 @@
         hintCollector.clear();
         try {
             return parseFunction.parse();
+        } catch (SqlppParseException e) {
+            throw new CompilationException(ErrorCode.PARSE_ERROR, e.getSourceLocation(), LogRedactionUtil.userData(getMessage(e)));
+        } catch (ParseException e) {
+            throw new CompilationException(ErrorCode.PARSE_ERROR, LogRedactionUtil.userData(getMessage(e)));
         } catch (Error e) {
             // this is here as the JavaCharStream that's below the lexer sometimes throws Errors that are not handled
             // by the ANTLR-generated lexer or parser (e.g it does this for invalid backslash u + 4 hex digits escapes)
             final String msg = e.getClass().getSimpleName() + (e.getMessage() != null ? ": " + e.getMessage() : "");
             throw new CompilationException(ErrorCode.PARSE_ERROR, LogRedactionUtil.userData(msg));
-        } catch (SqlppParseException e) {
-            throw new CompilationException(ErrorCode.PARSE_ERROR, e.getSourceLocation(), LogRedactionUtil.userData(getMessage(e)));
-        } catch (ParseException e) {
-            throw new CompilationException(ErrorCode.PARSE_ERROR, LogRedactionUtil.userData(getMessage(e)));
         } finally {
             reportUnclaimedHints();
         }
@@ -428,7 +428,11 @@
     }
 
     private SqlppParseException createUnexpectedTokenError() {
-      return new SqlppParseException(getSourceLocation(token), "Unexpected token: " + LogRedactionUtil.userData(token.image));
+      return createUnexpectedTokenError(token);
+    }
+
+    private SqlppParseException createUnexpectedTokenError(Token t) {
+      return new SqlppParseException(getSourceLocation(t), "Unexpected token: " + LogRedactionUtil.userData(t.image));
     }
 
     private boolean laToken(int idx, int kind, String image) {
@@ -646,7 +650,8 @@
 {
   Pair<Identifier,Identifier> nameComponents = null;
   boolean ifNotExists = false;
-  Pair<Identifier,Identifier> typeComponents = null;
+  TypeExpression typeExpr = null;
+  TypeExpression metaTypeExpr = null;
   String adapterName = null;
   Map<String,String> properties = null;
   FunctionSignature appliedFunction = null;
@@ -656,13 +661,12 @@
   DatasetDecl stmt = null;
   boolean autogenerated = false;
   Pair<Integer, List<String>> filterField = null;
-  Pair<Identifier,Identifier> metaTypeComponents = new Pair<Identifier, Identifier>(null, null);
   RecordConstructor withRecord = null;
 }
 {
   (
     <EXTERNAL> Dataset() nameComponents = QualifiedName()
-    <LEFTPAREN> typeComponents = TypeName() <RIGHTPAREN>
+    typeExpr = DatasetTypeSpecification()
     ifNotExists = IfNotExists()
     <USING> adapterName = AdapterName() properties = Configuration()
     ( <ON> nodeGroupName = Identifier() )?
@@ -675,10 +679,8 @@
         try{
         stmt = new DatasetDecl(nameComponents.first,
                                    nameComponents.second,
-                                   typeComponents.first,
-                                   typeComponents.second,
-                                   metaTypeComponents.first,
-                                   metaTypeComponents.second,
+                                   typeExpr,
+                                   null,
                                    nodeGroupName != null? new Identifier(nodeGroupName): null,
                                    hints,
                                    DatasetType.EXTERNAL,
@@ -692,7 +694,7 @@
 
     | ( <INTERNAL> )?
     Dataset() nameComponents = QualifiedName()
-    <LEFTPAREN> typeComponents = TypeName() <RIGHTPAREN>
+    typeExpr = DatasetTypeSpecification()
     (
         { String name; }
         <WITH>
@@ -703,7 +705,7 @@
                     "We can only support one additional associated field called \"meta\".");
             }
         }
-        <LEFTPAREN> metaTypeComponents = TypeName() <RIGHTPAREN>
+        metaTypeExpr = DatasetTypeSpecification()
     )?
     ifNotExists = IfNotExists()
     primaryKeyFields = PrimaryKey()
@@ -724,10 +726,8 @@
         try{
         stmt = new DatasetDecl(nameComponents.first,
                                    nameComponents.second,
-                                   typeComponents.first,
-                                   typeComponents.second,
-                                   metaTypeComponents.first,
-                                   metaTypeComponents.second,
+                                   typeExpr,
+                                   metaTypeExpr,
                                    nodeGroupName != null ? new Identifier(nodeGroupName) : null,
                                    hints,
                                    DatasetType.INTERNAL,
@@ -744,6 +744,76 @@
     }
 }
 
+TypeExpression DatasetTypeSpecification() throws ParseException:
+{
+  TypeExpression typeExpr = null;
+}
+{
+  (
+    LOOKAHEAD(3) typeExpr = DatasetRecordTypeSpecification(true)
+    | typeExpr = DatasetReferenceTypeSpecification()
+  )
+  {
+    return typeExpr;
+  }
+}
+
+TypeExpression DatasetReferenceTypeSpecification() throws ParseException:
+{
+  TypeExpression typeExpr = null;
+}
+{
+  <LEFTPAREN> typeExpr = TypeReference() <RIGHTPAREN>
+  {
+    return typeExpr;
+  }
+}
+
+TypeExpression DatasetRecordTypeSpecification(boolean allowRecordKindModifier) throws ParseException:
+{
+  RecordTypeDefinition recordTypeDef = null;
+  RecordTypeDefinition.RecordKind recordKind = null;
+  Token recordKindToken = null;
+}
+{
+   <LEFTPAREN> recordTypeDef = DatasetRecordTypeDef() <RIGHTPAREN>
+   ( recordKind = RecordTypeKind() { recordKindToken = token; } <TYPE> )?
+   {
+     if (recordKind == null) {
+       recordKind = RecordTypeDefinition.RecordKind.CLOSED;
+     } else if (!allowRecordKindModifier) {
+       throw createUnexpectedTokenError(recordKindToken);
+     }
+     recordTypeDef.setRecordKind(recordKind);
+     return recordTypeDef;
+   }
+}
+
+RecordTypeDefinition DatasetRecordTypeDef() throws ParseException:
+{
+  RecordTypeDefinition recType = new RecordTypeDefinition();
+}
+{
+  DatasetRecordField(recType) ( <COMMA> DatasetRecordField(recType) )*
+  {
+    return recType;
+  }
+}
+
+void DatasetRecordField(RecordTypeDefinition recType) throws ParseException:
+{
+  String fieldName;
+  TypeExpression type = null;
+  boolean isUnknownable = true;
+}
+{
+  fieldName = Identifier()
+  type = TypeReference() ( <NOT> <NULL> { isUnknownable = false; } )?
+  {
+    recType.addField(fieldName, type, isUnknownable);
+  }
+}
+
 RefreshExternalDatasetStatement RefreshExternalDatasetStatement() throws ParseException:
 {
   Token startToken = null;
@@ -1575,15 +1645,28 @@
   }
 }
 
+RecordTypeDefinition.RecordKind RecordTypeKind() throws ParseException:
+{
+  RecordTypeDefinition.RecordKind recordKind = null;
+}
+{
+  (
+    <CLOSED> { recordKind = RecordTypeDefinition.RecordKind.CLOSED; }
+    | <OPEN> { recordKind = RecordTypeDefinition.RecordKind.OPEN; }
+  )
+  {
+    return recordKind;
+  }
+}
+
 RecordTypeDefinition RecordTypeDef() throws ParseException:
 {
   Token startToken = null;
   RecordTypeDefinition recType = new RecordTypeDefinition();
-  RecordTypeDefinition.RecordKind recordKind = null;
+  RecordTypeDefinition.RecordKind recordKind = RecordTypeDefinition.RecordKind.OPEN;
 }
 {
-  ( <CLOSED> { recordKind = RecordTypeDefinition.RecordKind.CLOSED; }
-    | <OPEN> { recordKind = RecordTypeDefinition.RecordKind.OPEN; } )?
+   ( recordKind = RecordTypeKind() )?
    <LEFTBRACE>
     {
       startToken = token;
@@ -1610,9 +1693,6 @@
     )?
    <RIGHTBRACE>
    {
-      if (recordKind == null) {
-        recordKind = RecordTypeDefinition.RecordKind.OPEN;
-      }
       recType.setRecordKind(recordKind);
       return addSourceLocation(recType, startToken);
    }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 49fffe6..3412941 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -35,13 +35,13 @@
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.MetadataException;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.common.ioopcallbacks.LSMIndexIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMIndexPageWriteCallbackFactory;
 import org.apache.asterix.common.utils.StorageConstants;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.metadata.IDatasetDetails;
@@ -294,7 +294,8 @@
 
     private static DatasourceAdapter getAdapter(String adapterFactoryClassName) throws AlgebricksException {
         try {
-            String adapterName = ((IAdapterFactory) (Class.forName(adapterFactoryClassName).newInstance())).getAlias();
+            String adapterName =
+                    ((ITypedAdapterFactory) (Class.forName(adapterFactoryClassName).newInstance())).getAlias();
             return new DatasourceAdapter(new AdapterIdentifier(MetadataConstants.METADATA_DATAVERSE_NAME, adapterName),
                     adapterFactoryClassName, IDataSourceAdapter.AdapterType.INTERNAL);
         } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index d9309d9..7fdbfcf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -74,7 +74,7 @@
     public static final String FIELD_NAME_KIND = "Kind";
     public static final String FIELD_NAME_LANGUAGE = "Language";
     public static final String FIELD_NAME_LAST_REFRESH_TIME = "LastRefreshTime";
-    public static final String FIELD_NAME_METADATA_DATAVERSE = "MetatypeDataverseName";
+    public static final String FIELD_NAME_METATYPE_DATAVERSE = "MetatypeDataverseName";
     public static final String FIELD_NAME_METATYPE_NAME = "MetatypeName";
     public static final String FIELD_NAME_NAME = "Name";
     public static final String FIELD_NAME_NODE_NAME = "NodeName";
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
index 62cce05..07bbc57 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
@@ -23,7 +23,7 @@
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.metadata.IDatasetDetails;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.entities.Dataset;
@@ -111,7 +111,7 @@
                         externalDataset.getItemTypeDataverseName(), itemTypeName).getDatatype();
 
                 ExternalDatasetDetails edd = (ExternalDatasetDetails) externalDataset.getDatasetDetails();
-                IAdapterFactory adapterFactory = metadataProvider.getConfiguredAdapterFactory(externalDataset,
+                ITypedAdapterFactory adapterFactory = metadataProvider.getConfiguredAdapterFactory(externalDataset,
                         edd.getAdapter(), edd.getProperties(), (ARecordType) itemType, null);
                 return metadataProvider.buildExternalDatasetDataScannerRuntime(jobSpec, itemType, adapterFactory);
             case INTERNAL:
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
index 3460a46..c2983af 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/LoadableDataSource.java
@@ -26,7 +26,7 @@
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.om.types.ARecordType;
@@ -137,7 +137,7 @@
         }
         LoadableDataSource alds = (LoadableDataSource) dataSource;
         ARecordType itemType = (ARecordType) alds.getLoadedType();
-        IAdapterFactory adapterFactory = metadataProvider.getConfiguredAdapterFactory(alds.getTargetDataset(),
+        ITypedAdapterFactory adapterFactory = metadataProvider.getConfiguredAdapterFactory(alds.getTargetDataset(),
                 alds.getAdapter(), alds.getAdapterProperties(), itemType, null);
         RecordDescriptor rDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
         return metadataProvider.buildLoadableDatasetScan(jobSpec, adapterFactory, rDesc);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
index e819d65..6317d94 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataManagerUtil.java
@@ -23,6 +23,8 @@
 
 import org.apache.asterix.common.cluster.IClusterStateManager;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.entities.Dataset;
@@ -48,14 +50,20 @@
 
     public static IAType findType(MetadataTransactionContext mdTxnCtx, String dataverse, String typeName)
             throws AlgebricksException {
+        Datatype type = findTypeEntity(mdTxnCtx, dataverse, typeName);
+        return type != null ? type.getDatatype() : null;
+    }
+
+    public static Datatype findTypeEntity(MetadataTransactionContext mdTxnCtx, String dataverse, String typeName)
+            throws AlgebricksException {
         if (dataverse == null || typeName == null) {
             return null;
         }
         Datatype type = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverse, typeName);
         if (type == null) {
-            throw new AlgebricksException("Type name '" + typeName + "' unknown in dataverse '" + dataverse + "'");
+            throw new AsterixException(ErrorCode.UNKNOWN_TYPE, dataverse + "." + typeName);
         }
-        return type.getDatatype();
+        return type;
     }
 
     public static ARecordType findOutputRecordType(MetadataTransactionContext mdTxnCtx, String dataverse,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 0a72ceb..6b6cc78 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -39,6 +39,7 @@
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
 import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.common.metadata.LockList;
 import org.apache.asterix.common.storage.ICompressionManager;
 import org.apache.asterix.common.transactions.ITxnIdFactory;
@@ -48,8 +49,7 @@
 import org.apache.asterix.dataflow.data.nontagged.MissingWriterFactory;
 import org.apache.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
 import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.indexing.IndexingConstants;
@@ -72,6 +72,7 @@
 import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
+import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 import org.apache.asterix.metadata.entities.Feed;
@@ -345,6 +346,10 @@
         return MetadataManagerUtil.findNodes(mdTxnCtx, nodeGroupName);
     }
 
+    public Datatype findTypeEntity(String dataverse, String typeName) throws AlgebricksException {
+        return MetadataManagerUtil.findTypeEntity(mdTxnCtx, dataverse, typeName);
+    }
+
     public IAType findType(String dataverse, String typeName) throws AlgebricksException {
         return MetadataManagerUtil.findType(mdTxnCtx, dataverse, typeName);
     }
@@ -416,7 +421,7 @@
     }
 
     protected Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildLoadableDatasetScan(
-            JobSpecification jobSpec, IAdapterFactory adapterFactory, RecordDescriptor rDesc)
+            JobSpecification jobSpec, ITypedAdapterFactory adapterFactory, RecordDescriptor rDesc)
             throws AlgebricksException {
         ExternalScanOperatorDescriptor dataScanner = new ExternalScanOperatorDescriptor(jobSpec, rDesc, adapterFactory);
         try {
@@ -430,14 +435,14 @@
         return MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
     }
 
-    public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
+    public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, ITypedAdapterFactory> buildFeedIntakeRuntime(
             JobSpecification jobSpec, Feed feed, FeedPolicyAccessor policyAccessor) throws Exception {
-        Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
+        Triple<ITypedAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
         factoryOutput =
                 FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx, getApplicationContext());
         ARecordType recordType =
                 FeedMetadataUtil.getOutputType(feed, feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
-        IAdapterFactory adapterFactory = factoryOutput.first;
+        ITypedAdapterFactory adapterFactory = factoryOutput.first;
         FeedIntakeOperatorDescriptor feedIngestor = null;
         switch (factoryOutput.third) {
             case INTERNAL:
@@ -775,11 +780,11 @@
         return numElementsHint / numPartitions;
     }
 
-    protected IAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
+    protected ITypedAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
             Map<String, String> configuration, ARecordType itemType, ARecordType metaType) throws AlgebricksException {
         try {
             configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
-            IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(
+            ITypedAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(
                     getApplicationContext().getServiceContext(), adapterName, configuration, itemType, metaType);
 
             // check to see if dataset is indexed
@@ -922,7 +927,7 @@
     }
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildExternalDatasetDataScannerRuntime(
-            JobSpecification jobSpec, IAType itemType, IAdapterFactory adapterFactory) throws AlgebricksException {
+            JobSpecification jobSpec, IAType itemType, ITypedAdapterFactory adapterFactory) throws AlgebricksException {
         if (itemType.getTypeTag() != ATypeTag.OBJECT) {
             throw new AlgebricksException("Can only scan datasets of records.");
         }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
index b72c058..c29fb93 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
@@ -18,7 +18,7 @@
  */
 package org.apache.asterix.metadata.entities;
 
-import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
+import org.apache.asterix.common.external.IDataSourceAdapter.AdapterType;
 import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 7f8b9bf6..74f5076 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -25,6 +25,7 @@
 import java.io.DataOutput;
 import java.util.ArrayList;
 import java.util.Calendar;
+import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -68,6 +69,7 @@
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.runtime.compression.CompressionManager;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -135,20 +137,9 @@
         String nodeGroupName =
                 ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX))
                         .getStringValue();
-        String compactionPolicy = ((AString) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
-        IACursor cursor = ((AOrderedList) datasetRecord
-                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX))
-                        .getCursor();
-        Map<String, String> compactionPolicyProperties = new LinkedHashMap<>();
-        String key;
-        String value;
-        while (cursor.next()) {
-            ARecord field = (ARecord) cursor.get();
-            key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX)).getStringValue();
-            value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX)).getStringValue();
-            compactionPolicyProperties.put(key, value);
-        }
+
+        Pair<String, Map<String, String>> compactionPolicy = readCompactionPolicy(datasetType, datasetRecord);
+
         switch (datasetType) {
             case INTERNAL: {
                 ARecord datasetDetailsRecord = (ARecord) datasetRecord
@@ -159,7 +150,7 @@
                 PartitioningStrategy partitioningStrategy = PartitioningStrategy.valueOf(((AString) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX))
                                 .getStringValue());
-                cursor = ((AOrderedList) datasetDetailsRecord
+                IACursor cursor = ((AOrderedList) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX))
                                 .getCursor();
                 List<List<String>> partitioningKey = new ArrayList<>();
@@ -220,15 +211,15 @@
                 String adapter = ((AString) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
                                 .getStringValue();
-                cursor = ((AOrderedList) datasetDetailsRecord
+                IACursor cursor = ((AOrderedList) datasetDetailsRecord
                         .getValueByPos(MetadataRecordTypes.EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
                                 .getCursor();
                 Map<String, String> properties = new HashMap<>();
                 while (cursor.next()) {
                     ARecord field = (ARecord) cursor.get();
-                    key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
+                    String key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
                             .getStringValue();
-                    value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
+                    String value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
                             .getStringValue();
                     properties.put(key, value);
                 }
@@ -250,7 +241,7 @@
         String metaTypeDataverseName = null;
         String metaTypeName = null;
         int metaTypeDataverseNameIndex =
-                datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+                datasetRecord.getType().getFieldIndex(MetadataRecordTypes.FIELD_NAME_METATYPE_DATAVERSE);
         if (metaTypeDataverseNameIndex >= 0) {
             metaTypeDataverseName =
                     ((AString) datasetRecord.getValueByPos(metaTypeDataverseNameIndex)).getStringValue();
@@ -262,10 +253,34 @@
         String compressionScheme = getCompressionScheme(datasetRecord);
 
         return new Dataset(dataverseName, datasetName, typeDataverseName, typeName, metaTypeDataverseName, metaTypeName,
-                nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints, datasetType,
+                nodeGroupName, compactionPolicy.first, compactionPolicy.second, datasetDetails, hints, datasetType,
                 datasetId, pendingOp, rebalanceCount, compressionScheme);
     }
 
+    protected Pair<String, Map<String, String>> readCompactionPolicy(DatasetType datasetType, ARecord datasetRecord) {
+
+        String compactionPolicy = ((AString) datasetRecord
+                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX)).getStringValue();
+        AOrderedList compactionPolicyPropertiesList = ((AOrderedList) datasetRecord
+                .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX));
+
+        Map<String, String> compactionPolicyProperties;
+        if (compactionPolicyPropertiesList.size() > 0) {
+            compactionPolicyProperties = new LinkedHashMap<>();
+            for (IACursor cursor = compactionPolicyPropertiesList.getCursor(); cursor.next();) {
+                ARecord field = (ARecord) cursor.get();
+                String key = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_NAME_FIELD_INDEX))
+                        .getStringValue();
+                String value = ((AString) field.getValueByPos(MetadataRecordTypes.PROPERTIES_VALUE_FIELD_INDEX))
+                        .getStringValue();
+                compactionPolicyProperties.put(key, value);
+            }
+        } else {
+            compactionPolicyProperties = Collections.emptyMap();
+        }
+        return new Pair<>(compactionPolicy, compactionPolicyProperties);
+    }
+
     private long getRebalanceCount(ARecord datasetRecord) {
         // Read the rebalance count if there is one.
         int rebalanceCountIndex =
@@ -342,29 +357,9 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_GROUPNAME_FIELD_INDEX, fieldValue);
 
-        // write field 6
-        fieldValue.reset();
-        aString.setValue(dataset.getCompactionPolicy());
-        stringSerde.serialize(aString, fieldValue.getDataOutput());
-        recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX, fieldValue);
-
-        // write field 7
-        listBuilder.reset((AOrderedListType) MetadataRecordTypes.DATASET_RECORDTYPE
-                .getFieldTypes()[MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX]);
-        if (dataset.getCompactionPolicyProperties() != null) {
-            for (Map.Entry<String, String> property : dataset.getCompactionPolicyProperties().entrySet()) {
-                String name = property.getKey();
-                String value = property.getValue();
-                itemValue.reset();
-                DatasetUtil.writePropertyTypeRecord(name, value, itemValue.getDataOutput(),
-                        MetadataRecordTypes.COMPACTION_POLICY_PROPERTIES_RECORDTYPE);
-                listBuilder.addItem(itemValue);
-            }
-        }
-        fieldValue.reset();
-        listBuilder.write(fieldValue.getDataOutput(), true);
-        recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX,
-                fieldValue);
+        // write field 6/7
+        writeCompactionPolicy(dataset.getDatasetType(), dataset.getCompactionPolicy(),
+                dataset.getCompactionPolicyProperties(), listBuilder, itemValue);
 
         // write field 8/9
         fieldValue.reset();
@@ -414,6 +409,34 @@
         return tuple;
     }
 
+    protected void writeCompactionPolicy(DatasetType datasetType, String compactionPolicy,
+            Map<String, String> compactionPolicyProperties, OrderedListBuilder listBuilder,
+            ArrayBackedValueStorage itemValue) throws HyracksDataException {
+        // write field 6
+        fieldValue.reset();
+        aString.setValue(compactionPolicy);
+        stringSerde.serialize(aString, fieldValue.getDataOutput());
+        recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX, fieldValue);
+
+        // write field 7
+        listBuilder.reset((AOrderedListType) MetadataRecordTypes.DATASET_RECORDTYPE
+                .getFieldTypes()[MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX]);
+        if (compactionPolicyProperties != null && !compactionPolicyProperties.isEmpty()) {
+            for (Map.Entry<String, String> property : compactionPolicyProperties.entrySet()) {
+                String name = property.getKey();
+                String value = property.getValue();
+                itemValue.reset();
+                DatasetUtil.writePropertyTypeRecord(name, value, itemValue.getDataOutput(),
+                        MetadataRecordTypes.COMPACTION_POLICY_PROPERTIES_RECORDTYPE);
+                listBuilder.addItem(itemValue);
+            }
+        }
+        fieldValue.reset();
+        listBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX,
+                fieldValue);
+    }
+
     /**
      * Keep protected to allow other extensions to add additional fields
      *
@@ -430,7 +453,7 @@
         if (dataset.hasMetaPart()) {
             // write open field 1, the meta item type Dataverse name.
             fieldName.reset();
-            aString.setValue(MetadataRecordTypes.FIELD_NAME_METADATA_DATAVERSE);
+            aString.setValue(MetadataRecordTypes.FIELD_NAME_METATYPE_DATAVERSE);
             stringSerde.serialize(aString, fieldName.getDataOutput());
             fieldValue.reset();
             aString.setValue(dataset.getMetaItemTypeDataverseName());
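The compaction-policy fields of the Dataset record are now read and written through the two extracted helpers above; a brief sketch of how the read side is consumed (names follow the code above; Pair is org.apache.hyracks.algebricks.common.utils.Pair):

    // Sketch: compactionPolicy.first is the policy name, compactionPolicy.second the
    // (possibly empty, immutable) property map returned by readCompactionPolicy(...).
    Pair<String, Map<String, String>> compactionPolicy = readCompactionPolicy(datasetType, datasetRecord);
    String policyName = compactionPolicy.first;
    Map<String, String> policyProperties = compactionPolicy.second;
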
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index 8f630cf..9e65c08 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -24,7 +24,7 @@
 import java.io.DataInputStream;
 import java.util.Calendar;
 
-import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.common.external.IDataSourceAdapter;
 import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index 3ae0fec..7ed53e4 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -28,9 +28,9 @@
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.ErrorCode;
 import org.apache.asterix.common.exceptions.MetadataException;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
-import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
+import org.apache.asterix.common.external.IDataSourceAdapter;
+import org.apache.asterix.common.external.IDataSourceAdapter.AdapterType;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.feed.api.IFeed;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.external.provider.AdapterFactoryProvider;
@@ -117,20 +117,20 @@
                 adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
             }
             AdapterType adapterType;
-            IAdapterFactory adapterFactory;
+            ITypedAdapterFactory adapterFactory;
             if (adapterEntity != null) {
                 adapterType = adapterEntity.getType();
                 String adapterFactoryClassname = adapterEntity.getClassname();
                 switch (adapterType) {
                     case INTERNAL:
-                        adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+                        adapterFactory = (ITypedAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
                         break;
                     case EXTERNAL:
                         String[] anameComponents = adapterName.split("#");
                         String libraryName = anameComponents[0];
                         ClassLoader cl =
                                 appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
-                        adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
+                        adapterFactory = (ITypedAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                         break;
                     default:
                         throw new AsterixException("Unknown Adapter type " + adapterType);
@@ -165,17 +165,17 @@
     }
 
     @SuppressWarnings("rawtypes")
-    public static Triple<IAdapterFactory, RecordDescriptor, AdapterType> getFeedFactoryAndOutput(Feed feed,
+    public static Triple<ITypedAdapterFactory, RecordDescriptor, AdapterType> getFeedFactoryAndOutput(Feed feed,
             FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
             throws AlgebricksException {
         // This method needs to be re-visited
         String adapterName = null;
         DatasourceAdapter adapterEntity = null;
         String adapterFactoryClassname = null;
-        IAdapterFactory adapterFactory = null;
+        ITypedAdapterFactory adapterFactory = null;
         ARecordType adapterOutputType = null;
         ARecordType metaType = null;
-        Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> feedProps = null;
+        Triple<ITypedAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> feedProps = null;
         IDataSourceAdapter.AdapterType adapterType = null;
         try {
             Map<String, String> configuration = feed.getConfiguration();
@@ -196,14 +196,14 @@
                 adapterFactoryClassname = adapterEntity.getClassname();
                 switch (adapterType) {
                     case INTERNAL:
-                        adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+                        adapterFactory = (ITypedAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
                         break;
                     case EXTERNAL:
                         String[] anameComponents = adapterName.split("#");
                         String libraryName = anameComponents[0];
                         ClassLoader cl =
                                 appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
-                        adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
+                        adapterFactory = (ITypedAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                         break;
                     default:
                         throw new AsterixException("Unknown Adapter type " + adapterType);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index 4b7d359..a5084e0 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -99,6 +99,8 @@
      */
     public static final byte OP_UPSERT = 0x03;
 
+    private static final String DATASET_INLINE_TYPE_PREFIX = "$d$t$";
+
     private DatasetUtil() {
     }
 
@@ -207,9 +209,13 @@
      * field is actually a key by making sure the field is coming from the right record (data record or meta record),
      * e.g. if the field name happens to be equal to the key name but the field is coming from the data record while
      * the key is coming from the meta record.
-     * @param keySourceIndicator indicates where the key is coming from, 1 from meta record, 0 from data record
-     * @param keyIndex the key index we're checking the field against
-     * @param fieldFromMeta whether the field is coming from the meta record or the data record
+     *
+     * @param keySourceIndicator
+     *            indicates where the key is coming from, 1 from meta record, 0 from data record
+     * @param keyIndex
+     *            the key index we're checking the field against
+     * @param fieldFromMeta
+     *            whether the field is coming from the meta record or the data record
      * @return true if the key source matches the field source. Otherwise, false.
      */
     private static boolean keySourceMatches(List<Integer> keySourceIndicator, int keyIndex, boolean fieldFromMeta) {
@@ -603,4 +609,13 @@
         }
         return new Pair<>(first, second);
     }
+
+    public static String createInlineTypeName(String datasetName, boolean forMetaItemType) {
+        char typeChar = forMetaItemType ? 'm' : 'i';
+        return DATASET_INLINE_TYPE_PREFIX + typeChar + '$' + datasetName;
+    }
+
+    public static boolean isInlineTypeName(Dataset dataset, String typeDataverseName, String typeName) {
+        return dataset.getDataverseName().equals(typeDataverseName) && typeName.startsWith(DATASET_INLINE_TYPE_PREFIX);
+    }
 }
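For orientation, the inline-type helpers added above compose a name from the $d$t$ prefix, a one-character kind marker, and the dataset name; a minimal sketch of the resulting names (the dataset name is only an example):

    // Illustrative output of the helpers above, for a dataset named "Customers".
    String itemTypeName = DatasetUtil.createInlineTypeName("Customers", false); // "$d$t$i$Customers"
    String metaTypeName = DatasetUtil.createInlineTypeName("Customers", true);  // "$d$t$m$Customers"
    // isInlineTypeName(...) is true only when the type lives in the dataset's own
    // dataverse and its name starts with the "$d$t$" prefix.
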
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
index 47db3b0..c1d8f42 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
@@ -30,7 +30,7 @@
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.config.DatasetConfig.TransactionState;
 import org.apache.asterix.common.context.IStorageComponentProvider;
-import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.ITypedAdapterFactory;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.indexing.IndexingConstants;
 import org.apache.asterix.external.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
@@ -254,7 +254,7 @@
             throws HyracksDataException, AlgebricksException {
         ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
         Map<String, String> configuration = externalDatasetDetails.getProperties();
-        IAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory(
+        ITypedAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory(
                 metadataProvider.getApplicationContext().getServiceContext(), externalDatasetDetails.getAdapter(),
                 configuration, (ARecordType) itemType, files, true, null);
         ExternalScanOperatorDescriptor scanOp =
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
index e5d4721..3436b44 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/MetadataUtil.java
@@ -43,4 +43,5 @@
         int idx = datasetName.indexOf('.');
         return datasetName.substring(0, idx);
     }
+
 }
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
index f77e6c4..bf8698d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
@@ -35,6 +35,7 @@
 import org.apache.asterix.om.base.AMutableDouble;
 import org.apache.asterix.om.base.AMutableInt64;
 import org.apache.asterix.om.base.ANull;
+import org.apache.asterix.om.exceptions.ExceptionUtil;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
@@ -44,8 +45,8 @@
 import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
 import org.apache.asterix.runtime.evaluators.common.AccessibleByteArrayEval;
 import org.apache.asterix.runtime.evaluators.common.ClosedRecordConstructorEvalFactory.ClosedRecordConstructorEval;
-import org.apache.asterix.runtime.exceptions.IncompatibleTypeException;
 import org.apache.asterix.runtime.exceptions.UnsupportedItemTypeException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
@@ -65,6 +66,11 @@
     private static final int COUNT_OFFSET = 8;
     protected static final int AGG_TYPE_OFFSET = 16;
 
+    private final IEvaluatorContext context;
+
+    // Warning flag to warn only once in case of non-numeric data
+    private boolean isWarned;
+
     private IPointable inputVal = new VoidPointable();
     private IScalarEvaluator eval;
     private AMutableDouble aDouble = new AMutableDouble(0);
@@ -92,6 +98,7 @@
     public AbstractSerializableAvgAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
             SourceLocation sourceLoc) throws HyracksDataException {
         super(sourceLoc);
+        this.context = context;
         eval = args[0].createScalarEvaluator(context);
     }
 
@@ -101,6 +108,7 @@
             state.writeDouble(0.0);
             state.writeLong(0);
             state.writeByte(ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
+            isWarned = false;
         } catch (IOException e) {
             throw HyracksDataException.create(e);
         }
@@ -136,17 +144,16 @@
         } else if (aggType == ATypeTag.SYSTEM_NULL) {
             aggType = typeTag;
         } else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
-            if (typeTag.ordinal() > aggType.ordinal()) {
-                throw new IncompatibleTypeException(sourceLoc, BuiltinFunctions.AVG, bytes[offset],
-                        aggType.serialize());
-            } else {
-                throw new IncompatibleTypeException(sourceLoc, BuiltinFunctions.AVG, aggType.serialize(),
-                        bytes[offset]);
+            // Issue warning only once and treat current tuple as null
+            if (!isWarned) {
+                isWarned = true;
+                ExceptionUtil.warnUnsupportedType(context, sourceLoc, getIdentifier().getName(), typeTag);
             }
+            processNull(state, start);
+            return;
         } else if (ATypeHierarchy.canPromote(aggType, typeTag)) {
             aggType = typeTag;
         }
-        ++count;
         switch (typeTag) {
             case TINYINT: {
                 byte val = AInt8SerializerDeserializer.getByte(bytes, offset + 1);
@@ -179,8 +186,15 @@
                 break;
             }
             default:
-                throw new UnsupportedItemTypeException(sourceLoc, BuiltinFunctions.AVG, bytes[offset]);
+                // Issue warning only once and treat current tuple as null
+                if (!isWarned) {
+                    isWarned = true;
+                    ExceptionUtil.warnUnsupportedType(context, sourceLoc, getIdentifier().getName(), typeTag);
+                }
+                processNull(state, start);
+                return;
         }
+        count++;
         BufferSerDeUtil.writeDouble(sum, state, start + SUM_OFFSET);
         BufferSerDeUtil.writeLong(count, state, start + COUNT_OFFSET);
         state[start + AGG_TYPE_OFFSET] = aggType.serialize();
@@ -279,4 +293,8 @@
         return false;
     }
 
+    // Function identifier
+    private FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.AVG;
+    }
 }
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
index 29719ad..d2367f9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
@@ -35,6 +35,7 @@
 import org.apache.asterix.om.base.AMutableDouble;
 import org.apache.asterix.om.base.AMutableInt64;
 import org.apache.asterix.om.base.ANull;
+import org.apache.asterix.om.exceptions.ExceptionUtil;
 import org.apache.asterix.om.functions.BuiltinFunctions;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
@@ -44,8 +45,8 @@
 import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
 import org.apache.asterix.runtime.evaluators.common.AccessibleByteArrayEval;
 import org.apache.asterix.runtime.evaluators.common.ClosedRecordConstructorEvalFactory.ClosedRecordConstructorEval;
-import org.apache.asterix.runtime.exceptions.IncompatibleTypeException;
 import org.apache.asterix.runtime.exceptions.UnsupportedItemTypeException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
 import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
@@ -62,6 +63,11 @@
     private static final int SUM_FIELD_ID = 0;
     private static final int COUNT_FIELD_ID = 1;
 
+    private final IEvaluatorContext context;
+
+    // Warning flag to warn only once in case of non-numeric data
+    private boolean isWarned;
+
     private final ARecordType recType;
 
     private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@@ -95,6 +101,7 @@
     public AbstractAvgAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
             SourceLocation sourceLoc) throws HyracksDataException {
         super(sourceLoc);
+        this.context = context;
         eval = args[0].createScalarEvaluator(context);
         recType = new ARecordType(null, new String[] { "sum", "count" },
                 new IAType[] { BuiltinType.ADOUBLE, BuiltinType.AINT64 }, false);
@@ -106,6 +113,7 @@
         aggType = ATypeTag.SYSTEM_NULL;
         sum = 0.0;
         count = 0;
+        isWarned = false;
     }
 
     @Override
@@ -134,15 +142,17 @@
         } else if (aggType == ATypeTag.SYSTEM_NULL) {
             aggType = typeTag;
         } else if (typeTag != ATypeTag.SYSTEM_NULL && !ATypeHierarchy.isCompatible(typeTag, aggType)) {
-            if (typeTag.ordinal() > aggType.ordinal()) {
-                throw new IncompatibleTypeException(sourceLoc, BuiltinFunctions.AVG, data[offset], aggType.serialize());
-            } else {
-                throw new IncompatibleTypeException(sourceLoc, BuiltinFunctions.AVG, aggType.serialize(), data[offset]);
+            // Issue warning only once and treat current tuple as null
+            if (!isWarned) {
+                isWarned = true;
+                ExceptionUtil.warnUnsupportedType(context, sourceLoc, getIdentifier().getName(), typeTag);
             }
+            processNull();
+            return;
         } else if (ATypeHierarchy.canPromote(aggType, typeTag)) {
             aggType = typeTag;
         }
-        ++count;
+
         switch (typeTag) {
             case TINYINT: {
                 byte val = AInt8SerializerDeserializer.getByte(data, offset + 1);
@@ -175,9 +185,16 @@
                 break;
             }
             default: {
-                throw new UnsupportedItemTypeException(sourceLoc, BuiltinFunctions.AVG, data[offset]);
+                // Issue warning only once and treat current tuple as null
+                if (!isWarned) {
+                    isWarned = true;
+                    ExceptionUtil.warnUnsupportedType(context, sourceLoc, getIdentifier().getName(), typeTag);
+                }
+                processNull();
+                return;
             }
         }
+        count++;
     }
 
     protected void finishPartialResults(IPointable result) throws HyracksDataException {
@@ -259,4 +276,8 @@
         return false;
     }
 
+    // Function identifier
+    private FunctionIdentifier getIdentifier() {
+        return BuiltinFunctions.AVG;
+    }
 }
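Both AVG variants above now share the same warn-once behaviour: the first incompatible or unsupported value triggers a warning via ExceptionUtil.warnUnsupportedType, and such tuples are treated as null rather than raising an exception. A condensed sketch of that control flow, using the fields and helpers defined in the classes above:

    // Warn once, then treat the offending value as null so the aggregate keeps running.
    if (!isWarned) {
        isWarned = true;
        ExceptionUtil.warnUnsupportedType(context, sourceLoc, getIdentifier().getName(), typeTag);
    }
    processNull(); // the tuple contributes to neither sum nor count
    return;
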
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
index a0b10c6..3366ac1 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
@@ -45,6 +45,7 @@
 import org.apache.asterix.common.config.TransactionProperties;
 import org.apache.asterix.common.context.IStorageComponentProvider;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.external.IAdapterFactoryService;
 import org.apache.asterix.common.library.ILibraryManager;
 import org.apache.asterix.common.metadata.IMetadataBootstrap;
 import org.apache.asterix.common.replication.INcLifecycleCoordinator;
@@ -98,14 +99,15 @@
     private final IReceptionist receptionist;
     private final IRequestTracker requestTracker;
     private final IConfigValidator configValidator;
+    private final IAdapterFactoryService adapterFactoryService;
 
     public CcApplicationContext(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
             ILibraryManager libraryManager, Supplier<IMetadataBootstrap> metadataBootstrapSupplier,
             IGlobalRecoveryManager globalRecoveryManager, INcLifecycleCoordinator ftStrategy,
             IJobLifecycleListener activeLifeCycleListener, IStorageComponentProvider storageComponentProvider,
             IMetadataLockManager mdLockManager, IReceptionistFactory receptionistFactory,
-            IConfigValidatorFactory configValidatorFactory, Object extensionManager)
-            throws AlgebricksException, IOException {
+            IConfigValidatorFactory configValidatorFactory, Object extensionManager,
+            IAdapterFactoryService adapterFactoryService) throws AlgebricksException, IOException {
         this.ccServiceCtx = ccServiceCtx;
         this.hcc = hcc;
         this.libraryManager = libraryManager;
@@ -139,6 +141,7 @@
         receptionist = receptionistFactory.create();
         requestTracker = new RequestTracker(this);
         configValidator = configValidatorFactory.create();
+        this.adapterFactoryService = adapterFactoryService;
     }
 
     @Override
@@ -306,4 +309,9 @@
     public IRequestTracker getRequestTracker() {
         return requestTracker;
     }
+
+    @Override
+    public IAdapterFactoryService getAdapterFactoryService() {
+        return adapterFactoryService;
+    }
 }
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/NoOpCoordinationService.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/NoOpCoordinationService.java
index 6f9a8d2..3cbaed4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/NoOpCoordinationService.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/NoOpCoordinationService.java
@@ -35,6 +35,11 @@
     }
 
     @Override
+    public void putSensitive(String key, byte[] value) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
     public Optional<byte[]> get(String key) {
         throw new UnsupportedOperationException();
     }
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index 421dab8..a542c70 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -172,6 +172,11 @@
               <gav>io.netty:netty-all:4.1.48.Final</gav>
               <noticeUrl>https://raw.githubusercontent.com/netty/netty/netty-4.1.48.Final/NOTICE.txt</noticeUrl>
             </override>
+            <override>
+              <gav>org.reactivestreams:reactive-streams:1.0.2</gav>
+              <noticeUrl>https://raw.githubusercontent.com/reactive-streams/reactive-streams-jvm/v1.0.2/COPYING.txt</noticeUrl>
+              <url>https://raw.githubusercontent.com/reactive-streams/reactive-streams-jvm/v1.0.2/LICENSE.txt</url>
+            </override>
           </overrides>
           <licenses>
             <license>
@@ -205,6 +210,7 @@
                 <aliasUrl>http://www.apache.org/licenses/LICENSE-2.0</aliasUrl>
                 <aliasUrl>https://www.apache.org/licenses/LICENSE-2.0.txt</aliasUrl>
                 <aliasUrl>http://www.apache.org/licenses/LICENSE-2.0.html</aliasUrl>
+                <aliasUrl>https://aws.amazon.com/apache2.0</aliasUrl>
               </aliasUrls>
               <metric>1</metric>
             </license>
diff --git a/asterixdb/asterix-spidersilk/config/cc.conf b/asterixdb/asterix-spidersilk/config/cc.conf
deleted file mode 100644
index 3212003..0000000
--- a/asterixdb/asterix-spidersilk/config/cc.conf
+++ /dev/null
@@ -1,35 +0,0 @@
-; Licensed to the Apache Software Foundation (ASF) under one
-; or more contributor license agreements.  See the NOTICE file
-; distributed with this work for additional information
-; regarding copyright ownership.  The ASF licenses this file
-; to you under the Apache License, Version 2.0 (the
-; "License"); you may not use this file except in compliance
-; with the License.  You may obtain a copy of the License at
-;
-;   http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing,
-; software distributed under the License is distributed on an
-; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-; KIND, either express or implied.  See the License for the
-; specific language governing permissions and limitations
-; under the License.
-
-[nc/nc1]
-txn.log.dir=/data/txnlog
-core.dump.dir=/data/coredump
-iodevices=/data
-address=nc1
-
-[nc/nc2]
-txn.log.dir=/data/txnlog
-core.dump.dir=/data/coredump
-iodevices=/data
-address=nc2
-
-[nc]
-app.class=org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint
-command=asterixnc
-
-[cc]
-address=cc
\ No newline at end of file
diff --git a/asterixdb/asterix-spidersilk/docker/Dockerfile b/asterixdb/asterix-spidersilk/docker/Dockerfile
deleted file mode 100644
index b25561a..0000000
--- a/asterixdb/asterix-spidersilk/docker/Dockerfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-FROM centos:7
-
-USER root
-
-RUN yum install -y unzip git which docker-client java-1.8.0-openjdk-headless
diff --git a/asterixdb/asterix-spidersilk/pom.xml b/asterixdb/asterix-spidersilk/pom.xml
index 6b370c8..c58ee4f 100644
--- a/asterixdb/asterix-spidersilk/pom.xml
+++ b/asterixdb/asterix-spidersilk/pom.xml
@@ -35,91 +35,4 @@
       <comments>A business-friendly OSS license</comments>
     </license>
   </licenses>
-
-  <properties>
-    <root.dir>${basedir}/..</root.dir>
-  </properties>
-
-  <repositories>
-    <repository>
-      <id>snapshots-repo</id>
-      <url>https://oss.sonatype.org/content/repositories/snapshots</url>
-      <releases><enabled>false</enabled></releases>
-      <snapshots><enabled>true</enabled></snapshots>
-    </repository>
-  </repositories>
-
-  <dependencies>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.12</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>me.arminb.spidersilk</groupId>
-      <artifactId>spidersilk</artifactId>
-      <version>0.4.0</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <version>1.7.25</version>
-    </dependency>
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-      <version>2.9.7</version>
-    </dependency>
-    <dependency>
-      <groupId>ch.qos.logback</groupId>
-      <artifactId>logback-classic</artifactId>
-      <version>1.2.3</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-app</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-test-framework</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-app</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.asterix</groupId>
-      <artifactId>asterix-server</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-  </dependencies>
-
-  <!-- While these dependencies are declared and being used, the maven dependency plugin detects them as declared and
-  not used. To make this right, it is needed to force these dependencies as used -->
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <configuration>
-          <usedDependencies>
-            <usedDependency>ch.qos.logback:logback-classic</usedDependency>
-            <usedDependency>org.apache.asterix:asterix-app</usedDependency>
-            <usedDependency>org.apache.asterix:asterix-server</usedDependency>
-          </usedDependencies>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
 </project>
diff --git a/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/SampleTestIT.java b/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/SampleTestIT.java
deleted file mode 100644
index b3b73bf..0000000
--- a/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/SampleTestIT.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.spidersilk;
-
-import java.io.InputStream;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.asterix.test.common.TestExecutor;
-import org.apache.asterix.testframework.context.TestCaseContext;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import me.arminb.spidersilk.SpiderSilkRunner;
-import me.arminb.spidersilk.dsl.entities.Deployment;
-import me.arminb.spidersilk.exceptions.RuntimeEngineException;
-
-public class SampleTestIT {
-    private static final Logger logger = LoggerFactory.getLogger(SampleTestIT.class);
-
-    protected static SpiderSilkRunner runner;
-
-    @BeforeClass
-    public static void before() throws RuntimeEngineException {
-        Deployment deployment = TestUtil.getSimpleClusterDeployment();
-        runner = SpiderSilkRunner.run(deployment);
-        TestUtil.waitForClusterToBeUp(runner);
-        logger.info("The cluster is UP!");
-    }
-
-    @AfterClass
-    public static void after() {
-        if (runner != null) {
-            runner.stop();
-        }
-    }
-
-    @Test
-    public void sampleTest() throws Exception {
-
-        TestExecutor testExecutor = TestUtil.getTestExecutor(runner);
-        String ddl = "drop dataverse company if exists;" + "create dataverse company;" + "use company;"
-                + "create type Emp as open {" + "  id : int32," + "  name : string" + "};"
-                + "create dataset Employee(Emp) primary key id;";
-
-        String insertStatements = "use company;" + "insert into Employee({ \"id\":123,\"name\":\"John Doe\"});";
-
-        String query = "use company;" + "select value emp from Employee emp;";
-
-        testExecutor.executeSqlppUpdateOrDdl(ddl, TestCaseContext.OutputFormat.CLEAN_JSON);
-        logger.info("Company dataverse and employee dataset are created!");
-        testExecutor.executeSqlppUpdateOrDdl(insertStatements, TestCaseContext.OutputFormat.CLEAN_JSON);
-        logger.info("A record is inserted into employee dataset");
-        InputStream resultStream = testExecutor.executeSqlppUpdateOrDdl(query, TestCaseContext.OutputFormat.CLEAN_JSON);
-
-        ObjectMapper objectMapper = new ObjectMapper();
-        List<Map<String, String>> result = objectMapper.readValue(resultStream, List.class);
-
-        Assert.assertEquals(1, result.size());
-        Assert.assertEquals(123, result.get(0).get("id"));
-        Assert.assertEquals("John Doe", result.get(0).get("name"));
-
-        logger.info("The fetched record matches the inserted record");
-    }
-}
diff --git a/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/TestUtil.java b/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/TestUtil.java
deleted file mode 100644
index 03e8191..0000000
--- a/asterixdb/asterix-spidersilk/src/test/java/org/apache/asterix/spidersilk/TestUtil.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.spidersilk;
-
-import java.io.File;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.stream.Stream;
-
-import org.apache.asterix.test.common.TestExecutor;
-
-import me.arminb.spidersilk.SpiderSilkRunner;
-import me.arminb.spidersilk.dsl.entities.Deployment;
-import me.arminb.spidersilk.dsl.entities.PortType;
-import me.arminb.spidersilk.dsl.entities.ServiceType;
-import me.arminb.spidersilk.exceptions.RuntimeEngineException;
-
-public class TestUtil {
-    private static String mavenVersion;
-    private static String asterixHome;
-
-    public static Deployment getSimpleClusterDeployment() {
-        mavenVersion = getMavenArtifactVersion();
-        asterixHome = "/asterix/apache-asterixdb-" + mavenVersion;
-
-        return new Deployment.DeploymentBuilder("simpleClusterDeployment")
-                // Service Definitions
-                .withService("asterix")
-                .applicationPath("../asterix-server/target/asterix-server-" + mavenVersion + "-binary-assembly.zip",
-                        "/asterix", false, true, false)
-                .dockerFileAddress("docker/Dockerfile", false).dockerImage("spidersilk/test-asterix")
-                .instrumentablePath(asterixHome + "/repo/asterix-server-" + mavenVersion + ".jar")
-                .libraryPath(asterixHome + "/repo/*.jar").libraryPath(asterixHome + "/lib/*.jar")
-                .logDirectory(asterixHome + "/logs").serviceType(ServiceType.JAVA).and()
-                // Node Definitions
-                .withNode("cc", "asterix").applicationPath("config", "/asterix/config")
-                .startCommand(asterixHome + "/bin/asterixcc -config-file /asterix/config/cc.conf").tcpPort(19002).and()
-                .withNode("nc1", "asterix").startCommand(asterixHome + "/bin/asterixncservice").and()
-                .withNode("nc2", "asterix").startCommand(asterixHome + "/bin/asterixncservice").and().build();
-    }
-
-    public static String getMavenArtifactVersion() {
-        Optional<String> version = Stream
-                .of(Objects.requireNonNull(new File("../asterix-server/target")
-                        .list((dir, name) -> name.matches("asterix-server-.*-binary-assembly.zip"))))
-                .map(foo -> foo.replace("asterix-server-", "")).map(foo -> foo.replace("-binary-assembly.zip", ""))
-                .findFirst();
-        return version.orElseThrow(IllegalStateException::new);
-    }
-
-    public static void waitForClusterToBeUp(SpiderSilkRunner runner) throws RuntimeEngineException {
-        runner.runtime().runCommandInNode("cc", asterixHome + "/bin/asterixhelper wait_for_cluster");
-    }
-
-    public static TestExecutor getTestExecutor(SpiderSilkRunner runner) {
-        return new TestExecutor(runner.runtime().ip("cc"), runner.runtime().portMapping("cc", 19002, PortType.TCP));
-    }
-}
diff --git a/asterixdb/asterix-spidersilk/src/test/resources/logback.xml b/asterixdb/asterix-spidersilk/src/test/resources/logback.xml
deleted file mode 100644
index ca7aa79..0000000
--- a/asterixdb/asterix-spidersilk/src/test/resources/logback.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<configuration>
-
-    <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
-        <layout class="ch.qos.logback.classic.PatternLayout">
-            <Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</Pattern>
-        </layout>
-    </appender>
-
-    <logger name="me.arminb" level="DEBUG"/>
-    <logger name="org.apache.asterix" level="DEBUG"/>
-
-    <root level="ERROR">
-        <appender-ref ref="Console" />
-    </root>
-</configuration>
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 25550bf..96a1998 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -81,6 +81,7 @@
     <hyracks.version>0.3.5-SNAPSHOT</hyracks.version>
     <hadoop.version>2.8.5</hadoop.version>
     <jacoco.version>0.7.6.201602180812</jacoco.version>
+    <awsjavasdk.version>2.10.83</awsjavasdk.version>
 
     <implementation.title>Apache AsterixDB - ${project.name}</implementation.title>
     <implementation.url>https://asterixdb.apache.org/</implementation.url>
@@ -194,7 +195,6 @@
             <exclude>**/*.json</exclude>
             <exclude>**/*.adm</exclude>
             <exclude>**/*.template</exclude>
-            <exclude>**/.SpiderSilkWorkingDirectory/**</exclude>
             <exclude>asterix-installer/**</exclude> <!-- in case -DskipInstaller -->
           </excludes>
         </configuration>
@@ -1340,6 +1340,84 @@
         <artifactId>reflections</artifactId>
         <version>0.9.12</version>
       </dependency>
+      <dependency>
+        <groupId>software.amazon.awssdk</groupId>
+        <artifactId>s3</artifactId>
+        <version>${awsjavasdk.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-http</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec-http2</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-codec</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-common</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-buffer</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-handler</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-transport-native-epoll</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>software.amazon.awssdk</groupId>
+        <artifactId>regions</artifactId>
+        <version>${awsjavasdk.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>software.amazon.awssdk</groupId>
+        <artifactId>auth</artifactId>
+        <version>${awsjavasdk.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>software.amazon.awssdk</groupId>
+        <artifactId>sdk-core</artifactId>
+        <version>${awsjavasdk.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <!-- Mock for AWS S3 -->
+      <dependency>
+        <groupId>io.findify</groupId>
+        <artifactId>s3mock_2.12</artifactId>
+        <version>0.2.5</version>
+      </dependency>
+      <!-- Needed for the s3 mock -->
+      <dependency>
+        <groupId>com.typesafe.akka</groupId>
+        <artifactId>akka-http-core_2.12</artifactId>
+        <version>10.1.0</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
diff --git a/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_COPYING.txt b/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_COPYING.txt
new file mode 100644
index 0000000..1625c17
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_COPYING.txt
@@ -0,0 +1,121 @@
+Creative Commons Legal Code
+
+CC0 1.0 Universal
+
+    CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
+    LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
+    ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
+    INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
+    REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
+    PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
+    THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
+    HEREUNDER.
+
+Statement of Purpose
+
+The laws of most jurisdictions throughout the world automatically confer
+exclusive Copyright and Related Rights (defined below) upon the creator
+and subsequent owner(s) (each and all, an "owner") of an original work of
+authorship and/or a database (each, a "Work").
+
+Certain owners wish to permanently relinquish those rights to a Work for
+the purpose of contributing to a commons of creative, cultural and
+scientific works ("Commons") that the public can reliably and without fear
+of later claims of infringement build upon, modify, incorporate in other
+works, reuse and redistribute as freely as possible in any form whatsoever
+and for any purposes, including without limitation commercial purposes.
+These owners may contribute to the Commons to promote the ideal of a free
+culture and the further production of creative, cultural and scientific
+works, or to gain reputation or greater distribution for their Work in
+part through the use and efforts of others.
+
+For these and/or other purposes and motivations, and without any
+expectation of additional consideration or compensation, the person
+associating CC0 with a Work (the "Affirmer"), to the extent that he or she
+is an owner of Copyright and Related Rights in the Work, voluntarily
+elects to apply CC0 to the Work and publicly distribute the Work under its
+terms, with knowledge of his or her Copyright and Related Rights in the
+Work and the meaning and intended legal effect of CC0 on those rights.
+
+1. Copyright and Related Rights. A Work made available under CC0 may be
+protected by copyright and related or neighboring rights ("Copyright and
+Related Rights"). Copyright and Related Rights include, but are not
+limited to, the following:
+
+  i. the right to reproduce, adapt, distribute, perform, display,
+     communicate, and translate a Work;
+ ii. moral rights retained by the original author(s) and/or performer(s);
+iii. publicity and privacy rights pertaining to a person's image or
+     likeness depicted in a Work;
+ iv. rights protecting against unfair competition in regards to a Work,
+     subject to the limitations in paragraph 4(a), below;
+  v. rights protecting the extraction, dissemination, use and reuse of data
+     in a Work;
+ vi. database rights (such as those arising under Directive 96/9/EC of the
+     European Parliament and of the Council of 11 March 1996 on the legal
+     protection of databases, and under any national implementation
+     thereof, including any amended or successor version of such
+     directive); and
+vii. other similar, equivalent or corresponding rights throughout the
+     world based on applicable law or treaty, and any national
+     implementations thereof.
+
+2. Waiver. To the greatest extent permitted by, but not in contravention
+of, applicable law, Affirmer hereby overtly, fully, permanently,
+irrevocably and unconditionally waives, abandons, and surrenders all of
+Affirmer's Copyright and Related Rights and associated claims and causes
+of action, whether now known or unknown (including existing as well as
+future claims and causes of action), in the Work (i) in all territories
+worldwide, (ii) for the maximum duration provided by applicable law or
+treaty (including future time extensions), (iii) in any current or future
+medium and for any number of copies, and (iv) for any purpose whatsoever,
+including without limitation commercial, advertising or promotional
+purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
+member of the public at large and to the detriment of Affirmer's heirs and
+successors, fully intending that such Waiver shall not be subject to
+revocation, rescission, cancellation, termination, or any other legal or
+equitable action to disrupt the quiet enjoyment of the Work by the public
+as contemplated by Affirmer's express Statement of Purpose.
+
+3. Public License Fallback. Should any part of the Waiver for any reason
+be judged legally invalid or ineffective under applicable law, then the
+Waiver shall be preserved to the maximum extent permitted taking into
+account Affirmer's express Statement of Purpose. In addition, to the
+extent the Waiver is so judged Affirmer hereby grants to each affected
+person a royalty-free, non transferable, non sublicensable, non exclusive,
+irrevocable and unconditional license to exercise Affirmer's Copyright and
+Related Rights in the Work (i) in all territories worldwide, (ii) for the
+maximum duration provided by applicable law or treaty (including future
+time extensions), (iii) in any current or future medium and for any number
+of copies, and (iv) for any purpose whatsoever, including without
+limitation commercial, advertising or promotional purposes (the
+"License"). The License shall be deemed effective as of the date CC0 was
+applied by Affirmer to the Work. Should any part of the License for any
+reason be judged legally invalid or ineffective under applicable law, such
+partial invalidity or ineffectiveness shall not invalidate the remainder
+of the License, and in such case Affirmer hereby affirms that he or she
+will not (i) exercise any of his or her remaining Copyright and Related
+Rights in the Work or (ii) assert any associated claims and causes of
+action with respect to the Work, in either case contrary to Affirmer's
+express Statement of Purpose.
+
+4. Limitations and Disclaimers.
+
+ a. No trademark or patent rights held by Affirmer are waived, abandoned,
+    surrendered, licensed or otherwise affected by this document.
+ b. Affirmer offers the Work as-is and makes no representations or
+    warranties of any kind concerning the Work, express, implied,
+    statutory or otherwise, including without limitation warranties of
+    title, merchantability, fitness for a particular purpose, non
+    infringement, or the absence of latent or other defects, accuracy, or
+    the present or absence of errors, whether or not discoverable, all to
+    the greatest extent permissible under applicable law.
+ c. Affirmer disclaims responsibility for clearing rights of other persons
+    that may apply to the Work or any use thereof, including without
+    limitation any person's Copyright and Related Rights in the Work.
+    Further, Affirmer disclaims responsibility for obtaining any necessary
+    consents, permissions or other rights required for any use of the
+    Work.
+ d. Affirmer understands and acknowledges that Creative Commons is not a
+    party to this document and has no duty or obligation with respect to
+    this CC0 or use of the Work.
\ No newline at end of file
diff --git a/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_LICENSE.txt b/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_LICENSE.txt
new file mode 100644
index 0000000..eadae05
--- /dev/null
+++ b/asterixdb/src/main/licenses/content/raw.githubusercontent.com_reactive-streams_reactive-streams-jvm_v1.0.2_LICENSE.txt
@@ -0,0 +1,8 @@
+Licensed under Public Domain (CC0)
+
+To the extent possible under law, the person who associated CC0 with
+this code has waived all copyright and related or neighboring
+rights to this code.
+
+You should have received a copy of the CC0 legalcode along with this
+work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/parsers/BooleanParserFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/parsers/BooleanParserFactory.java
new file mode 100644
index 0000000..488be04
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/parsers/BooleanParserFactory.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.dataflow.common.data.parsers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class BooleanParserFactory implements IValueParserFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final IValueParserFactory INSTANCE = new BooleanParserFactory();
+
+    private BooleanParserFactory() {
+    }
+
+    @Override
+    public IValueParser createValueParser() {
+        return BooleanParserFactory::parse;
+    }
+
+    public static void parse(char[] buffer, int start, int length, DataOutput out) throws HyracksDataException {
+        try {
+            if (length == 4 && (buffer[start] == 't' || buffer[start] == 'T')
+                    && (buffer[start + 1] == 'r' || buffer[start + 1] == 'R')
+                    && (buffer[start + 2] == 'u' || buffer[start + 2] == 'U')
+                    && (buffer[start + 3] == 'e' || buffer[start + 3] == 'E')) {
+                out.writeBoolean(true);
+                return;
+            } else if (length == 5 && (buffer[start] == 'f' || buffer[start] == 'F')
+                    && (buffer[start + 1] == 'a' || buffer[start + 1] == 'A')
+                    && (buffer[start + 2] == 'l' || buffer[start + 2] == 'L')
+                    && (buffer[start + 3] == 's' || buffer[start + 3] == 'S')
+                    && (buffer[start + 4] == 'e' || buffer[start + 4] == 'E')) {
+                out.writeBoolean(false);
+                return;
+            }
+        } catch (IOException e) {
+            throw HyracksDataException.create(e);
+        }
+
+        throw new HyracksDataException("Invalid input data");
+    }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
index 9ddb4c2..2eb882a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
@@ -76,12 +76,11 @@
                                 break;
                             }
                             // Eliminate double quotes in the field that we are going to parse
-                            if (cursor.isDoubleQuoteIncludedInThisField) {
-                                cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
-                                cursor.fEnd -= cursor.doubleQuoteCount;
-                                cursor.isDoubleQuoteIncludedInThisField = false;
+                            if (cursor.fieldHasDoubleQuote()) {
+                                cursor.eliminateDoubleQuote();
                             }
-                            valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, dos);
+                            valueParsers[i].parse(cursor.getBuffer(), cursor.getFieldStart(), cursor.getFieldLength(),
+                                    dos);
                             tb.addFieldEndOffset();
                         }
                         FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0,
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FieldCursorForDelimitedDataParser.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FieldCursorForDelimitedDataParser.java
index 7e5ee2c..fd3e4c3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FieldCursorForDelimitedDataParser.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FieldCursorForDelimitedDataParser.java
@@ -32,18 +32,18 @@
         EOF //end of stream reached
     }
 
-    public char[] buffer; //buffer to holds the input coming form the underlying input stream
-    public int fStart; //start position for field
-    public int fEnd; //end position for field
-    public int recordCount; //count of records
-    public int fieldCount; //count of fields in current record
-    public int doubleQuoteCount; //count of double quotes
-    public boolean isDoubleQuoteIncludedInThisField; //does current field include double quotes
+    private char[] buffer; //buffer to hold the input coming from the underlying input stream
+    private int fStart; //start position for field
+    private int fEnd; //end position for field
+    private int recordCount; //count of records
+    private int fieldCount; //count of fields in current record
+    private int doubleQuoteCount; //count of double quotes
+    private boolean isDoubleQuoteIncludedInThisField; //does current field include double quotes
 
     private static final int INITIAL_BUFFER_SIZE = 4096;//initial buffer size
     private static final int INCREMENT = 4096; //increment size
 
-    private Reader in; //the underlying buffer
+    private final Reader in; //the underlying reader
 
     private int start; //start of valid buffer area
     private int end; //end of valid buffer area
@@ -55,8 +55,8 @@
     private int quoteCount; //count of single quotes
     private boolean startedQuote; //whether a quote has been started
 
-    private char quote; //the quote character
-    private char fieldDelimiter; //the delimiter
+    private final char quote; //the quote character
+    private final char fieldDelimiter; //the delimiter
 
     public FieldCursorForDelimitedDataParser(Reader in, char fieldDelimiter, char quote) {
         this.in = in;
@@ -70,9 +70,9 @@
         state = State.INIT;
         this.quote = quote;
         this.fieldDelimiter = fieldDelimiter;
-        lastDelimiterPosition = -99;
-        lastQuotePosition = -99;
-        lastDoubleQuotePosition = -99;
+        lastDelimiterPosition = -1;
+        lastQuotePosition = -1;
+        lastDoubleQuotePosition = -1;
         quoteCount = 0;
         doubleQuoteCount = 0;
         startedQuote = false;
@@ -81,9 +81,44 @@
         fieldCount = 0;
     }
 
-    public void nextRecord(char[] buffer, int recordLength) throws IOException {
+    public char[] getBuffer() {
+        return buffer;
+    }
+
+    public int getFieldStart() {
+        return fStart;
+    }
+
+    public int getFieldLength() {
+        return fEnd - fStart;
+    }
+
+    public boolean isFieldEmpty() {
+        return fStart == fEnd;
+    }
+
+    public boolean fieldHasDoubleQuote() {
+        return isDoubleQuoteIncludedInThisField;
+    }
+
+    public int getFieldCount() {
+        return fieldCount;
+    }
+
+    public int getRecordCount() {
+        return recordCount;
+    }
+
+    public void nextRecord(char[] buffer, int recordLength) {
         recordCount++;
         fieldCount = 0;
+        lastDelimiterPosition = -1;
+        lastQuotePosition = -1;
+        lastDoubleQuotePosition = -1;
+        quoteCount = 0;
+        doubleQuoteCount = 0;
+        startedQuote = false;
+        isDoubleQuoteIncludedInThisField = false;
         start = 0;
         end = recordLength;
         state = State.IN_RECORD;
@@ -187,7 +222,6 @@
     }
 
     public boolean nextField() throws IOException {
-        fieldCount++;
         switch (state) {
             case INIT:
             case EOR:
@@ -196,12 +230,12 @@
                 return false;
 
             case IN_RECORD:
-                boolean eof;
+                fieldCount++;
                 // reset quote related values
                 startedQuote = false;
                 isDoubleQuoteIncludedInThisField = false;
-                lastQuotePosition = -99;
-                lastDoubleQuotePosition = -99;
+                lastQuotePosition = -1;
+                lastDoubleQuotePosition = -1;
                 quoteCount = 0;
                 doubleQuoteCount = 0;
 
@@ -209,21 +243,26 @@
                 while (true) {
                     if (p >= end) {
                         int s = start;
-                        eof = !readMore();
+                        boolean eof = !readMore();
                         p -= (s - start);
-                        lastQuotePosition -= (s - start);
-                        lastDoubleQuotePosition -= (s - start);
-                        lastDelimiterPosition -= (s - start);
+                        lastQuotePosition -= (lastQuotePosition > -1) ? (s - start) : 0;
+                        lastDoubleQuotePosition -= (lastDoubleQuotePosition > -1) ? (s - start) : 0;
+                        lastDelimiterPosition -= (lastDelimiterPosition > -1) ? (s - start) : 0;
                         if (eof) {
                             state = State.EOF;
-                            if (startedQuote && lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
-                                    && quoteCount == doubleQuoteCount * 2 + 2) {
-                                // set the position of fStart to +1, fEnd to -1 to remove quote character
-                                fStart = start + 1;
-                                fEnd = p - 1;
-                            } else {
+                            if (!startedQuote) {
                                 fStart = start;
                                 fEnd = p;
+                            } else {
+                                if (lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
+                                        && quoteCount == doubleQuoteCount * 2 + 2) {
+                                    // set the position of fStart to +1, fEnd to -1 to remove quote character
+                                    fStart = start + 1;
+                                    fEnd = p - 1;
+                                } else {
+                                    throw new IOException("At record: " + recordCount + ", field#: " + fieldCount
+                                            + " - missing a closing quote");
+                                }
                             }
                             return true;
                         }
@@ -232,12 +271,12 @@
                     if (ch == quote) {
                         // If this is first quote in the field, then it needs to be placed in the beginning.
                         if (!startedQuote) {
-                            if (lastDelimiterPosition == p - 1 || lastDelimiterPosition == -99) {
+                            if (p == start) {
                                 startedQuote = true;
                             } else {
                                 // In this case, we don't have a quote in the beginning of a field.
                                 throw new IOException("At record: " + recordCount + ", field#: " + fieldCount
-                                        + " - a quote enclosing a field needs to be placed in the beginning of that field.");
+                                        + " - a quote enclosing a field needs to be placed in the beginning of that field");
                             }
                         }
                         // Check double quotes - "". We check [start != p-2]
@@ -245,8 +284,8 @@
                         // since it looks like a double quote. However, it's not a double quote.
                         // (e.g. if field2 has no value:
                         //       field1,"",field3 ... )
-                        if (lastQuotePosition == p - 1 && lastDelimiterPosition != p - 2
-                                && lastDoubleQuotePosition != p - 1) {
+                        if (lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
+                                && lastQuotePosition != start) {
                             isDoubleQuoteIncludedInThisField = true;
                             doubleQuoteCount++;
                             lastDoubleQuotePosition = p;
@@ -262,64 +301,46 @@
                             start = p + 1;
                             lastDelimiterPosition = p;
                             return true;
-                        } else if (startedQuote) {
-                            if (lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1) {
-                                // There is a quote right before the delimiter (e.g. ",)  and it is not two quote,
-                                // then the field contains a valid string.
-                                // We set the position of fStart to +1, fEnd to -1 to remove quote character
-                                fStart = start + 1;
-                                fEnd = p - 1;
-                                start = p + 1;
-                                lastDelimiterPosition = p;
-                                startedQuote = false;
-                                return true;
-                            } else if (lastQuotePosition < p - 1 && lastQuotePosition != lastDoubleQuotePosition
-                                    && quoteCount == doubleQuoteCount * 2 + 2) {
-                                // There is a quote before the delimiter, however it is not directly placed before the delimiter.
-                                // In this case, we throw an exception.
-                                // quoteCount == doubleQuoteCount * 2 + 2 : only true when we have two quotes except double-quotes.
-                                throw new IOException("At record: " + recordCount + ", field#: " + fieldCount
-                                        + " -  A quote enclosing a field needs to be followed by the delimiter.");
-                            }
+                        }
+
+                        if (lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
+                                && lastQuotePosition != start) {
+                            // There is a quote right before the delimiter (e.g. ",) and it is not a double quote,
+                            // then the field contains a valid string.
+                            // We set the position of fStart to +1, fEnd to -1 to remove quote character
+                            fStart = start + 1;
+                            fEnd = p - 1;
+                            start = p + 1;
+                            lastDelimiterPosition = p;
+                            startedQuote = false;
+                            return true;
+                        } else if (lastQuotePosition < p - 1 && lastQuotePosition != lastDoubleQuotePosition
+                                && quoteCount == doubleQuoteCount * 2 + 2) {
+                            // There is a quote before the delimiter, but it is not immediately before the delimiter.
+                            // In this case, we throw an exception.
+                            // quoteCount == doubleQuoteCount * 2 + 2 : true only when the field's quotes are the two enclosing ones plus escaped double quotes.
+                            throw new IOException("At record: " + recordCount + ", field#: " + fieldCount
+                                    + " - a quote enclosing a field needs to be followed by the delimiter");
                         }
                         // If the control flow reaches here: we have a delimiter in this field and
                         // there should be a quote in the beginning and the end of
                         // this field. So, just continue reading next character
-                    } else if (ch == '\n') {
+                    } else if (ch == '\n' || ch == '\r') {
                         if (!startedQuote) {
                             fStart = start;
                             fEnd = p;
                             start = p + 1;
-                            state = State.EOR;
+                            state = ch == '\n' ? State.EOR : State.CR;
                             lastDelimiterPosition = p;
                             return true;
-                        } else if (startedQuote && lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
+                        } else if (lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
                                 && quoteCount == doubleQuoteCount * 2 + 2) {
                             // set the position of fStart to +1, fEnd to -1 to remove quote character
                             fStart = start + 1;
                             fEnd = p - 1;
                             lastDelimiterPosition = p;
                             start = p + 1;
-                            state = State.EOR;
-                            startedQuote = false;
-                            return true;
-                        }
-                    } else if (ch == '\r') {
-                        if (!startedQuote) {
-                            fStart = start;
-                            fEnd = p;
-                            start = p + 1;
-                            state = State.CR;
-                            lastDelimiterPosition = p;
-                            return true;
-                        } else if (startedQuote && lastQuotePosition == p - 1 && lastDoubleQuotePosition != p - 1
-                                && quoteCount == doubleQuoteCount * 2 + 2) {
-                            // set the position of fStart to +1, fEnd to -1 to remove quote character
-                            fStart = start + 1;
-                            fEnd = p - 1;
-                            lastDelimiterPosition = p;
-                            start = p + 1;
-                            state = State.CR;
+                            state = ch == '\n' ? State.EOR : State.CR;
                             startedQuote = false;
                             return true;
                         }
@@ -330,7 +351,10 @@
         throw new IllegalStateException();
     }
 
-    protected boolean readMore() throws IOException {
+    private boolean readMore() throws IOException {
+        if (in == null) {
+            return false;
+        }
         if (start > 0) {
             System.arraycopy(buffer, start, buffer, 0, end - start);
         }
@@ -350,10 +374,11 @@
     }
 
     // Eliminate escaped double quotes("") in a field
-    public void eliminateDoubleQuote(char[] buffer, int start, int length) {
-        int lastDoubleQuotePosition = -99;
-        int writepos = start;
-        int readpos = start;
+    public void eliminateDoubleQuote() {
+        int lastDoubleQuotePosition = -1;
+        int writepos = fStart;
+        int readpos = fStart;
+        int length = fEnd - fStart;
         // Find positions where double quotes appear
         for (int i = 0; i < length; i++) {
             // Skip double quotes
@@ -369,5 +394,7 @@
                 readpos++;
             }
         }
+        fEnd -= doubleQuoteCount;
+        isDoubleQuoteIncludedInThisField = false;
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/file/CursorTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/file/CursorTest.java
index e663179..8edcafc 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/file/CursorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/file/CursorTest.java
@@ -51,9 +51,8 @@
             while (cursor.nextRecord()) {
                 int fieldNumber = 0;
                 while (cursor.nextField()) {
-                    if (cursor.isDoubleQuoteIncludedInThisField) {
-                        cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
-                        cursor.fEnd -= cursor.doubleQuoteCount;
+                    if (cursor.fieldHasDoubleQuote()) {
+                        cursor.eliminateDoubleQuote();
                     }
                     fieldNumber++;
                 }